feat: Implement an Electron-based broadcasting client with screen/window and audio source selection, including Pipewire integration, and add auto-unmute to the viewer.
This commit is contained in:
368
client/renderer.js
Normal file
368
client/renderer.js
Normal file
@@ -0,0 +1,368 @@
|
||||
// --- DOM element references (looked up once at load) ---
const serverUrlInput = document.getElementById('serverUrl');
const serverPasswordInput = document.getElementById('serverPassword');
const sourcesGrid = document.getElementById('sourcesGrid');
const audioSelect = document.getElementById('audioSelect');
const getSourcesBtn = document.getElementById('getSourcesBtn');
const startBtn = document.getElementById('startBtn');
const stopBtn = document.getElementById('stopBtn');
const localVideo = document.getElementById('localVideo');
const statusText = document.getElementById('statusText');
const statsPanel = document.getElementById('statsPanel');

// --- Broadcast state ---
let socket;                        // socket.io connection to the signalling server
let peerConnections = {};          // viewer id -> RTCPeerConnection
let activeStream;                  // MediaStream currently being broadcast
let selectedVideoSourceId = null;  // desktop source chosen in the grid (null until a tile is clicked)

// ICE server configuration for all peer connections.
// NOTE(review): localhost STUN/TURN with hard-coded credentials only works
// when the coturn server runs on the same machine — confirm these are meant
// to be replaced per deployment.
const config = {
    iceServers: [
        { urls: "stun:localhost:3478" },
        { urls: "turn:localhost:3478", username: "myuser", credential: "mypassword" }
    ]
};
|
||||
|
||||
// 1. Get Desktop Sources from Main Process and populate the source grid.
// Also fetches per-application audio sources via the Pipewire helper exposed
// on window.electronAPI.
getSourcesBtn.addEventListener('click', async () => {
    sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%;">Loading sources...</div>';
    audioSelect.innerHTML = '<option value="">Loading audio devices...</option>';
    startBtn.disabled = true;
    selectedVideoSourceId = null;

    try {
        // --- Fetch Virtual Video Sources ---
        const sources = await window.electronAPI.getSources();
        sourcesGrid.innerHTML = '';
        sources.forEach(source => {
            const item = document.createElement('div');
            item.className = 'source-item';

            const img = document.createElement('img');
            img.src = source.thumbnail;

            const label = document.createElement('span');
            label.innerText = source.name;
            label.title = source.name;

            item.appendChild(img);
            item.appendChild(label);

            // Clicking a tile selects it, starts the live preview, and
            // enables the start button.
            item.addEventListener('click', () => {
                document.querySelectorAll('.source-item').forEach(i => i.classList.remove('selected'));
                item.classList.add('selected');
                selectedVideoSourceId = source.id;
                startPreview(source.id);
                startBtn.disabled = false;
            });

            sourcesGrid.appendChild(item);
        });

        // --- Fetch Application Audio Sources via built Pipewire Helper ---
        const audioApps = await window.electronAPI.getAudioApps();
        audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
        audioApps.forEach(app => {
            const option = document.createElement('option');
            // We pass the actual application name into the value so the main process can find it via pw-dump
            option.value = app.name;
            option.text = `${app.name} (${app.mediaName})`;
            audioSelect.appendChild(option);
        });

        // BUGFIX: the original unconditionally re-disabled the start button
        // here, which clobbered a grid selection the user made while
        // getAudioApps() was still awaiting. Keep it disabled only when no
        // video source has been selected yet.
        startBtn.disabled = selectedVideoSourceId === null;
    } catch (e) {
        console.error(e);
        sourcesGrid.innerHTML = '<div style="color:red; width:100%;">Error loading sources</div>';
        audioSelect.innerHTML = '<option value="none">Error loading audio</option>';
    }
});
|
||||
|
||||
// --- Preview Stream Logic ---
let previewStream = null;

/**
 * Captures the chosen desktop source and shows it in the local <video>
 * element. Any previous preview is torn down first; a falsy id simply hides
 * the preview.
 *
 * @param {string|null} videoSourceId - Electron desktop-capturer source id.
 */
async function startPreview(videoSourceId) {
    // Tear down whatever preview was running before.
    if (previewStream !== null) {
        for (const track of previewStream.getTracks()) track.stop();
        previewStream = null;
    }

    if (!videoSourceId) {
        localVideo.style.display = 'none';
        return;
    }

    try {
        // Electron's desktop capture uses the legacy "mandatory" constraint form.
        const constraints = {
            audio: false,
            video: {
                mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: videoSourceId,
                }
            }
        };
        previewStream = await navigator.mediaDevices.getUserMedia(constraints);

        // Removed 1080p ideal limit to prevent Chromium from green-padding non-16:9 window captures!
        const [track] = previewStream.getVideoTracks();
        await track.applyConstraints({ frameRate: { ideal: 60 } });

        localVideo.srcObject = previewStream;
        localVideo.style.display = 'block';
    } catch (e) {
        console.error("Failed to start preview stream:", e);
    }
}
|
||||
|
||||
// 2. Start Broadcast
// Captures the selected desktop source, optionally routes the chosen
// application's audio through the Pipewire virtual mic, then connects to the
// signalling server and flips the UI into "broadcasting" mode.
startBtn.addEventListener('click', async () => {
    const url = serverUrlInput.value;
    const password = serverPasswordInput.value;
    const videoSourceId = selectedVideoSourceId;
    const targetAppName = audioSelect.value;

    // All three are required; videoSourceId is only set once the user has
    // clicked a tile in the source grid.
    if (!videoSourceId || !url || !password) {
        alert("Please fill out URL, Password, and select a visual source.");
        return;
    }

    // Save credentials for next time
    window.electronAPI.saveConfig({ serverUrl: url, serverPassword: password });

    try {
        // Stop the preview grab so we can grab the real stream cleanly
        if (previewStream) {
            previewStream.getTracks().forEach(t => t.stop());
            previewStream = null;
        }

        // Electron's desktop capture uses the legacy "mandatory" constraint form.
        const stream = await navigator.mediaDevices.getUserMedia({
            audio: false,
            video: {
                mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: videoSourceId,
                }
            }
        });

        const videoTrack = stream.getVideoTracks()[0];
        await videoTrack.applyConstraints({ frameRate: { ideal: 60 } });

        // If user selected an application, grab the Virtual Mic input and link the app to it!
        if (targetAppName && targetAppName !== 'none') {
            const linked = await window.electronAPI.linkAppAudio(targetAppName);
            if (linked) {
                // Now that the pipewire graph is linked, we just need to read from our Virtual Mic sink!
                // Chromium registers this as a standard Input device
                const devices = await navigator.mediaDevices.enumerateDevices();
                // NOTE(review): assumes the main process names the virtual sink
                // such that its device label contains "simplescreenshare" —
                // confirm against the Pipewire helper.
                const virtMic = devices.find(d => d.kind === 'audioinput' && d.label.toLowerCase().includes('simplescreenshare'));

                if (virtMic) {
                    // Disable every voice-processing filter (incl. legacy goog*
                    // flags) so application audio passes through untouched, in
                    // stereo at 48 kHz.
                    const audioStream = await navigator.mediaDevices.getUserMedia({
                        audio: {
                            deviceId: { exact: virtMic.deviceId },
                            echoCancellation: false,
                            autoGainControl: false,
                            noiseSuppression: false,
                            googAutoGainControl: false,
                            googEchoCancellation: false,
                            googNoiseSuppression: false,
                            googHighpassFilter: false,
                            channelCount: 2,
                            sampleRate: 48000
                        },
                        video: false
                    });
                    // Merge the virtual-mic audio into the outgoing stream.
                    stream.addTrack(audioStream.getAudioTracks()[0]);
                } else {
                    console.warn("Virtual mic device not found in navigator enumeration");
                }
            } else {
                alert("Failed to link application audio. Broadcasting video only.");
            }
        }

        activeStream = stream;
        localVideo.srcObject = stream;
        localVideo.style.display = 'block';

        connectAndBroadcast(url, password);

        // Swap the UI into broadcasting mode.
        startBtn.style.display = 'none';
        stopBtn.style.display = 'inline-block';
        statsPanel.style.display = 'block';
        statusText.innerText = `Broadcasting to ${url}`;

        // Auto stop if user closes the requested window
        stream.getVideoTracks()[0].onended = stopSharing;

    } catch (e) {
        console.error("Stream capture error:", e);
        alert("Failed to capture screen. See console for details.");
    }
});
|
||||
|
||||
/**
 * Connects to the signalling server and answers each incoming viewer with a
 * WebRTC offer carrying the active capture stream.
 *
 * @param {string} url - signalling server URL (socket.io endpoint).
 * @param {string} password - broadcaster password sent on connect.
 */
function connectAndBroadcast(url, password) {
    socket = io(url);

    // Authenticate as the broadcaster once the socket is up.
    socket.on('connect', () => {
        socket.emit('broadcaster', password);
    });

    socket.on('authError', (msg) => {
        alert(msg);
        stopSharing();
    });

    // A viewer joined: build a dedicated peer connection and send an offer.
    socket.on('viewer', id => {
        if (!activeStream) return;

        const peerConnection = new RTCPeerConnection(config);
        peerConnections[id] = peerConnection;

        activeStream.getTracks().forEach(track => {
            const sender = peerConnection.addTrack(track, activeStream);
            const params = sender.getParameters();
            if (!params.encodings) params.encodings = [{}];
            // 10 Mbps cap for video; 510 kbps is the Opus maximum for audio.
            params.encodings[0].maxBitrate = track.kind === 'video' ? 10000000 : 510000;
            sender.setParameters(params).catch(e => console.error(e));
        });

        // BUGFIX: setCodecPreferences must be applied BEFORE createOffer for
        // the preference to appear in this offer's SDP. The original called it
        // inside the createOffer().then(...) callback — after the SDP had
        // already been generated — so it had no effect on the first
        // negotiation.
        if (window.RTCRtpSender && window.RTCRtpSender.getCapabilities) {
            const caps = window.RTCRtpSender.getCapabilities('video');
            if (caps && caps.codecs) {
                // Keep only H.264 and VP8 (same set the original filtered for).
                const preferredCodecs = caps.codecs.filter(c => {
                    const mime = c.mimeType.toLowerCase();
                    return mime === 'video/h264' || mime === 'video/vp8';
                });
                peerConnection.getTransceivers().forEach(t => {
                    if (t.receiver.track.kind === 'video') t.setCodecPreferences(preferredCodecs);
                });
            }
        }

        peerConnection.onicecandidate = event => {
            if (event.candidate) {
                socket.emit('candidate', id, event.candidate);
            }
        };

        peerConnection.createOffer().then(sdp => {
            // WebRTC defaults to voice-optimized ~32kbps mono. Let's force high-fidelity stereo!
            sdp.sdp = forceStereoOpus(sdp.sdp);
            return peerConnection.setLocalDescription(sdp);
        }).then(() => {
            socket.emit('offer', id, peerConnection.localDescription);
        });
    });

    socket.on('answer', (id, description) => {
        if (peerConnections[id]) peerConnections[id].setRemoteDescription(description);
    });

    socket.on('candidate', (id, candidate) => {
        if (peerConnections[id]) peerConnections[id].addIceCandidate(new RTCIceCandidate(candidate));
    });

    socket.on('disconnectPeer', id => {
        if (peerConnections[id]) {
            peerConnections[id].close();
            delete peerConnections[id];
        }
    });
}

/**
 * Rewrites every Opus fmtp line in an SDP string to request high-fidelity
 * stereo (stereo=1, 510 kbps max average bitrate, CBR, in-band FEC). If no
 * fmtp line exists for the Opus payload type, one is appended. Returns the
 * SDP unchanged when no opus/48000/2 rtpmap is present.
 *
 * @param {string} sdpText - raw SDP text ("\r\n"-separated lines).
 * @returns {string} the munged SDP.
 */
function forceStereoOpus(sdpText) {
    const sdpLines = sdpText.split('\r\n');

    // Find the dynamic payload type assigned to stereo Opus (last match wins,
    // matching the original loop).
    let opusPayloadType = null;
    for (let i = 0; i < sdpLines.length; i++) {
        if (sdpLines[i].includes('a=rtpmap:') && sdpLines[i].includes('opus/48000/2')) {
            const match = sdpLines[i].match(/a=rtpmap:(\d+) /);
            if (match) opusPayloadType = match[1];
        }
    }

    if (opusPayloadType) {
        // Completely overwrite the opus config for pristine stereo.
        const fmtpLine = `a=fmtp:${opusPayloadType} minptime=10;useinbandfec=1;maxplaybackrate=48000;stereo=1;sprop-stereo=1;maxaveragebitrate=510000;cbr=1`;
        let fmtpFound = false;
        for (let i = 0; i < sdpLines.length; i++) {
            if (sdpLines[i].startsWith(`a=fmtp:${opusPayloadType}`)) {
                sdpLines[i] = fmtpLine;
                fmtpFound = true;
            }
        }
        if (!fmtpFound) {
            sdpLines.push(fmtpLine);
        }
    }
    return sdpLines.join('\r\n');
}
|
||||
|
||||
/**
 * Tears down the broadcast: stops all capture tracks, disconnects the
 * signalling socket, closes every peer connection, and resets the UI back to
 * its idle ("Not Broadcasting") state.
 */
function stopSharing() {
    if (activeStream) {
        for (const track of activeStream.getTracks()) track.stop();
        activeStream = null;
    }
    if (socket) {
        socket.disconnect();
        socket = null;
    }
    for (const pc of Object.values(peerConnections)) pc.close();
    peerConnections = {};

    // Restore the idle UI.
    localVideo.style.display = 'none';
    statsPanel.style.display = 'none';
    startBtn.style.display = 'inline-block';
    stopBtn.style.display = 'none';
    statusText.innerText = 'Not Broadcasting';
}

stopBtn.addEventListener('click', stopSharing);
|
||||
|
||||
// --- Stats Monitoring Loop ---
// Previous sample's counters, used to derive the outgoing video bitrate.
let lastBytesSent = 0;
let lastTimestamp = 0;

// Once per second, sample outbound-rtp video stats from the first active
// peer connection and update the stats panel.
setInterval(async () => {
    if (!activeStream || Object.keys(peerConnections).length === 0) {
        // BUGFIX: reset the counters while idle so the first sample of a new
        // broadcast does not compute a bogus bitrate from stale values left
        // over from a previous session.
        lastBytesSent = 0;
        lastTimestamp = 0;
        return;
    }

    // Get stats from the first active peer connection
    const pc = Object.values(peerConnections)[0];
    if (!pc) return;

    try {
        const stats = await pc.getStats();
        stats.forEach(report => {
            if (report.type === 'outbound-rtp' && report.kind === 'video') {
                const fps = report.framesPerSecond || 0;
                const bytesSent = report.bytesSent || 0;
                const timestamp = report.timestamp;
                const res = `${report.frameWidth || 0}x${report.frameHeight || 0}`;

                let bitrate = 0;
                const timeDiff = timestamp - lastTimestamp; // ms
                // BUGFIX: require a positive time delta so we never divide by
                // zero (or produce a negative rate if the sampled connection
                // changed between ticks).
                if (lastTimestamp && timeDiff > 0) {
                    const bytesDiff = bytesSent - lastBytesSent;
                    // convert bytes/ms to kbps: (bytes * 8 / 1000) / (timeDiff / 1000) => (bytes * 8) / timeDiff
                    bitrate = Math.round((bytesDiff * 8) / timeDiff);
                }
                lastBytesSent = bytesSent;
                lastTimestamp = timestamp;

                document.getElementById('statsFps').innerText = fps;
                document.getElementById('statsRes').innerText = res;
                document.getElementById('statsBitrate').innerText = bitrate + ' kbps';
            }
        });
    } catch (e) { console.error("Stats error", e); }
}, 1000);
|
||||
|
||||
// Initial load of saved config, then auto-refresh the source list.
window.electronAPI.getConfig().then(cfg => {
    if (cfg.serverUrl) serverUrlInput.value = cfg.serverUrl;
    if (cfg.serverPassword) serverPasswordInput.value = cfg.serverPassword;
}).catch(e => {
    // BUGFIX: the original left this promise floating, so a rejected
    // getConfig() surfaced as an unhandled rejection. Missing saved config is
    // non-fatal — just log it.
    console.error("Failed to load saved config:", e);
});
getSourcesBtn.click();
|
||||
Reference in New Issue
Block a user