refactor: Migrate screen sharing media handling from direct WebRTC to Mediasoup.

This commit is contained in:
2026-02-23 16:48:05 +01:00
parent ff013b206a
commit 0f250d5c2a
19 changed files with 1925 additions and 574 deletions

View File

@@ -1,3 +1,6 @@
// mediasoupClient is loaded via <script> tag in index.html (esbuild bundle)
const { Device } = mediasoupClient;
const serverUrlInput = document.getElementById('serverUrl');
const serverPasswordInput = document.getElementById('serverPassword');
const sourcesGrid = document.getElementById('sourcesGrid');
@@ -11,39 +14,19 @@ const statsPanel = document.getElementById('statsPanel');
const viewerCountDiv = document.getElementById('viewerCount');
const qualitySelect = document.getElementById('qualitySelect');
// Render the current viewer count in the UI.
// No-op when the counter element is not present in the DOM.
function updateViewerCount() {
  if (!viewerCountDiv) return;
  const count = Object.keys(peerConnections).length;
  viewerCountDiv.innerText = `Viewers: ${count}`;
}
let socket;
let peerConnections = {};
let activeStream;
let selectedVideoSourceId = null;
// --- Mediasoup State ---
let device;
let sendTransport;
let videoProducer;
let audioProducer;
// Chart.js instance tracking
let bitrateChart = null;
// Build ICE config dynamically based on server URL.
// The STUN/TURN host is derived from the broadcast server's hostname;
// if the URL cannot be parsed we fall back to localhost (best-effort,
// this function never throws).
function getIceConfig(serverUrl, turnUser = 'myuser', turnPass = 'mypassword') {
  let turnHost = 'localhost';
  try {
    turnHost = new URL(serverUrl).hostname;
  } catch (e) {
    // unparsable URL — keep the localhost fallback
  }
  const stunEntry = { urls: `stun:${turnHost}:3478` };
  const turnEntry = {
    urls: `turn:${turnHost}:3478`,
    username: turnUser,
    credential: turnPass
  };
  return {
    iceServers: [stunEntry, turnEntry],
    iceCandidatePoolSize: 5
  };
}
let config = getIceConfig('http://localhost:3000');
// 1. Get Desktop Sources / Switch Video Source Mid-Stream
getSourcesBtn.addEventListener('click', async () => {
// --- Mid-Stream Video Source Switching ---
@@ -51,7 +34,6 @@ getSourcesBtn.addEventListener('click', async () => {
try {
// On Wayland, the compositor limits concurrent ScreenCast sessions.
// We MUST stop the old session BEFORE requesting a new one.
// Stop ALL video tracks to ensure the old PipeWire session is fully released.
activeStream.getVideoTracks().forEach(t => {
t.onended = null;
t.stop();
@@ -60,7 +42,6 @@ getSourcesBtn.addEventListener('click', async () => {
// Give the compositor time to tear down the old ScreenCast session
await new Promise(r => setTimeout(r, 1000));
// Now request a new source — this opens the Wayland portal
const newStream = await navigator.mediaDevices.getUserMedia({
audio: false,
video: { mandatory: { chromeMediaSource: 'desktop' } }
@@ -77,22 +58,16 @@ getSourcesBtn.addEventListener('click', async () => {
}
activeStream.addTrack(newVideoTrack);
// Hot-swap on all peer connections without renegotiation
Object.values(peerConnections).forEach(pc => {
const sender = pc.getSenders().find(s => s.track && s.track.kind === 'video');
if (sender) {
sender.replaceTrack(newVideoTrack).catch(e => console.error("replaceTrack error:", e));
}
});
// Hot-swap on the mediasoup producer (no renegotiation needed!)
if (videoProducer) {
await videoProducer.replaceTrack({ track: newVideoTrack });
}
// Update local preview
localVideo.srcObject = activeStream;
// Re-attach onended to auto-stop if the window closes
newVideoTrack.onended = stopSharing;
} catch (e) {
console.error("Failed to switch video source:", e);
// If switching failed, stop broadcast since we already killed the old track
stopSharing();
}
return;
@@ -104,7 +79,6 @@ getSourcesBtn.addEventListener('click', async () => {
selectedVideoSourceId = null;
try {
// --- Fetch Virtual Video Sources ---
const sources = await window.electronAPI.getSources();
sourcesGrid.innerHTML = '';
sources.forEach(source => {
@@ -115,7 +89,6 @@ getSourcesBtn.addEventListener('click', async () => {
img.src = source.thumbnail;
const label = document.createElement('span');
// source.name usually contains the application name
label.innerText = source.name || `Screen ${source.id}`;
label.title = source.name || `Screen ${source.id}`;
@@ -133,12 +106,8 @@ getSourcesBtn.addEventListener('click', async () => {
sourcesGrid.appendChild(item);
});
// Add custom formatting if there's only one item (like on Wayland)
if (sources.length === 1) {
sourcesGrid.classList.add('single-item');
// On Wayland with a single source, just auto-select it WITHOUT calling startPreview.
// startPreview triggers another getUserMedia which opens a SECOND Wayland portal dialog.
// The thumbnail already shows what the source looks like.
selectedVideoSourceId = sources[0].id;
sourcesGrid.firstChild.classList.add('selected');
startBtn.disabled = false;
@@ -146,7 +115,6 @@ getSourcesBtn.addEventListener('click', async () => {
sourcesGrid.classList.remove('single-item');
}
// Ensure start button remains disabled if no source was auto-selected
if (!selectedVideoSourceId) {
startBtn.disabled = true;
}
@@ -160,7 +128,6 @@ getSourcesBtn.addEventListener('click', async () => {
let previewStream = null;
async function startPreview(videoSourceId) {
// Cleanup previous preview
if (previewStream) {
previewStream.getTracks().forEach(t => t.stop());
previewStream = null;
@@ -184,7 +151,6 @@ async function startPreview(videoSourceId) {
}
});
// Removed 1080p ideal limit to prevent Chromium from green-padding non-16:9 window captures!
const videoTrack = previewStream.getVideoTracks()[0];
await videoTrack.applyConstraints({ frameRate: { ideal: 60 } });
@@ -202,9 +168,6 @@ async function getAudioStream(targetAppName, videoSourceId) {
if (!targetAppName || targetAppName === 'none') return null;
if (targetAppName === 'all_desktop') {
// Use Pipewire to link the system's default audio output monitor to our virtual mic.
// This avoids Chromium's broken chromeMediaSource desktop audio which causes echoing
// and double Wayland ScreenCast portal prompts.
const linked = await window.electronAPI.linkMonitorAudio();
if (linked) {
const devices = await navigator.mediaDevices.enumerateDevices();
@@ -270,19 +233,14 @@ startBtn.addEventListener('click', async () => {
return;
}
// Save credentials for next time
window.electronAPI.saveConfig({ serverUrl: url, serverPassword: password });
try {
// Reuse the preview stream if available, otherwise create a new one.
// On Wayland, this is typically the ONLY portal prompt since we skip getSources on startup.
let stream;
if (previewStream) {
stream = previewStream;
previewStream = null;
} else {
// Build video constraints — omit chromeMediaSourceId if no source was pre-selected.
// On Wayland this lets the portal handle source selection.
const videoMandatory = { chromeMediaSource: 'desktop' };
if (selectedVideoSourceId) {
videoMandatory.chromeMediaSourceId = selectedVideoSourceId;
@@ -297,7 +255,6 @@ startBtn.addEventListener('click', async () => {
const [, targetFps] = (qualitySelect.value || '8000000|60').split('|');
if (videoTrack) await videoTrack.applyConstraints({ frameRate: { ideal: parseInt(targetFps) } });
// Add audio if requested (virtual mic capture does NOT trigger a Wayland portal)
if (targetAppName && targetAppName !== 'none') {
const audioStream = await getAudioStream(targetAppName, videoSourceId);
if (audioStream) {
@@ -309,7 +266,7 @@ startBtn.addEventListener('click', async () => {
localVideo.srcObject = stream;
localVideo.style.display = 'block';
connectAndBroadcast(url, password);
await connectAndBroadcast(url, password);
startBtn.style.display = 'none';
stopBtn.style.display = 'inline-block';
@@ -317,7 +274,6 @@ startBtn.addEventListener('click', async () => {
if (viewerCountDiv) viewerCountDiv.style.display = 'block';
statusText.innerText = `Broadcasting to ${url}`;
// Auto stop if user closes the requested window
stream.getVideoTracks()[0].onended = stopSharing;
} catch (e) {
@@ -326,131 +282,161 @@ startBtn.addEventListener('click', async () => {
}
});
function connectAndBroadcast(url, password) {
// Fetch TURN credentials from the server, then update ICE config
fetch(new URL('/turn-config', url).href)
.then(r => r.json())
.then(turn => {
config = getIceConfig(url, turn.username, turn.credential);
})
.catch(() => {
config = getIceConfig(url); // fallback to defaults
});
socket = io(url);
// --- Mediasoup SFU Connection ---
async function connectAndBroadcast(url, password) {
return new Promise((resolve, reject) => {
// io() is available globally from socket.io-client script tag in index.html
socket = io(url);
socket.on('connect', () => {
socket.emit('broadcaster', password);
});
socket.on('connect', async () => {
try {
// 1. Authenticate as broadcaster
socket.emit('broadcaster', password);
socket.on('authError', (msg) => {
alert(msg);
stopSharing();
});
// 2. Get router RTP capabilities
const rtpCapabilities = await new Promise((res, rej) => {
socket.emit('getRouterRtpCapabilities', (data) => {
if (data.error) rej(new Error(data.error));
else res(data);
});
});
socket.on('viewer', id => {
if (!activeStream) return;
const peerConnection = new RTCPeerConnection(config);
peerConnections[id] = peerConnection;
updateViewerCount();
// 3. Create mediasoup Device and load capabilities
device = new Device();
await device.load({ routerRtpCapabilities: rtpCapabilities });
activeStream.getTracks().forEach(track => {
const sender = peerConnection.addTrack(track, activeStream);
const [targetBitrate] = (qualitySelect.value || '8000000|60').split('|');
if (track.kind === 'video') {
const params = sender.getParameters();
if (!params.encodings) params.encodings = [{}];
params.encodings[0].maxBitrate = parseInt(targetBitrate);
sender.setParameters(params).catch(e => console.error(e));
} else if (track.kind === 'audio') {
const params = sender.getParameters();
if (!params.encodings) params.encodings = [{}];
params.encodings[0].maxBitrate = 510000; // max Opus bitrate
sender.setParameters(params).catch(e => console.error(e));
}
});
// 4. Create send transport
const transportParams = await new Promise((res, rej) => {
socket.emit('createWebRtcTransport', { direction: 'send' }, (data) => {
if (data.error) rej(new Error(data.error));
else res(data);
});
});
peerConnection.onicecandidate = event => {
if (event.candidate) {
socket.emit('candidate', id, event.candidate);
}
};
sendTransport = device.createSendTransport(transportParams);
// Monitor ICE state for stability
peerConnection.oniceconnectionstatechange = () => {
console.log(`Viewer ${id} ICE state:`, peerConnection.iceConnectionState);
if (peerConnection.iceConnectionState === 'failed') {
peerConnection.restartIce();
} else if (peerConnection.iceConnectionState === 'disconnected') {
setTimeout(() => {
if (peerConnections[id] && peerConnections[id].iceConnectionState === 'disconnected') {
peerConnections[id].restartIce();
// Transport 'connect' event: DTLS handshake
sendTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
await new Promise((res, rej) => {
socket.emit('connectTransport', {
transportId: sendTransport.id,
dtlsParameters
}, (result) => {
if (result && result.error) rej(new Error(result.error));
else res();
});
});
callback();
} catch (e) {
errback(e);
}
}, 3000);
}
};
});
peerConnection.createOffer().then(sdp => {
if (window.RTCRtpSender && window.RTCRtpSender.getCapabilities) {
const caps = window.RTCRtpSender.getCapabilities('video');
if (caps && caps.codecs) {
const h264 = caps.codecs.filter(c => c.mimeType.toLowerCase() === 'video/h264' || c.mimeType.toLowerCase() === 'video/vp8');
const transceivers = peerConnection.getTransceivers();
transceivers.forEach(t => {
if (t.receiver.track.kind === 'video') t.setCodecPreferences(h264);
// Transport 'produce' event: server creates the Producer
sendTransport.on('produce', async ({ kind, rtpParameters }, callback, errback) => {
try {
const result = await new Promise((res, rej) => {
socket.emit('produce', {
transportId: sendTransport.id,
kind,
rtpParameters
}, (data) => {
if (data.error) rej(new Error(data.error));
else res(data);
});
});
callback({ id: result.id });
} catch (e) {
errback(e);
}
});
// 5. Produce video
const videoTrack = activeStream.getVideoTracks()[0];
if (videoTrack) {
const [targetBitrate] = (qualitySelect.value || '8000000|60').split('|');
videoProducer = await sendTransport.produce({
track: videoTrack,
encodings: [{
maxBitrate: parseInt(targetBitrate),
}],
codecOptions: {
videoGoogleStartBitrate: 1000
}
});
videoProducer.on('transportclose', () => {
videoProducer = null;
});
}
}
// WebRTC defaults to voice-optimized ~32kbps mono. Let's force high-fidelity stereo!
let sdpLines = sdp.sdp.split('\r\n');
let opusPayloadType = null;
for (let i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].includes('a=rtpmap:') && sdpLines[i].includes('opus/48000/2')) {
const match = sdpLines[i].match(/a=rtpmap:(\d+) /);
if (match) opusPayloadType = match[1];
}
}
if (opusPayloadType) {
let fmtpFound = false;
for (let i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].startsWith(`a=fmtp:${opusPayloadType}`)) {
// Completely overwrite the opus config for pristine stereo
sdpLines[i] = `a=fmtp:${opusPayloadType} minptime=10;useinbandfec=1;maxplaybackrate=48000;stereo=1;sprop-stereo=1;maxaveragebitrate=510000;cbr=1`;
fmtpFound = true;
}
}
if (!fmtpFound) {
sdpLines.push(`a=fmtp:${opusPayloadType} minptime=10;useinbandfec=1;maxplaybackrate=48000;stereo=1;sprop-stereo=1;maxaveragebitrate=510000;cbr=1`);
}
}
sdp.sdp = sdpLines.join('\r\n');
return peerConnection.setLocalDescription(sdp);
}).then(() => {
socket.emit('offer', id, peerConnection.localDescription);
// 6. Produce audio (if present)
const audioTrack = activeStream.getAudioTracks()[0];
if (audioTrack) {
audioProducer = await sendTransport.produce({
track: audioTrack,
codecOptions: {
opusStereo: true,
opusDtx: true,
opusMaxPlaybackRate: 48000,
opusMaxAverageBitrate: 510000
}
});
audioProducer.on('transportclose', () => {
audioProducer = null;
});
}
// 7. Track viewer count
socket.on('viewerCount', (count) => {
if (viewerCountDiv) viewerCountDiv.innerText = `Viewers: ${count}`;
});
// Get initial viewer count
socket.emit('getViewerCount', (count) => {
if (viewerCountDiv) viewerCountDiv.innerText = `Viewers: ${count}`;
});
resolve();
} catch (e) {
console.error('Mediasoup setup error:', e);
reject(e);
}
});
});
socket.on('answer', (id, description) => {
if (peerConnections[id]) peerConnections[id].setRemoteDescription(description);
});
socket.on('authError', (msg) => {
alert(msg);
stopSharing();
reject(new Error(msg));
});
socket.on('candidate', (id, candidate) => {
if (peerConnections[id]) peerConnections[id].addIceCandidate(new RTCIceCandidate(candidate));
});
socket.on('disconnectPeer', id => {
if (peerConnections[id]) {
peerConnections[id].close();
delete peerConnections[id];
updateViewerCount();
}
socket.on('connect_error', (err) => {
console.error('Socket connection error:', err);
});
});
}
function stopSharing() {
// Close producers
if (videoProducer) {
videoProducer.close();
videoProducer = null;
}
if (audioProducer) {
audioProducer.close();
audioProducer = null;
}
// Close transport
if (sendTransport) {
sendTransport.close();
sendTransport = null;
}
device = null;
if (activeStream) {
activeStream.getTracks().forEach(t => t.stop());
activeStream = null;
@@ -459,8 +445,6 @@ function stopSharing() {
socket.disconnect();
socket = null;
}
Object.values(peerConnections).forEach(pc => pc.close());
peerConnections = {};
localVideo.style.display = 'none';
const placeholder = document.getElementById('videoPlaceholder');
@@ -485,7 +469,7 @@ stopBtn.addEventListener('click', stopSharing);
// --- Dynamic Audio Switching ---
audioSelect.addEventListener('change', async () => {
if (!activeStream) return; // ignore if not actively broadcasting
if (!activeStream || !sendTransport) return;
const targetAppName = audioSelect.value;
try {
@@ -501,27 +485,31 @@ audioSelect.addEventListener('change', async () => {
});
}
// Add new track
if (newAudioTrack) {
activeStream.addTrack(newAudioTrack);
}
// Directly hot-swap the audio track on all established WebRTC connections
Object.values(peerConnections).forEach(pc => {
const sender = pc.getSenders().find(s => s.track && s.track.kind === 'audio');
// `replaceTrack` allows hot-swapping without renegotiation!
// If newAudioTrack is null (No Audio), replacing with null mutes the stream nicely.
if (sender) {
sender.replaceTrack(newAudioTrack || null).catch(e => console.error("replaceTrack error:", e));
} else if (newAudioTrack) {
// Edge case: if the broadcast was originally started with 'No Audio',
// there's no audio transceiver created yet!
// We'd have to trigger renegotiation to add one, which acts as a restart.
console.warn("Cannot add audio dynamically to a stream that started with 'No Audio'. Please restart the broadcast.");
alert("Cannot swap to audio mid-stream if the broadcast started with 'No Audio'. Please stop and restart.");
}
});
// Hot-swap on the mediasoup audio producer
if (audioProducer && newAudioTrack) {
await audioProducer.replaceTrack({ track: newAudioTrack });
} else if (audioProducer && !newAudioTrack) {
// Mute by pausing the producer
await audioProducer.pause();
} else if (!audioProducer && newAudioTrack) {
// Need to create a new producer for audio
audioProducer = await sendTransport.produce({
track: newAudioTrack,
codecOptions: {
opusStereo: true,
opusDtx: true,
opusMaxPlaybackRate: 48000,
opusMaxAverageBitrate: 510000
}
});
audioProducer.on('transportclose', () => {
audioProducer = null;
});
}
} catch (e) {
console.error("Failed to switch audio dynamically:", e);
@@ -530,7 +518,7 @@ audioSelect.addEventListener('change', async () => {
// --- Dynamic Quality Switching ---
qualitySelect.addEventListener('change', async () => {
if (!activeStream) return;
if (!activeStream || !videoProducer) return;
const [targetBitrate, targetFps] = qualitySelect.value.split('|');
@@ -540,17 +528,18 @@ qualitySelect.addEventListener('change', async () => {
await videoTrack.applyConstraints({ frameRate: { ideal: parseInt(targetFps) } }).catch(e => console.error(e));
}
// Update bitrate on all existing peer connections
Object.values(peerConnections).forEach(pc => {
pc.getSenders().forEach(sender => {
if (sender.track && sender.track.kind === 'video') {
const params = sender.getParameters();
if (!params.encodings) params.encodings = [{}];
// Update max bitrate on the producer's encoding
if (videoProducer) {
try {
const params = videoProducer.rtpSender.getParameters();
if (params.encodings && params.encodings.length > 0) {
params.encodings[0].maxBitrate = parseInt(targetBitrate);
sender.setParameters(params).catch(e => console.error(e));
await videoProducer.rtpSender.setParameters(params);
}
});
});
} catch (e) {
console.error("Failed to update bitrate:", e);
}
}
});
// --- Stats Monitoring Loop ---
@@ -558,7 +547,7 @@ let lastBytesSent = 0;
let lastTimestamp = 0;
setInterval(async () => {
if (!activeStream || Object.keys(peerConnections).length === 0) return;
if (!activeStream || !videoProducer) return;
// Initialize chart if not present
if (!bitrateChart) {
@@ -598,16 +587,11 @@ setInterval(async () => {
});
}
// Get stats from the first active peer connection
const pc = Object.values(peerConnections)[0];
if (!pc) return;
try {
const stats = await pc.getStats();
const stats = await videoProducer.getStats();
let videoCodec = 'Unknown';
let audioCodec = 'Unknown';
// Scan for codec objects globally
stats.forEach(report => {
if (report.type === 'codec') {
if (report.mimeType.toLowerCase().includes('video')) videoCodec = report.mimeType.split('/')[1] || report.mimeType;
@@ -624,9 +608,8 @@ setInterval(async () => {
let bitrate = 0;
if (lastTimestamp && lastBytesSent) {
const timeDiff = timestamp - lastTimestamp; // ms
const timeDiff = timestamp - lastTimestamp;
const bytesDiff = bytesSent - lastBytesSent;
// convert bytes/ms to kbps: (bytes * 8 / 1000) / (timeDiff / 1000) => (bytes * 8) / timeDiff
bitrate = Math.round((bytesDiff * 8) / timeDiff);
}
lastBytesSent = bytesSent;
@@ -637,50 +620,70 @@ setInterval(async () => {
document.getElementById('statsBitrate').innerText = bitrate + ' kbps';
document.getElementById('statsVideoCodec').innerText = videoCodec;
// Update chart
if (bitrateChart) {
bitrateChart.data.datasets[0].data.shift();
bitrateChart.data.datasets[0].data.push(bitrate);
bitrateChart.update();
}
} else if (report.type === 'outbound-rtp' && report.kind === 'audio') {
document.getElementById('statsAudioCodec').innerText = audioCodec;
}
});
// Get audio codec from audio producer stats
if (audioProducer) {
const audioStats = await audioProducer.getStats();
audioStats.forEach(report => {
if (report.type === 'codec' && report.mimeType.toLowerCase().includes('audio')) {
document.getElementById('statsAudioCodec').innerText = report.mimeType.split('/')[1] || report.mimeType;
}
});
}
} catch (e) { console.error("Stats error", e); }
}, 1000);
// --- Reusable Audio Dropdown Population ---
// Rebuild the audio-source <select> from a fresh list of PipeWire apps,
// always keeping the "No Audio" and "All Desktop Audio" entries first.
// The user's current selection is restored if that source still exists.
function populateAudioSelect(audioApps) {
  const previousValue = audioSelect.value;
  audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
  const desktopOption = document.createElement('option');
  desktopOption.value = 'all_desktop';
  desktopOption.text = 'All Desktop Audio (System Default)';
  audioSelect.appendChild(desktopOption);
  for (const app of audioApps) {
    const appOption = document.createElement('option');
    appOption.value = app.name;
    appOption.text = `${app.name} (${app.mediaName})`;
    audioSelect.appendChild(appOption);
  }
  // Restore the prior selection only if it survived the rebuild.
  const stillPresent = Array.from(audioSelect.options).some(o => o.value === previousValue);
  if (stillPresent) {
    audioSelect.value = previousValue;
  }
}
// Listen for live audio source updates from PipeWire monitor
window.electronAPI.onAudioAppsUpdated((apps) => {
populateAudioSelect(apps);
});
// Initial load: config + audio apps only (no portal prompt on startup)
window.electronAPI.getConfig().then(cfg => {
if (cfg.serverUrl) serverUrlInput.value = cfg.serverUrl;
if (cfg.serverPassword) serverPasswordInput.value = cfg.serverPassword;
});
// Fetch audio applications on startup (this only reads PipeWire, no Wayland portal)
// Fetch audio applications on startup
(async () => {
try {
const audioApps = await window.electronAPI.getAudioApps();
audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
const allDesktopOption = document.createElement('option');
allDesktopOption.value = 'all_desktop';
allDesktopOption.text = 'All Desktop Audio (System Default)';
audioSelect.appendChild(allDesktopOption);
audioApps.forEach(app => {
const option = document.createElement('option');
option.value = app.name;
option.text = `${app.name} (${app.mediaName})`;
audioSelect.appendChild(option);
});
populateAudioSelect(audioApps);
} catch (e) {
console.error('Failed to load audio apps:', e);
audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
}
// Show the source grid as ready (user can optionally click "Select Sources" for thumbnails)
sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%; text-align:center; padding:1rem;">Click "Start Broadcast" to select a source, or use "Select Sources" for thumbnails.</div>';
// Start button is always enabled — source selection happens via the portal
sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%; text-align:center; padding:1rem;"></div>';
startBtn.disabled = false;
})();