// Broadcaster renderer script: desktop capture, PipeWire audio routing, and WebRTC streaming.
// Cached references to the broadcaster UI controls.
const $ = (id) => document.getElementById(id);

const serverUrlInput = $('serverUrl');
const serverPasswordInput = $('serverPassword');
const sourcesGrid = $('sourcesGrid');
const audioSelect = $('audioSelect');
const getSourcesBtn = $('getSourcesBtn');
const startBtn = $('startBtn');
const stopBtn = $('stopBtn');
const localVideo = $('localVideo');
const statusText = $('statusText');
const statsPanel = $('statsPanel');
const viewerCountDiv = $('viewerCount');
|
|
// Refresh the on-screen viewer counter from the live peer-connection map.
function updateViewerCount() {
  if (!viewerCountDiv) return;
  const count = Object.keys(peerConnections).length;
  viewerCountDiv.innerText = `Viewers: ${count}`;
}
|
|
|
|
// --- Broadcast state ---
let socket;                        // socket.io connection to the signalling server (set in connectAndBroadcast)
let peerConnections = {};          // viewer socket id -> RTCPeerConnection
let activeStream;                  // MediaStream currently being broadcast (null/undefined when idle)
let selectedVideoSourceId = null;  // desktop-capturer source id chosen in the grid; null lets the portal pick

// Chart.js instance tracking
let bitrateChart = null;

// ICE configuration for every viewer connection.
// NOTE(review): STUN/TURN point at localhost with hard-coded credentials —
// presumably a co-located coturn instance; verify this works for remote viewers.
const config = {
  iceServers: [
    { urls: "stun:localhost:3478" },
    { urls: "turn:localhost:3478", username: "myuser", credential: "mypassword" }
  ]
};
|
|
|
|
// 1. Get Desktop Sources from Main Process and populate raw select tags
// Also enumerate native audio devices from navigator!
getSourcesBtn.addEventListener('click', async () => {
  sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%;">Loading sources...</div>';
  startBtn.disabled = true;
  selectedVideoSourceId = null;

  // Build one clickable thumbnail tile for a capturer source.
  const buildSourceItem = (source) => {
    const item = document.createElement('div');
    item.className = 'source-item';

    const img = document.createElement('img');
    img.src = source.thumbnail;
    item.appendChild(img);

    // source.name usually contains the application name
    const displayName = source.name || `Screen ${source.id}`;
    const label = document.createElement('span');
    label.innerText = displayName;
    label.title = displayName;
    item.appendChild(label);

    item.addEventListener('click', () => {
      document.querySelectorAll('.source-item').forEach(i => i.classList.remove('selected'));
      item.classList.add('selected');
      selectedVideoSourceId = source.id;
      startPreview(source.id);
      startBtn.disabled = false;
    });

    return item;
  };

  try {
    // --- Fetch Virtual Video Sources ---
    const sources = await window.electronAPI.getSources();
    sourcesGrid.innerHTML = '';
    for (const source of sources) {
      sourcesGrid.appendChild(buildSourceItem(source));
    }

    // Add custom formatting if there's only one item (like on Wayland)
    if (sources.length === 1) {
      sourcesGrid.classList.add('single-item');
      // On Wayland with a single source, just auto-select it WITHOUT calling startPreview.
      // startPreview triggers another getUserMedia which opens a SECOND Wayland portal dialog.
      // The thumbnail already shows what the source looks like.
      selectedVideoSourceId = sources[0].id;
      sourcesGrid.firstChild.classList.add('selected');
      startBtn.disabled = false;
    } else {
      sourcesGrid.classList.remove('single-item');
    }

    // Ensure start button remains disabled if no source was auto-selected
    if (!selectedVideoSourceId) {
      startBtn.disabled = true;
    }
  } catch (e) {
    console.error(e);
    sourcesGrid.innerHTML = '<div style="color:red; width:100%;">Error loading sources</div>';
  }
});
|
|
|
|
// --- Preview Stream Logic ---
let previewStream = null;

// Show a local-only live preview of the given desktop source in the <video>
// element. Passing a falsy id tears the current preview down instead.
async function startPreview(videoSourceId) {
  // Cleanup previous preview
  if (previewStream) {
    for (const track of previewStream.getTracks()) track.stop();
    previewStream = null;
  }

  if (!videoSourceId) {
    localVideo.style.display = 'none';
    const placeholder = document.getElementById('videoPlaceholder');
    if (placeholder) placeholder.style.display = 'block';
    return;
  }

  try {
    const constraints = {
      audio: false,
      video: {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: videoSourceId,
        }
      }
    };
    previewStream = await navigator.mediaDevices.getUserMedia(constraints);

    // Removed 1080p ideal limit to prevent Chromium from green-padding non-16:9 window captures!
    const [videoTrack] = previewStream.getVideoTracks();
    await videoTrack.applyConstraints({ frameRate: { ideal: 60 } });

    localVideo.srcObject = previewStream;
    localVideo.style.display = 'block';
    const placeholder = document.getElementById('videoPlaceholder');
    if (placeholder) placeholder.style.display = 'none';
  } catch (e) {
    console.error("Failed to start preview stream:", e);
  }
}
|
|
|
|
// --- Audio Capture Helper ---

// Open the "simplescreenshare" virtual microphone as a raw stereo stream with
// all voice processing disabled (this is desktop/app audio, not a real mic).
// Logs `notFoundMsg` and returns null when the virtual mic is not visible.
async function openVirtualMicStream(notFoundMsg) {
  const devices = await navigator.mediaDevices.enumerateDevices();
  const virtMic = devices.find(d => d.kind === 'audioinput' && d.label.toLowerCase().includes('simplescreenshare'));
  if (!virtMic) {
    console.warn(notFoundMsg);
    return null;
  }
  return await navigator.mediaDevices.getUserMedia({
    audio: {
      deviceId: { exact: virtMic.deviceId },
      echoCancellation: { exact: false },
      autoGainControl: { exact: false },
      noiseSuppression: { exact: false },
      channelCount: 2,
      sampleRate: 48000
    },
    video: false
  });
}

// Resolve an audio MediaStream for the requested capture target ('all_desktop',
// an application name, or 'none'/empty for no audio). Returns null when no
// audio is requested or capture fails.
// `videoSourceId` is currently unused; kept for call-site compatibility.
async function getAudioStream(targetAppName, videoSourceId) {
  if (!targetAppName || targetAppName === 'none') return null;

  if (targetAppName === 'all_desktop') {
    // Use Pipewire to link the system's default audio output monitor to our virtual mic.
    // This avoids Chromium's broken chromeMediaSource desktop audio which causes echoing
    // and double Wayland ScreenCast portal prompts.
    const linked = await window.electronAPI.linkMonitorAudio();
    if (!linked) {
      console.warn("Failed to link system monitor audio.");
      return null;
    }
    return await openVirtualMicStream("Virtual mic device not found for monitor capture");
  }

  // Application specific (Pipewire)
  const linked = await window.electronAPI.linkAppAudio(targetAppName);
  if (!linked) {
    console.warn("Failed to link application audio.");
    return null;
  }
  return await openVirtualMicStream("Virtual mic device not found in navigator enumeration");
}
|
|
|
|
// 2. Start Broadcast
startBtn.addEventListener('click', async () => {
  const url = serverUrlInput.value;
  const password = serverPasswordInput.value;
  const targetAppName = audioSelect.value;

  if (!url || !password) {
    alert("Please fill out URL and Password.");
    return;
  }

  // Save credentials for next time
  window.electronAPI.saveConfig({ serverUrl: url, serverPassword: password });

  try {
    // Reuse the preview stream if available, otherwise create a new one.
    // On Wayland, this is typically the ONLY portal prompt since we skip getSources on startup.
    let stream;
    if (previewStream) {
      stream = previewStream;
      previewStream = null;
    } else {
      // Build video constraints — omit chromeMediaSourceId if no source was pre-selected.
      // On Wayland this lets the portal handle source selection.
      const videoMandatory = { chromeMediaSource: 'desktop' };
      if (selectedVideoSourceId) {
        videoMandatory.chromeMediaSourceId = selectedVideoSourceId;
      }
      stream = await navigator.mediaDevices.getUserMedia({
        audio: false,
        video: { mandatory: videoMandatory }
      });
    }

    const videoTrack = stream.getVideoTracks()[0];
    if (videoTrack) await videoTrack.applyConstraints({ frameRate: { ideal: 60 } });

    // Add audio if requested (virtual mic capture does NOT trigger a Wayland portal)
    if (targetAppName && targetAppName !== 'none') {
      const audioStream = await getAudioStream(targetAppName, selectedVideoSourceId);
      // Guard: a resolved stream can still carry zero audio tracks, and
      // addTrack(undefined) would throw and abort the whole broadcast.
      const audioTrack = audioStream ? audioStream.getAudioTracks()[0] : null;
      if (audioTrack) stream.addTrack(audioTrack);
    }

    activeStream = stream;
    localVideo.srcObject = stream;
    localVideo.style.display = 'block';

    connectAndBroadcast(url, password);

    startBtn.style.display = 'none';
    stopBtn.style.display = 'inline-block';
    statsPanel.style.display = 'block';
    if (viewerCountDiv) viewerCountDiv.style.display = 'block';
    statusText.innerText = `Broadcasting to ${url}`;

    // Auto stop if user closes the requested window.
    // Reuses the videoTrack guard above: a stream with no video track
    // previously crashed here on getVideoTracks()[0].onended.
    if (videoTrack) videoTrack.onended = stopSharing;

  } catch (e) {
    console.error("Stream capture error:", e);
    alert("Failed to capture screen. See console for details.");
  }
});
|
|
|
|
// Connect to the signalling server and answer each joining viewer with a
// dedicated RTCPeerConnection carrying the active broadcast stream.
function connectAndBroadcast(url, password) {
  socket = io(url);

  socket.on('connect', () => {
    socket.emit('broadcaster', password);
  });

  socket.on('authError', (msg) => {
    alert(msg);
    stopSharing();
  });

  socket.on('viewer', id => {
    if (!activeStream) return;

    const peerConnection = new RTCPeerConnection(config);
    peerConnections[id] = peerConnection;
    updateViewerCount();

    // Attach every broadcast track and cap its encoder bitrate.
    activeStream.getTracks().forEach(track => {
      const sender = peerConnection.addTrack(track, activeStream);
      if (track.kind === 'video') {
        const params = sender.getParameters();
        if (!params.encodings) params.encodings = [{}];
        params.encodings[0].maxBitrate = 10000000;
        sender.setParameters(params).catch(e => console.error(e));
      } else if (track.kind === 'audio') {
        const params = sender.getParameters();
        if (!params.encodings) params.encodings = [{}];
        params.encodings[0].maxBitrate = 510000; // max Opus bitrate
        sender.setParameters(params).catch(e => console.error(e));
      }
    });

    // Restrict video to H.264/VP8. setCodecPreferences only influences offers
    // generated AFTER it is called, so it must run before createOffer — the
    // previous code applied it inside the createOffer callback, which had no
    // effect on the offer being negotiated.
    if (window.RTCRtpSender && window.RTCRtpSender.getCapabilities) {
      const caps = window.RTCRtpSender.getCapabilities('video');
      if (caps && caps.codecs) {
        const preferredCodecs = caps.codecs.filter(c => c.mimeType.toLowerCase() === 'video/h264' || c.mimeType.toLowerCase() === 'video/vp8');
        if (preferredCodecs.length > 0) {
          peerConnection.getTransceivers().forEach(t => {
            if (t.receiver.track.kind === 'video') t.setCodecPreferences(preferredCodecs);
          });
        }
      }
    }

    peerConnection.onicecandidate = event => {
      if (event.candidate) {
        socket.emit('candidate', id, event.candidate);
      }
    };

    peerConnection.createOffer().then(sdp => {
      // WebRTC defaults to voice-optimized ~32kbps mono. Let's force high-fidelity stereo!
      let sdpLines = sdp.sdp.split('\r\n');
      let opusPayloadType = null;
      for (let i = 0; i < sdpLines.length; i++) {
        if (sdpLines[i].includes('a=rtpmap:') && sdpLines[i].includes('opus/48000/2')) {
          const match = sdpLines[i].match(/a=rtpmap:(\d+) /);
          if (match) opusPayloadType = match[1];
        }
      }
      if (opusPayloadType) {
        let fmtpFound = false;
        for (let i = 0; i < sdpLines.length; i++) {
          if (sdpLines[i].startsWith(`a=fmtp:${opusPayloadType}`)) {
            // Completely overwrite the opus config for pristine stereo
            sdpLines[i] = `a=fmtp:${opusPayloadType} minptime=10;useinbandfec=1;maxplaybackrate=48000;stereo=1;sprop-stereo=1;maxaveragebitrate=510000;cbr=1`;
            fmtpFound = true;
          }
        }
        if (!fmtpFound) {
          sdpLines.push(`a=fmtp:${opusPayloadType} minptime=10;useinbandfec=1;maxplaybackrate=48000;stereo=1;sprop-stereo=1;maxaveragebitrate=510000;cbr=1`);
        }
      }
      sdp.sdp = sdpLines.join('\r\n');

      return peerConnection.setLocalDescription(sdp);
    }).then(() => {
      socket.emit('offer', id, peerConnection.localDescription);
    }).catch(e => console.error("Offer negotiation failed:", e)); // chain previously had no rejection handler
  });

  socket.on('answer', (id, description) => {
    if (peerConnections[id]) peerConnections[id].setRemoteDescription(description);
  });

  socket.on('candidate', (id, candidate) => {
    if (peerConnections[id]) peerConnections[id].addIceCandidate(new RTCIceCandidate(candidate));
  });

  socket.on('disconnectPeer', id => {
    if (peerConnections[id]) {
      peerConnections[id].close();
      delete peerConnections[id];
      updateViewerCount();
    }
  });
}
|
|
|
|
// Tear down the broadcast: stop capture tracks, close the signalling socket
// and all peer connections, reset stats counters, and restore the idle UI.
function stopSharing() {
  if (activeStream) {
    activeStream.getTracks().forEach(t => t.stop());
    activeStream = null;
  }
  if (socket) {
    socket.disconnect();
    socket = null;
  }
  Object.values(peerConnections).forEach(pc => pc.close());
  peerConnections = {};

  // Reset bitrate accounting so a later broadcast does not compute a bogus
  // (possibly negative) first sample from this session's stale counters.
  lastBytesSent = 0;
  lastTimestamp = 0;

  localVideo.style.display = 'none';
  const placeholder = document.getElementById('videoPlaceholder');
  if (placeholder) placeholder.style.display = 'block';

  statsPanel.style.display = 'none';
  startBtn.style.display = 'inline-block';
  stopBtn.style.display = 'none';
  statusText.innerText = 'Not Broadcasting';
  if (viewerCountDiv) {
    viewerCountDiv.style.display = 'none';
    viewerCountDiv.innerText = 'Viewers: 0';
  }

  if (bitrateChart) {
    bitrateChart.destroy();
    bitrateChart = null;
  }
}
|
|
|
|
// Manual stop uses the same teardown path as the track-ended auto stop.
stopBtn.addEventListener('click', stopSharing);
|
|
|
|
// --- Dynamic Audio Switching ---
audioSelect.addEventListener('change', async () => {
  if (!activeStream) return; // ignore if not actively broadcasting

  const targetAppName = audioSelect.value;
  try {
    const newAudioStream = await getAudioStream(targetAppName, selectedVideoSourceId);
    const newAudioTrack = newAudioStream ? newAudioStream.getAudioTracks()[0] : null;

    // Remove old track from local active stream
    for (const oldTrack of activeStream.getAudioTracks()) {
      oldTrack.stop();
      activeStream.removeTrack(oldTrack);
    }

    // Add new track
    if (newAudioTrack) {
      activeStream.addTrack(newAudioTrack);
    }

    // Directly hot-swap the audio track on all established WebRTC connections
    for (const pc of Object.values(peerConnections)) {
      const sender = pc.getSenders().find(s => s.track && s.track.kind === 'audio');

      // `replaceTrack` allows hot-swapping without renegotiation!
      // If newAudioTrack is null (No Audio), replacing with null mutes the stream nicely.
      if (sender) {
        sender.replaceTrack(newAudioTrack || null).catch(e => console.error("replaceTrack error:", e));
      } else if (newAudioTrack) {
        // Edge case: if the broadcast was originally started with 'No Audio',
        // there's no audio transceiver created yet!
        // We'd have to trigger renegotiation to add one, which acts as a restart.
        console.warn("Cannot add audio dynamically to a stream that started with 'No Audio'. Please restart the broadcast.");
        alert("Cannot swap to audio mid-stream if the broadcast started with 'No Audio'. Please stop and restart.");
      }
    }

  } catch (e) {
    console.error("Failed to switch audio dynamically:", e);
  }
});
|
|
|
|
// --- Stats Monitoring Loop ---
// Running byte/time counters used to compute bitrate deltas between samples.
let lastBytesSent = 0;
let lastTimestamp = 0;

// Once a second while broadcasting with at least one viewer: sample outbound
// RTP stats from the first peer connection and refresh the FPS / resolution /
// bitrate / codec readouts plus the rolling bitrate chart.
setInterval(async () => {
  if (!activeStream || Object.keys(peerConnections).length === 0) return;

  // Lazily create the chart the first time stats become available.
  if (!bitrateChart) {
    const ctx = document.getElementById('bitrateChart').getContext('2d');
    bitrateChart = new Chart(ctx, {
      type: 'line',
      data: {
        labels: Array(20).fill(''),
        datasets: [{
          label: 'Bitrate (kbps)',
          data: Array(20).fill(0),
          borderColor: '#aaaaaa',
          backgroundColor: 'rgba(170, 170, 170, 0.1)',
          borderWidth: 2,
          fill: true,
          tension: 0.4,
          pointRadius: 0
        }]
      },
      options: {
        responsive: true,
        maintainAspectRatio: false,
        animation: false,
        plugins: {
          legend: { display: false }
        },
        scales: {
          x: { display: false },
          y: {
            display: true,
            position: 'right',
            ticks: { color: '#94a3b8', font: { size: 10 } },
            grid: { color: 'rgba(255,255,255,0.05)' }
          }
        }
      }
    });
  }

  // Get stats from the first active peer connection
  const pc = Object.values(peerConnections)[0];
  if (!pc) return;

  try {
    const stats = await pc.getStats();
    let videoCodec = 'Unknown';
    let audioCodec = 'Unknown';

    // Scan for codec objects globally
    stats.forEach(report => {
      if (report.type === 'codec') {
        if (report.mimeType.toLowerCase().includes('video')) videoCodec = report.mimeType.split('/')[1] || report.mimeType;
        if (report.mimeType.toLowerCase().includes('audio')) audioCodec = report.mimeType.split('/')[1] || report.mimeType;
      }
    });

    stats.forEach(report => {
      if (report.type === 'outbound-rtp' && report.kind === 'video') {
        const fps = report.framesPerSecond || 0;
        const bytesSent = report.bytesSent || 0;
        const timestamp = report.timestamp;
        const res = `${report.frameWidth || 0}x${report.frameHeight || 0}`;

        let bitrate = 0;
        if (lastTimestamp && lastBytesSent) {
          const timeDiff = timestamp - lastTimestamp; // ms
          const bytesDiff = bytesSent - lastBytesSent;
          // (bytes * 8) / ms == kbps. Guard against a zero/negative interval
          // (would yield Infinity/NaN) and against counter resets — e.g. when
          // the sampled peer connection changes — which made bitrate negative.
          if (timeDiff > 0 && bytesDiff >= 0) {
            bitrate = Math.round((bytesDiff * 8) / timeDiff);
          }
        }
        lastBytesSent = bytesSent;
        lastTimestamp = timestamp;

        document.getElementById('statsFps').innerText = fps;
        document.getElementById('statsRes').innerText = res;
        document.getElementById('statsBitrate').innerText = bitrate + ' kbps';
        document.getElementById('statsVideoCodec').innerText = videoCodec;

        // Update chart
        if (bitrateChart) {
          bitrateChart.data.datasets[0].data.shift();
          bitrateChart.data.datasets[0].data.push(bitrate);
          bitrateChart.update();
        }

      } else if (report.type === 'outbound-rtp' && report.kind === 'audio') {
        document.getElementById('statsAudioCodec').innerText = audioCodec;
      }
    });
  } catch (e) { console.error("Stats error", e); }
}, 1000);
|
|
|
|
// Initial load: config + audio apps only (no portal prompt on startup)
window.electronAPI.getConfig().then((cfg) => {
  const { serverUrl, serverPassword } = cfg;
  if (serverUrl) serverUrlInput.value = serverUrl;
  if (serverPassword) serverPasswordInput.value = serverPassword;
});
|
|
|
|
// Fetch audio applications on startup (this only reads PipeWire, no Wayland portal)
(async () => {
  try {
    const audioApps = await window.electronAPI.getAudioApps();
    audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';

    const allDesktopOption = document.createElement('option');
    allDesktopOption.value = 'all_desktop';
    allDesktopOption.text = 'All Desktop Audio (System Default)';
    audioSelect.appendChild(allDesktopOption);

    // One <option> per PipeWire audio-producing application.
    for (const app of audioApps) {
      const option = document.createElement('option');
      option.value = app.name;
      option.text = `${app.name} (${app.mediaName})`;
      audioSelect.appendChild(option);
    }
  } catch (e) {
    console.error('Failed to load audio apps:', e);
    audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
  }

  // Show the source grid as ready (user can optionally click "Select Sources" for thumbnails)
  sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%; text-align:center; padding:1rem;">Click "Start Broadcast" to select a source, or use "Select Sources" for thumbnails.</div>';
  // Start button is always enabled — source selection happens via the portal
  startBtn.disabled = false;
})();
|