before it's fucked again

This commit is contained in:
2026-02-23 05:31:33 +01:00
parent 831d76c10e
commit 0d7a51ddcd
8 changed files with 686 additions and 137 deletions

View File

@@ -8,12 +8,22 @@ const stopBtn = document.getElementById('stopBtn');
const localVideo = document.getElementById('localVideo');
const statusText = document.getElementById('statusText');
const statsPanel = document.getElementById('statsPanel');
const viewerCountDiv = document.getElementById('viewerCount');
// Refresh the on-screen viewer counter from the current peer-connection map.
// No-op when the counter element is absent from the DOM.
function updateViewerCount() {
    if (!viewerCountDiv) return;
    const viewers = Object.keys(peerConnections).length;
    viewerCountDiv.innerText = `Viewers: ${viewers}`;
}
let socket;
let peerConnections = {};
let activeStream;
let selectedVideoSourceId = null;
// Chart.js instance tracking
let bitrateChart = null;
const config = {
iceServers: [
{ urls: "stun:localhost:3478" },
@@ -25,7 +35,6 @@ const config = {
// Also enumerate native audio devices from navigator!
getSourcesBtn.addEventListener('click', async () => {
sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%;">Loading sources...</div>';
audioSelect.innerHTML = '<option value="">Loading audio devices...</option>';
startBtn.disabled = true;
selectedVideoSourceId = null;
@@ -41,8 +50,9 @@ getSourcesBtn.addEventListener('click', async () => {
img.src = source.thumbnail;
const label = document.createElement('span');
label.innerText = source.name;
label.title = source.name;
// source.name usually contains the application name
label.innerText = source.name || `Screen ${source.id}`;
label.title = source.name || `Screen ${source.id}`;
item.appendChild(img);
item.appendChild(label);
@@ -58,23 +68,26 @@ getSourcesBtn.addEventListener('click', async () => {
sourcesGrid.appendChild(item);
});
// --- Fetch Application Audio Sources via built Pipewire Helper ---
const audioApps = await window.electronAPI.getAudioApps();
audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
audioApps.forEach(app => {
const option = document.createElement('option');
// We pass the actual application name into the value so the main process can find it via pw-dump
option.value = app.name;
option.text = `${app.name} (${app.mediaName})`;
audioSelect.appendChild(option);
});
// Add custom formatting if there's only one item (like on Wayland)
if (sources.length === 1) {
sourcesGrid.classList.add('single-item');
// On Wayland with a single source, just auto-select it WITHOUT calling startPreview.
// startPreview triggers another getUserMedia which opens a SECOND Wayland portal dialog.
// The thumbnail already shows what the source looks like.
selectedVideoSourceId = sources[0].id;
sourcesGrid.firstChild.classList.add('selected');
startBtn.disabled = false;
} else {
sourcesGrid.classList.remove('single-item');
}
// If we don't disable start button here, it would be enabled before user clicked a grid item
startBtn.disabled = true;
// Ensure start button remains disabled if no source was auto-selected
if (!selectedVideoSourceId) {
startBtn.disabled = true;
}
} catch (e) {
console.error(e);
sourcesGrid.innerHTML = '<div style="color:red; width:100%;">Error loading sources</div>';
audioSelect.innerHTML = '<option value="none">Error loading audio</option>';
}
});
@@ -90,6 +103,8 @@ async function startPreview(videoSourceId) {
if (!videoSourceId) {
localVideo.style.display = 'none';
const placeholder = document.getElementById('videoPlaceholder');
if (placeholder) placeholder.style.display = 'block';
return;
}
@@ -110,11 +125,74 @@ async function startPreview(videoSourceId) {
localVideo.srcObject = previewStream;
localVideo.style.display = 'block';
const placeholder = document.getElementById('videoPlaceholder');
if (placeholder) placeholder.style.display = 'none';
} catch (e) {
console.error("Failed to start preview stream:", e);
}
}
// --- Audio Capture Helper ---

// Opens the "simplescreenshare" virtual mic as a raw stereo capture once the
// PipeWire graph has been linked by the main process.
// `notFoundWarning` is logged when the virtual mic is not visible to Chromium's
// device enumeration. Returns a MediaStream, or null when the device is missing.
async function captureVirtualMicStream(notFoundWarning) {
    const devices = await navigator.mediaDevices.enumerateDevices();
    const virtMic = devices.find(d => d.kind === 'audioinput' && d.label.toLowerCase().includes('simplescreenshare'));
    if (!virtMic) {
        console.warn(notFoundWarning);
        return null;
    }
    // Disable all voice-call processing — we want an unprocessed copy of the
    // application audio, not an echo-cancelled/gain-adjusted signal.
    return await navigator.mediaDevices.getUserMedia({
        audio: {
            deviceId: { exact: virtMic.deviceId },
            echoCancellation: { exact: false },
            autoGainControl: { exact: false },
            noiseSuppression: { exact: false },
            channelCount: 2,
            sampleRate: 48000
        },
        video: false
    });
}

// Resolves the audio MediaStream for the selected capture target.
// targetAppName: 'none'/empty → null (video only); 'all_desktop' → system
// monitor capture; anything else → per-application PipeWire capture.
// videoSourceId is accepted for interface compatibility but is not needed
// for PipeWire-based capture.
// Returns a MediaStream, or null when no audio was requested or linking failed.
async function getAudioStream(targetAppName, videoSourceId) {
    if (!targetAppName || targetAppName === 'none') return null;

    if (targetAppName === 'all_desktop') {
        // Use Pipewire to link the system's default audio output monitor to our virtual mic.
        // This avoids Chromium's broken chromeMediaSource desktop audio which causes echoing
        // and double Wayland ScreenCast portal prompts.
        const linked = await window.electronAPI.linkMonitorAudio();
        if (!linked) {
            console.warn("Failed to link system monitor audio.");
            return null;
        }
        return captureVirtualMicStream("Virtual mic device not found for monitor capture");
    }

    // Application specific (Pipewire)
    const linked = await window.electronAPI.linkAppAudio(targetAppName);
    if (!linked) {
        console.warn("Failed to link application audio.");
        return null;
    }
    return captureVirtualMicStream("Virtual mic device not found in navigator enumeration");
}
// 2. Start Broadcast
startBtn.addEventListener('click', async () => {
const url = serverUrlInput.value;
@@ -122,8 +200,8 @@ startBtn.addEventListener('click', async () => {
const videoSourceId = selectedVideoSourceId;
const targetAppName = audioSelect.value;
if (!videoSourceId || !url || !password) {
alert("Please fill out URL, Password, and select a visual source.");
if (!url || !password) {
alert("Please fill out URL and Password.");
return;
}
@@ -131,56 +209,33 @@ startBtn.addEventListener('click', async () => {
window.electronAPI.saveConfig({ serverUrl: url, serverPassword: password });
try {
// Stop the preview grab so we can grab the real stream cleanly
// Reuse the preview stream if available, otherwise create a new one.
// On Wayland, this is typically the ONLY portal prompt since we skip getSources on startup.
let stream;
if (previewStream) {
previewStream.getTracks().forEach(t => t.stop());
stream = previewStream;
previewStream = null;
} else {
// Build video constraints — omit chromeMediaSourceId if no source was pre-selected.
// On Wayland this lets the portal handle source selection.
const videoMandatory = { chromeMediaSource: 'desktop' };
if (selectedVideoSourceId) {
videoMandatory.chromeMediaSourceId = selectedVideoSourceId;
}
stream = await navigator.mediaDevices.getUserMedia({
audio: false,
video: { mandatory: videoMandatory }
});
}
const stream = await navigator.mediaDevices.getUserMedia({
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: videoSourceId,
}
}
});
const videoTrack = stream.getVideoTracks()[0];
await videoTrack.applyConstraints({ frameRate: { ideal: 60 } });
if (videoTrack) await videoTrack.applyConstraints({ frameRate: { ideal: 60 } });
// If user selected an application, grab the Virtual Mic input and link the app to it!
// Add audio if requested (virtual mic capture does NOT trigger a Wayland portal)
if (targetAppName && targetAppName !== 'none') {
const linked = await window.electronAPI.linkAppAudio(targetAppName);
if (linked) {
// Now that the pipewire graph is linked, we just need to read from our Virtual Mic sink!
// Chromium registers this as a standard Input device
const devices = await navigator.mediaDevices.enumerateDevices();
const virtMic = devices.find(d => d.kind === 'audioinput' && d.label.toLowerCase().includes('simplescreenshare'));
if (virtMic) {
const audioStream = await navigator.mediaDevices.getUserMedia({
audio: {
deviceId: { exact: virtMic.deviceId },
echoCancellation: false,
autoGainControl: false,
noiseSuppression: false,
googAutoGainControl: false,
googEchoCancellation: false,
googNoiseSuppression: false,
googHighpassFilter: false,
channelCount: 2,
sampleRate: 48000
},
video: false
});
stream.addTrack(audioStream.getAudioTracks()[0]);
} else {
console.warn("Virtual mic device not found in navigator enumeration");
}
} else {
alert("Failed to link application audio. Broadcasting video only.");
const audioStream = await getAudioStream(targetAppName, videoSourceId);
if (audioStream) {
stream.addTrack(audioStream.getAudioTracks()[0]);
}
}
@@ -193,6 +248,7 @@ startBtn.addEventListener('click', async () => {
startBtn.style.display = 'none';
stopBtn.style.display = 'inline-block';
statsPanel.style.display = 'block';
if (viewerCountDiv) viewerCountDiv.style.display = 'block';
statusText.innerText = `Broadcasting to ${url}`;
// Auto stop if user closes the requested window
@@ -221,6 +277,7 @@ function connectAndBroadcast(url, password) {
const peerConnection = new RTCPeerConnection(config);
peerConnections[id] = peerConnection;
updateViewerCount();
activeStream.getTracks().forEach(track => {
const sender = peerConnection.addTrack(track, activeStream);
@@ -297,6 +354,7 @@ function connectAndBroadcast(url, password) {
if (peerConnections[id]) {
peerConnections[id].close();
delete peerConnections[id];
updateViewerCount();
}
});
}
@@ -314,14 +372,71 @@ function stopSharing() {
peerConnections = {};
localVideo.style.display = 'none';
const placeholder = document.getElementById('videoPlaceholder');
if (placeholder) placeholder.style.display = 'block';
statsPanel.style.display = 'none';
startBtn.style.display = 'inline-block';
stopBtn.style.display = 'none';
statusText.innerText = 'Not Broadcasting';
if (viewerCountDiv) {
viewerCountDiv.style.display = 'none';
viewerCountDiv.innerText = 'Viewers: 0';
}
if (bitrateChart) {
bitrateChart.destroy();
bitrateChart = null;
}
}
stopBtn.addEventListener('click', stopSharing);
// --- Dynamic Audio Switching ---
// Hot-swaps the broadcast's audio source while streaming, without renegotiating
// any of the established WebRTC peer connections.
audioSelect.addEventListener('change', async () => {
    if (!activeStream) return; // ignore if not actively broadcasting
    const targetAppName = audioSelect.value;
    try {
        // Acquire the new capture first; null means "No Audio" was selected
        // or PipeWire linking failed (getAudioStream already warned).
        const newAudioStream = await getAudioStream(targetAppName, selectedVideoSourceId);
        const newAudioTrack = newAudioStream ? newAudioStream.getAudioTracks()[0] : null;
        // Remove old track from local active stream
        const oldAudioTracks = activeStream.getAudioTracks();
        if (oldAudioTracks.length > 0) {
            oldAudioTracks.forEach(t => {
                t.stop();
                activeStream.removeTrack(t);
            });
        }
        // Add new track
        if (newAudioTrack) {
            activeStream.addTrack(newAudioTrack);
        }
        // Directly hot-swap the audio track on all established WebRTC connections
        Object.values(peerConnections).forEach(pc => {
            // NOTE(review): after a swap to 'No Audio', the sender's track is null,
            // so this lookup will miss it on the NEXT swap back to audio and fall
            // into the warning branch — verify whether the filter should match
            // senders by transceiver/mid instead of by live track kind.
            const sender = pc.getSenders().find(s => s.track && s.track.kind === 'audio');
            // `replaceTrack` allows hot-swapping without renegotiation!
            // If newAudioTrack is null (No Audio), replacing with null mutes the stream nicely.
            if (sender) {
                sender.replaceTrack(newAudioTrack || null).catch(e => console.error("replaceTrack error:", e));
            } else if (newAudioTrack) {
                // Edge case: if the broadcast was originally started with 'No Audio',
                // there's no audio transceiver created yet!
                // We'd have to trigger renegotiation to add one, which acts as a restart.
                console.warn("Cannot add audio dynamically to a stream that started with 'No Audio'. Please restart the broadcast.");
                alert("Cannot swap to audio mid-stream if the broadcast started with 'No Audio'. Please stop and restart.");
            }
        });
    } catch (e) {
        console.error("Failed to switch audio dynamically:", e);
    }
});
// --- Stats Monitoring Loop ---
let lastBytesSent = 0;
let lastTimestamp = 0;
@@ -329,12 +444,61 @@ let lastTimestamp = 0;
setInterval(async () => {
if (!activeStream || Object.keys(peerConnections).length === 0) return;
// Initialize chart if not present
if (!bitrateChart) {
const ctx = document.getElementById('bitrateChart').getContext('2d');
bitrateChart = new Chart(ctx, {
type: 'line',
data: {
labels: Array(20).fill(''),
datasets: [{
label: 'Bitrate (kbps)',
data: Array(20).fill(0),
borderColor: '#aaaaaa',
backgroundColor: 'rgba(170, 170, 170, 0.1)',
borderWidth: 2,
fill: true,
tension: 0.4,
pointRadius: 0
}]
},
options: {
responsive: true,
maintainAspectRatio: false,
animation: false,
plugins: {
legend: { display: false }
},
scales: {
x: { display: false },
y: {
display: true,
position: 'right',
ticks: { color: '#94a3b8', font: { size: 10 } },
grid: { color: 'rgba(255,255,255,0.05)' }
}
}
}
});
}
// Get stats from the first active peer connection
const pc = Object.values(peerConnections)[0];
if (!pc) return;
try {
const stats = await pc.getStats();
let videoCodec = 'Unknown';
let audioCodec = 'Unknown';
// Scan for codec objects globally
stats.forEach(report => {
if (report.type === 'codec') {
if (report.mimeType.toLowerCase().includes('video')) videoCodec = report.mimeType.split('/')[1] || report.mimeType;
if (report.mimeType.toLowerCase().includes('audio')) audioCodec = report.mimeType.split('/')[1] || report.mimeType;
}
});
stats.forEach(report => {
if (report.type === 'outbound-rtp' && report.kind === 'video') {
const fps = report.framesPerSecond || 0;
@@ -355,14 +519,52 @@ setInterval(async () => {
document.getElementById('statsFps').innerText = fps;
document.getElementById('statsRes').innerText = res;
document.getElementById('statsBitrate').innerText = bitrate + ' kbps';
document.getElementById('statsVideoCodec').innerText = videoCodec;
// Update chart
if (bitrateChart) {
bitrateChart.data.datasets[0].data.shift();
bitrateChart.data.datasets[0].data.push(bitrate);
bitrateChart.update();
}
} else if (report.type === 'outbound-rtp' && report.kind === 'audio') {
document.getElementById('statsAudioCodec').innerText = audioCodec;
}
});
} catch (e) { console.error("Stats error", e); }
}, 1000);
// Initial load of sources & config
// Initial load: config + audio apps only (no portal prompt on startup)
window.electronAPI.getConfig().then(cfg => {
if (cfg.serverUrl) serverUrlInput.value = cfg.serverUrl;
if (cfg.serverPassword) serverPasswordInput.value = cfg.serverPassword;
});
getSourcesBtn.click();
// Fetch audio applications on startup (this only reads PipeWire, no Wayland portal)
// Fetch audio applications on startup (this only reads PipeWire, no Wayland portal)
(async () => {
    // Reset the dropdown to its baseline single option.
    const resetAudioOptions = () => {
        audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
    };
    // Build an <option> element from a value/label pair.
    const makeOption = (value, text) => {
        const option = document.createElement('option');
        option.value = value;
        option.text = text;
        return option;
    };
    try {
        const audioApps = await window.electronAPI.getAudioApps();
        resetAudioOptions();
        audioSelect.appendChild(makeOption('all_desktop', 'All Desktop Audio (System Default)'));
        for (const app of audioApps) {
            audioSelect.appendChild(makeOption(app.name, `${app.name} (${app.mediaName})`));
        }
    } catch (e) {
        console.error('Failed to load audio apps:', e);
        resetAudioOptions();
    }
    // Show the source grid as ready (user can optionally click "Select Sources" for thumbnails)
    sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%; text-align:center; padding:1rem;">Click "Start Broadcast" to select a source, or use "Select Sources" for thumbnails.</div>';
    // Start button is always enabled — source selection happens via the portal
    startBtn.disabled = false;
})();