before it's broken again

This commit is contained in:
2026-02-23 05:31:33 +01:00
parent 831d76c10e
commit 0d7a51ddcd
8 changed files with 686 additions and 137 deletions

View File

@@ -7,45 +7,77 @@
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;800&display=swap" rel="stylesheet">
<style>
:root {
--bg-color: #0f172a;
--text-primary: #f8fafc;
--text-secondary: #94a3b8;
--accent-color: #3b82f6;
--glass-bg: rgba(30, 41, 59, 0.7);
--glass-border: rgba(255, 255, 255, 0.1);
--bg-color: #000000;
--text-primary: #ffffff;
--text-secondary: #aaaaaa;
--accent-color: #555555;
--glass-bg: #111111;
--glass-border: #333333;
}
body {
font-family: 'Inter', sans-serif;
background-color: var(--bg-color);
color: var(--text-primary);
margin: 0;
padding: 2rem;
height: 100vh;
display: flex;
flex-direction: column;
overflow: hidden; /* Prevent body scroll */
}
.header {
padding: 1.5rem 2rem 0.5rem 2rem;
}
h1 { margin: 0; font-size: 1.8rem; }
.main-content {
display: flex;
flex: 1;
padding: 1rem 2rem 2rem 2rem;
gap: 2rem;
overflow: hidden;
}
.controls {
flex: 0 0 350px;
background: var(--glass-bg);
border: 1px solid var(--glass-border);
padding: 1.5rem;
display: flex;
flex-direction: column;
overflow-y: auto;
}
.preview-container {
flex: 1;
background: var(--glass-bg);
border: 1px solid var(--glass-border);
padding: 1.5rem;
display: flex;
flex-direction: column;
align-items: center;
}
h1 { margin-top: 0; }
.controls {
background: var(--glass-bg);
border: 1px solid var(--glass-border);
border-radius: 12px;
padding: 2rem;
.preview-header {
width: 100%;
max-width: 600px;
text-align: center;
margin-bottom: 2rem;
margin-bottom: 1rem;
}
.preview-header h3 {
margin: 0;
color: var(--text-primary);
}
input, button, select {
font-family: inherit;
padding: 0.8rem 1rem;
border-radius: 8px;
border: 1px solid var(--glass-border);
margin-bottom: 1rem;
width: 90%;
width: 100%;
box-sizing: border-box;
font-size: 1rem;
}
input, select {
background: rgba(0,0,0,0.2);
background: rgba(0,0,0,0.5);
color: white;
outline: none;
}
@@ -57,120 +89,219 @@
border: none;
transition: all 0.2s;
}
button:hover { background: #2563eb; transform: translateY(-2px); }
button:disabled { background: #475569; cursor: not-allowed; transform: none; }
button:hover { background: #777777; transform: translateY(-2px); }
button:disabled { background: #333333; cursor: not-allowed; transform: none; }
.video-wrapper {
flex: 1;
width: 100%;
background: #000;
display: flex;
align-items: center;
justify-content: center;
overflow: hidden;
position: relative;
}
video {
width: 100%;
max-width: 800px;
border-radius: 12px;
background: #000;
height: 100%;
object-fit: contain;
display: none;
}
.status { color: var(--text-secondary); margin-bottom: 1rem; }
.video-placeholder {
color: var(--text-secondary);
text-align: center;
position: absolute;
}
.status { color: var(--text-secondary); margin-bottom: 1rem; text-align: center; }
.label {
display: block;
text-align: left;
width: 90%;
margin: 0 auto 0.5rem auto;
margin: 0 0 0.5rem 0;
color: var(--text-secondary);
font-size: 0.9rem;
}
.section-header {
display: flex;
justify-content: space-between;
align-items: center;
margin: 1.5rem 0 1rem 0;
}
.section-header h3 { margin: 0; font-size: 1.1rem; }
.section-header button { width: auto; margin: 0; padding: 0.4rem 0.8rem; font-size: 0.85rem;}
/* Source Grid */
.sources-grid {
display: grid;
grid-template-columns: repeat(auto-fill, minmax(140px, 1fr));
gap: 15px;
width: 90%;
grid-template-columns: repeat(auto-fill, minmax(130px, 1fr));
gap: 10px;
max-height: 250px;
overflow-y: auto;
margin: 0 auto 1.5rem auto;
margin: 0 0 1.5rem 0;
padding: 10px;
background: rgba(0,0,0,0.2);
border-radius: 8px;
background: rgba(0,0,0,0.5);
border: 1px solid var(--glass-border);
}
/* Single Item Override */
.sources-grid.single-item {
display: flex;
justify-content: center;
align-items: center;
}
.sources-grid.single-item .source-item {
width: 250px;
min-width: 250px;
flex-shrink: 0;
box-sizing: border-box;
}
.source-item {
cursor: pointer;
border-radius: 6px;
padding: 8px;
background: rgba(255,255,255,0.05);
transition: all 0.2s ease;
text-align: center;
border: 2px solid transparent;
display: flex;
flex-direction: column;
}
.source-item:hover {
background: rgba(255,255,255,0.1);
}
.source-item.selected {
background: rgba(59, 130, 246, 0.2);
border-color: var(--accent-color);
background: rgba(255, 255, 255, 0.2);
border-color: var(--text-primary);
}
.source-item img {
width: 100%;
border-radius: 4px;
margin-bottom: 8px;
object-fit: cover;
margin-bottom: 4px;
object-fit: contain;
aspect-ratio: 16/9;
background: #000;
}
.source-item span {
display: block;
font-size: 0.85rem;
font-size: 0.8rem;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
color: #e2e8f0;
line-height: 1.2;
padding-top: 4px;
}
/* Stats Panel */
.stats-panel {
background: rgba(0,0,0,0.4);
border-radius: 8px;
background: rgba(0,0,0,0.7);
padding: 1rem;
width: 90%;
margin: 1rem auto;
margin-top: 1rem;
text-align: left;
display: none;
font-family: monospace;
font-size: 1.1rem;
color: #10b981; /* green text */
font-size: 0.95rem;
color: #aaaaaa; /* grey text */
}
/* Custom Scrollbar */
::-webkit-scrollbar {
width: 8px;
height: 8px;
}
::-webkit-scrollbar-track {
background: var(--glass-bg);
border-left: 1px solid var(--glass-border);
}
::-webkit-scrollbar-thumb {
background: var(--accent-color);
}
::-webkit-scrollbar-thumb:hover {
background: #777777;
}
/* Responsive Layout */
@media (max-width: 900px) {
body {
overflow-y: auto;
height: auto;
}
.main-content {
flex-direction: column;
overflow: visible;
padding: 1rem;
}
.controls {
flex: none;
width: 100%;
overflow: visible;
box-sizing: border-box;
}
.preview-container {
min-height: 400px;
}
}
</style>
</head>
<body>
<div class="header">
<h1>Broadcaster Client</h1>
</div>
<div class="main-content">
<!-- Left Column: Controls -->
<div class="controls" id="controlsPanel">
<label class="label">Server Connection</label>
<input type="text" id="serverUrl" placeholder="Server URL (e.g. http://localhost:3000)" value="http://localhost:3000">
<input type="text" id="serverUrl" placeholder="URL (e.g. http://localhost:3000)" value="http://localhost:3000">
<input type="password" id="serverPassword" placeholder="Stream Password">
<div style="display:flex; justify-content:space-between; align-items:center; width: 90%; margin: 10px auto;">
<h3 style="margin:0;">Media Sources</h3>
<button id="getSourcesBtn" style="width:auto; margin:0; padding: 0.4rem 0.8rem;">Refresh Devices</button>
<div class="section-header">
<h3>Media Sources</h3>
<button id="getSourcesBtn">Select new Source</button>
</div>
<label class="label">Visual Source (Screen/Window)</label>
<div id="sourcesGrid" class="sources-grid"></div>
<label class="label">Audio Source (Microphone/Pipewire Virtual Sinks)</label>
<label class="label">Audio Source (Microphone/Virtual Sinks)</label>
<select id="audioSelect"></select>
<button id="startBtn" disabled style="margin-top: 1.5rem;">Start Broadcast</button>
<div style="margin-top: auto;">
<button id="startBtn" disabled style="margin-bottom: 0.5rem;">Start Broadcast</button>
<div class="status" id="statusText">Not Broadcasting</div>
<button id="stopBtn" style="display:none; background:#ef4444;">Stop Broadcast</button>
<div class="status" id="viewerCount" style="display:none; font-weight: bold; color: var(--text-primary);">Viewers: 0</div>
<button id="stopBtn" style="display:none; background:#444444; margin-bottom: 0.5rem;">Stop Broadcast</button>
</div>
<div class="stats-panel" id="statsPanel">
<div><strong>Resolution:</strong> <span id="statsRes">0x0</span></div>
<div><strong>Res:</strong> <span id="statsRes">0x0</span></div>
<div><strong>FPS:</strong> <span id="statsFps">0</span></div>
<div><strong>Upstream:</strong> <span id="statsBitrate">0 kbps</span></div>
<div><strong>Up:</strong> <span id="statsBitrate">0 kbps</span></div>
<div><strong>V-Codec:</strong> <span id="statsVideoCodec">...</span></div>
<div><strong>A-Codec:</strong> <span id="statsAudioCodec">...</span></div>
<div style="margin-top: 10px; height: 100px;">
<canvas id="bitrateChart"></canvas>
</div>
</div>
</div>
<!-- Right Column: Big Preview -->
<div class="preview-container">
<div class="preview-header">
</div>
<div class="video-wrapper">
<div id="videoPlaceholder" class="video-placeholder"></div>
<video id="localVideo" autoplay playsinline muted></video>
</div>
</div>
</div>
<!-- Use socket.io client script locally installed via npm -->
<script src="./node_modules/socket.io-client/dist/socket.io.js"></script>
<script src="./node_modules/chart.js/dist/chart.umd.js"></script>
<script src="renderer.js"></script>
</body>
</html>

View File

@@ -44,11 +44,35 @@ app.on('window-all-closed', () => {
// Handle IPC request from renderer to get screen/audio sources
ipcMain.handle('get-sources', async () => {
const inputSources = await desktopCapturer.getSources({
let inputSources = await desktopCapturer.getSources({
types: ['window', 'screen'],
fetchWindowIcons: true
});
// Wayland Workaround: If we only see generic "WebRTC PipeWire capturer" windows,
// try to fetch real window titles via our python helper
try {
const genericNames = ['webrtc pipewire capturer', 'screen 1', 'screen 2'];
const hasGeneric = inputSources.some(s => genericNames.includes(s.name.toLowerCase()));
if (hasGeneric || inputSources.length === 1) {
const { execSync } = require('child_process');
const pyPath = path.join(__dirname, 'wayland-helper.py');
const out = execSync(`python3 ${pyPath}`, { timeout: 2000 }).toString();
const waylandWindows = JSON.parse(out);
if (waylandWindows && waylandWindows.length > 0) {
// If we only have 1 capturer source (common on Wayland compositors),
// rename it to the first active window title we found to be helpful.
if (inputSources.length === 1 && waylandWindows[0].title) {
inputSources[0].name = waylandWindows[0].title;
}
}
}
} catch (e) {
console.error("Wayland helper failed:", e.message);
}
return inputSources.map(source => ({
id: source.id,
name: source.name,
@@ -65,6 +89,10 @@ ipcMain.handle('link-app-audio', async (event, appName) => {
return await PipewireHelper.linkApplicationToMic(appName);
});
ipcMain.handle('link-monitor-audio', async () => {
return await PipewireHelper.linkMonitorToMic();
});
// Handle saving and loading the config.json profile
const fs = require('fs');
const configPath = path.join(__dirname, 'config.json');

View File

@@ -9,6 +9,7 @@
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"chart.js": "^4.5.1",
"electron": "^40.6.0",
"socket.io-client": "^4.8.3"
}
@@ -34,6 +35,12 @@
"global-agent": "^3.0.0"
}
},
"node_modules/@kurkle/color": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.4.tgz",
"integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==",
"license": "MIT"
},
"node_modules/@sindresorhus/is": {
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz",
@@ -163,6 +170,18 @@
"node": ">=8"
}
},
"node_modules/chart.js": {
"version": "4.5.1",
"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.1.tgz",
"integrity": "sha512-GIjfiT9dbmHRiYi6Nl2yFCq7kkwdkp1W/lp2J99rX0yo9tgJGn3lKQATztIjb5tVtevcBtIdICNWqlq5+E8/Pw==",
"license": "MIT",
"dependencies": {
"@kurkle/color": "^0.3.0"
},
"engines": {
"pnpm": ">=8"
}
},
"node_modules/clone-response": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz",

View File

@@ -11,6 +11,7 @@
"license": "ISC",
"type": "commonjs",
"dependencies": {
"chart.js": "^4.5.1",
"electron": "^40.6.0",
"socket.io-client": "^4.8.3"
}

View File

@@ -12,7 +12,7 @@ class PipewireHelper {
try {
await execAsync(`pw-cli destroy ${VIRT_MIC_NAME}`).catch(() => {}); // Cleanup old
const cmd = `pw-cli create-node adapter '{ factory.name=support.null-audio-sink node.name=${VIRT_MIC_NAME} node.description="SimpleScreenshare Audio" media.class=Audio/Source/Virtual object.linger=1 audio.position=[FL,FR] }'`;
const cmd = `pw-cli create-node adapter \'{ factory.name=support.null-audio-sink node.name=${VIRT_MIC_NAME} node.description=\"SimpleScreenshare Audio\" media.class=Audio/Source/Virtual object.linger=1 audio.position=[FL,FR] audio.rate=48000 audio.channels=2 }\'`;
const { stdout } = await execAsync(cmd);
console.log("Created virtual mic:", stdout.trim());
@@ -79,6 +79,47 @@ class PipewireHelper {
}
}
// Remove all existing links TO the virtual mic's input ports
// This prevents echo from stale connections when switching audio sources
static async unlinkAllFromMic() {
try {
// IMPORTANT: Use `pw-link -l` NOT `pw-link -l -I` — the -I flag hangs when piped
const { stdout } = await execAsync(`pw-link -l`, { maxBuffer: 1024 * 1024, timeout: 3000 }).catch(() => ({ stdout: '' }));
if (!stdout) return;
const lines = stdout.split('\n');
// pw-link -l format:
// alsa_output...:monitor_FL (source port - NOT indented)
// |-> simplescreenshare-audio:input_FL (outgoing link - indented with |->)
let currentSourcePort = null;
for (const line of lines) {
if (!line.trim()) continue;
// Non-indented line = port declaration
if (!line.startsWith(' ')) {
currentSourcePort = line.trim();
continue;
}
// Indented line with |-> targeting our virtual mic
const trimmed = line.trim();
if (trimmed.startsWith('|->') && (trimmed.includes(`${VIRT_MIC_NAME}:input_`) || trimmed.includes('SimpleScreenshare Audio:input_'))) {
const targetPort = trimmed.replace('|->', '').trim();
if (currentSourcePort && targetPort) {
console.log(`Unlinking: "${currentSourcePort}" -> "${targetPort}"`);
await execAsync(`pw-link -d "${currentSourcePort}" "${targetPort}"`).catch(e =>
console.log("pw-link unlink:", e.message)
);
}
}
}
} catch (error) {
console.error("Failed to unlink from mic:", error);
}
}
// Link a target application's output to our Virtual Microphone
static async linkApplicationToMic(targetAppName) {
try {
@@ -108,6 +149,9 @@ class PipewireHelper {
console.log(`Linking ${targetAppName} (ID: ${targetNode.id}) to ${VIRT_MIC_NAME} (ID: ${micNode.id})`);
// Clean up any existing links to prevent echo from stale connections
await this.unlinkAllFromMic();
// 4. Find the Ports for both nodes
const ports = dump.filter(n => n.type === 'PipeWire:Interface:Port');
@@ -143,6 +187,73 @@ class PipewireHelper {
return false;
}
}
// Link the system's default audio output monitor to our Virtual Microphone
// This captures ALL desktop audio cleanly via Pipewire without Chromium's broken desktop audio capture
static async linkMonitorToMic() {
    try {
        // Dump the full PipeWire object graph; 50 MB buffer because pw-dump
        // output grows with the number of nodes/ports on the system.
        const { stdout: dumpOut } = await execAsync('pw-dump', { maxBuffer: 1024 * 1024 * 50 });
        const dump = JSON.parse(dumpOut);
        // Find the default audio sink (the system's main output)
        // NOTE(review): picks the first Audio/Sink whose node.name contains
        // "output" — assumed to be the default sink; confirm on multi-sink setups.
        const sinkNode = dump.find(node =>
            node.info &&
            node.info.props &&
            node.info.props['media.class'] === 'Audio/Sink' &&
            (node.info.props['node.name'] || '').includes('output')
        );
        // Find our virtual mic node
        const micNode = dump.find(node =>
            node.info &&
            node.info.props &&
            node.info.props['node.name'] === VIRT_MIC_NAME
        );
        if (!sinkNode || !micNode) {
            console.error("Could not find default sink or virtual mic node");
            return false;
        }
        console.log(`Linking system monitor (ID: ${sinkNode.id}) to ${VIRT_MIC_NAME} (ID: ${micNode.id})`);
        // Clean up any existing links to prevent echo from stale connections
        await this.unlinkAllFromMic();
        const ports = dump.filter(n => n.type === 'PipeWire:Interface:Port');
        // The monitor ports on a sink are "output" direction (they output what the sink is playing)
        const sinkMonitorPorts = ports.filter(p =>
            p.info.props['node.id'] === sinkNode.id && p.info.direction === 'output'
        );
        const sinkFL = sinkMonitorPorts.find(p => p.info.props['audio.channel'] === 'FL');
        const sinkFR = sinkMonitorPorts.find(p => p.info.props['audio.channel'] === 'FR');
        const micPorts = ports.filter(p => p.info.props['node.id'] === micNode.id && p.info.direction === 'input');
        const micFL = micPorts.find(p => p.info.props['audio.channel'] === 'FL');
        const micFR = micPorts.find(p => p.info.props['audio.channel'] === 'FR');
        if (!sinkFL || !sinkFR || !micFL || !micFR) {
            console.error("Could not find stereo monitor/mic ports for linking");
            return false;
        }
        // Prefer stable port aliases, fall back to object.path, then raw id —
        // pw-link accepts any of these as a port identifier.
        const sinkFlAlias = sinkFL.info.props['port.alias'] || sinkFL.info.props['object.path'] || sinkFL.id;
        const sinkFrAlias = sinkFR.info.props['port.alias'] || sinkFR.info.props['object.path'] || sinkFR.id;
        const micFlAlias = micFL.info.props['port.alias'] || micFL.info.props['object.path'] || micFL.id;
        const micFrAlias = micFR.info.props['port.alias'] || micFR.info.props['object.path'] || micFR.id;
        // Link left and right channels independently; failures are logged but non-fatal.
        await execAsync(`pw-link "${sinkFlAlias}" "${micFlAlias}"`).catch(e => console.log("pw-link output:", e.message));
        await execAsync(`pw-link "${sinkFrAlias}" "${micFrAlias}"`).catch(e => console.log("pw-link output:", e.message));
        console.log("Successfully linked system monitor audio.");
        return true;
    } catch (error) {
        console.error("Failed to link monitor:", error);
        return false;
    }
}
}
module.exports = PipewireHelper;

View File

@@ -4,6 +4,7 @@ contextBridge.exposeInMainWorld('electronAPI', {
getSources: () => ipcRenderer.invoke('get-sources'),
getAudioApps: () => ipcRenderer.invoke('get-audio-apps'),
linkAppAudio: (appName) => ipcRenderer.invoke('link-app-audio', appName),
linkMonitorAudio: () => ipcRenderer.invoke('link-monitor-audio'),
getConfig: () => ipcRenderer.invoke('get-config'),
saveConfig: (config) => ipcRenderer.invoke('save-config', config)
});

View File

@@ -8,12 +8,22 @@ const stopBtn = document.getElementById('stopBtn');
const localVideo = document.getElementById('localVideo');
const statusText = document.getElementById('statusText');
const statsPanel = document.getElementById('statsPanel');
const viewerCountDiv = document.getElementById('viewerCount');
// Reflect the current number of open peer connections in the UI.
function updateViewerCount() {
    if (!viewerCountDiv) return;
    viewerCountDiv.innerText = `Viewers: ${Object.keys(peerConnections).length}`;
}
let socket;
let peerConnections = {};
let activeStream;
let selectedVideoSourceId = null;
// Chart.js instance tracking
let bitrateChart = null;
const config = {
iceServers: [
{ urls: "stun:localhost:3478" },
@@ -25,7 +35,6 @@ const config = {
// Also enumerate native audio devices from navigator!
getSourcesBtn.addEventListener('click', async () => {
sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%;">Loading sources...</div>';
audioSelect.innerHTML = '<option value="">Loading audio devices...</option>';
startBtn.disabled = true;
selectedVideoSourceId = null;
@@ -41,8 +50,9 @@ getSourcesBtn.addEventListener('click', async () => {
img.src = source.thumbnail;
const label = document.createElement('span');
label.innerText = source.name;
label.title = source.name;
// source.name usually contains the application name
label.innerText = source.name || `Screen ${source.id}`;
label.title = source.name || `Screen ${source.id}`;
item.appendChild(img);
item.appendChild(label);
@@ -58,23 +68,26 @@ getSourcesBtn.addEventListener('click', async () => {
sourcesGrid.appendChild(item);
});
// --- Fetch Application Audio Sources via built Pipewire Helper ---
const audioApps = await window.electronAPI.getAudioApps();
audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
audioApps.forEach(app => {
const option = document.createElement('option');
// We pass the actual application name into the value so the main process can find it via pw-dump
option.value = app.name;
option.text = `${app.name} (${app.mediaName})`;
audioSelect.appendChild(option);
});
// Add custom formatting if there's only one item (like on Wayland)
if (sources.length === 1) {
sourcesGrid.classList.add('single-item');
// On Wayland with a single source, just auto-select it WITHOUT calling startPreview.
// startPreview triggers another getUserMedia which opens a SECOND Wayland portal dialog.
// The thumbnail already shows what the source looks like.
selectedVideoSourceId = sources[0].id;
sourcesGrid.firstChild.classList.add('selected');
startBtn.disabled = false;
} else {
sourcesGrid.classList.remove('single-item');
}
// If we don't disable start button here, it would be enabled before user clicked a grid item
// Ensure start button remains disabled if no source was auto-selected
if (!selectedVideoSourceId) {
startBtn.disabled = true;
}
} catch (e) {
console.error(e);
sourcesGrid.innerHTML = '<div style="color:red; width:100%;">Error loading sources</div>';
audioSelect.innerHTML = '<option value="none">Error loading audio</option>';
}
});
@@ -90,6 +103,8 @@ async function startPreview(videoSourceId) {
if (!videoSourceId) {
localVideo.style.display = 'none';
const placeholder = document.getElementById('videoPlaceholder');
if (placeholder) placeholder.style.display = 'block';
return;
}
@@ -110,11 +125,74 @@ async function startPreview(videoSourceId) {
localVideo.srcObject = previewStream;
localVideo.style.display = 'block';
const placeholder = document.getElementById('videoPlaceholder');
if (placeholder) placeholder.style.display = 'none';
} catch (e) {
console.error("Failed to start preview stream:", e);
}
}
// --- Audio Capture Helper ---

// Capture the "SimpleScreenshare" virtual microphone, which PipeWire exposes
// to Chromium as a regular audio input device. All processing (echo
// cancellation, AGC, noise suppression) is disabled so the broadcast audio is
// faithful to the source. Returns a MediaStream, or null if the device is not
// enumerable yet.
async function captureVirtualMic() {
    const devices = await navigator.mediaDevices.enumerateDevices();
    const virtMic = devices.find(d => d.kind === 'audioinput' && d.label.toLowerCase().includes('simplescreenshare'));
    if (!virtMic) {
        console.warn("Virtual mic device not found in navigator enumeration");
        return null;
    }
    return await navigator.mediaDevices.getUserMedia({
        audio: {
            deviceId: { exact: virtMic.deviceId },
            echoCancellation: { exact: false },
            autoGainControl: { exact: false },
            noiseSuppression: { exact: false },
            channelCount: 2,
            sampleRate: 48000
        },
        video: false
    });
}

// Resolve the requested audio source to a MediaStream.
//   targetAppName — 'none' (video only), 'all_desktop', or a PipeWire app name.
//   videoSourceId — currently unused; kept for interface stability.
// Returns a MediaStream or null when no audio should be attached.
async function getAudioStream(targetAppName, videoSourceId) {
    if (!targetAppName || targetAppName === 'none') return null;

    if (targetAppName === 'all_desktop') {
        // Use Pipewire to link the system's default audio output monitor to our virtual mic.
        // This avoids Chromium's broken chromeMediaSource desktop audio which causes echoing
        // and double Wayland ScreenCast portal prompts.
        const linked = await window.electronAPI.linkMonitorAudio();
        if (!linked) {
            console.warn("Failed to link system monitor audio.");
            return null;
        }
        return await captureVirtualMic();
    }

    // Application-specific capture: PipeWire links the app's output to the virtual mic.
    const linked = await window.electronAPI.linkAppAudio(targetAppName);
    if (!linked) {
        console.warn("Failed to link application audio.");
        return null;
    }
    return await captureVirtualMic();
}
// 2. Start Broadcast
startBtn.addEventListener('click', async () => {
const url = serverUrlInput.value;
@@ -122,8 +200,8 @@ startBtn.addEventListener('click', async () => {
const videoSourceId = selectedVideoSourceId;
const targetAppName = audioSelect.value;
if (!videoSourceId || !url || !password) {
alert("Please fill out URL, Password, and select a visual source.");
if (!url || !password) {
alert("Please fill out URL and Password.");
return;
}
@@ -131,56 +209,33 @@ startBtn.addEventListener('click', async () => {
window.electronAPI.saveConfig({ serverUrl: url, serverPassword: password });
try {
// Stop the preview grab so we can grab the real stream cleanly
// Reuse the preview stream if available, otherwise create a new one.
// On Wayland, this is typically the ONLY portal prompt since we skip getSources on startup.
let stream;
if (previewStream) {
previewStream.getTracks().forEach(t => t.stop());
stream = previewStream;
previewStream = null;
} else {
// Build video constraints — omit chromeMediaSourceId if no source was pre-selected.
// On Wayland this lets the portal handle source selection.
const videoMandatory = { chromeMediaSource: 'desktop' };
if (selectedVideoSourceId) {
videoMandatory.chromeMediaSourceId = selectedVideoSourceId;
}
const stream = await navigator.mediaDevices.getUserMedia({
stream = await navigator.mediaDevices.getUserMedia({
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: videoSourceId,
}
}
video: { mandatory: videoMandatory }
});
}
const videoTrack = stream.getVideoTracks()[0];
await videoTrack.applyConstraints({ frameRate: { ideal: 60 } });
if (videoTrack) await videoTrack.applyConstraints({ frameRate: { ideal: 60 } });
// If user selected an application, grab the Virtual Mic input and link the app to it!
// Add audio if requested (virtual mic capture does NOT trigger a Wayland portal)
if (targetAppName && targetAppName !== 'none') {
const linked = await window.electronAPI.linkAppAudio(targetAppName);
if (linked) {
// Now that the pipewire graph is linked, we just need to read from our Virtual Mic sink!
// Chromium registers this as a standard Input device
const devices = await navigator.mediaDevices.enumerateDevices();
const virtMic = devices.find(d => d.kind === 'audioinput' && d.label.toLowerCase().includes('simplescreenshare'));
if (virtMic) {
const audioStream = await navigator.mediaDevices.getUserMedia({
audio: {
deviceId: { exact: virtMic.deviceId },
echoCancellation: false,
autoGainControl: false,
noiseSuppression: false,
googAutoGainControl: false,
googEchoCancellation: false,
googNoiseSuppression: false,
googHighpassFilter: false,
channelCount: 2,
sampleRate: 48000
},
video: false
});
const audioStream = await getAudioStream(targetAppName, videoSourceId);
if (audioStream) {
stream.addTrack(audioStream.getAudioTracks()[0]);
} else {
console.warn("Virtual mic device not found in navigator enumeration");
}
} else {
alert("Failed to link application audio. Broadcasting video only.");
}
}
@@ -193,6 +248,7 @@ startBtn.addEventListener('click', async () => {
startBtn.style.display = 'none';
stopBtn.style.display = 'inline-block';
statsPanel.style.display = 'block';
if (viewerCountDiv) viewerCountDiv.style.display = 'block';
statusText.innerText = `Broadcasting to ${url}`;
// Auto stop if user closes the requested window
@@ -221,6 +277,7 @@ function connectAndBroadcast(url, password) {
const peerConnection = new RTCPeerConnection(config);
peerConnections[id] = peerConnection;
updateViewerCount();
activeStream.getTracks().forEach(track => {
const sender = peerConnection.addTrack(track, activeStream);
@@ -297,6 +354,7 @@ function connectAndBroadcast(url, password) {
if (peerConnections[id]) {
peerConnections[id].close();
delete peerConnections[id];
updateViewerCount();
}
});
}
@@ -314,14 +372,71 @@ function stopSharing() {
peerConnections = {};
localVideo.style.display = 'none';
const placeholder = document.getElementById('videoPlaceholder');
if (placeholder) placeholder.style.display = 'block';
statsPanel.style.display = 'none';
startBtn.style.display = 'inline-block';
stopBtn.style.display = 'none';
statusText.innerText = 'Not Broadcasting';
if (viewerCountDiv) {
viewerCountDiv.style.display = 'none';
viewerCountDiv.innerText = 'Viewers: 0';
}
if (bitrateChart) {
bitrateChart.destroy();
bitrateChart = null;
}
}
stopBtn.addEventListener('click', stopSharing);
// --- Dynamic Audio Switching ---
audioSelect.addEventListener('change', async () => {
    // Only meaningful while actively broadcasting.
    if (!activeStream) return;
    const targetAppName = audioSelect.value;
    try {
        const replacement = await getAudioStream(targetAppName, selectedVideoSourceId);
        const replacementTrack = replacement ? replacement.getAudioTracks()[0] : null;

        // Drop every existing audio track from the local active stream.
        for (const oldTrack of activeStream.getAudioTracks()) {
            oldTrack.stop();
            activeStream.removeTrack(oldTrack);
        }

        // Attach the replacement track locally (if any).
        if (replacementTrack) {
            activeStream.addTrack(replacementTrack);
        }

        // Hot-swap the outgoing audio on every established WebRTC connection.
        for (const pc of Object.values(peerConnections)) {
            const sender = pc.getSenders().find(s => s.track && s.track.kind === 'audio');
            if (sender) {
                // `replaceTrack` swaps without renegotiation; replacing with
                // null simply mutes the outgoing audio.
                sender.replaceTrack(replacementTrack || null).catch(e => console.error("replaceTrack error:", e));
            } else if (replacementTrack) {
                // A broadcast started with 'No Audio' never created an audio
                // transceiver, so adding one would require renegotiation
                // (i.e. a restart of the broadcast).
                console.warn("Cannot add audio dynamically to a stream that started with 'No Audio'. Please restart the broadcast.");
                alert("Cannot swap to audio mid-stream if the broadcast started with 'No Audio'. Please stop and restart.");
            }
        }
    } catch (e) {
        console.error("Failed to switch audio dynamically:", e);
    }
});
// --- Stats Monitoring Loop ---
let lastBytesSent = 0;
let lastTimestamp = 0;
@@ -329,12 +444,61 @@ let lastTimestamp = 0;
setInterval(async () => {
if (!activeStream || Object.keys(peerConnections).length === 0) return;
// Initialize chart if not present
if (!bitrateChart) {
const ctx = document.getElementById('bitrateChart').getContext('2d');
bitrateChart = new Chart(ctx, {
type: 'line',
data: {
labels: Array(20).fill(''),
datasets: [{
label: 'Bitrate (kbps)',
data: Array(20).fill(0),
borderColor: '#aaaaaa',
backgroundColor: 'rgba(170, 170, 170, 0.1)',
borderWidth: 2,
fill: true,
tension: 0.4,
pointRadius: 0
}]
},
options: {
responsive: true,
maintainAspectRatio: false,
animation: false,
plugins: {
legend: { display: false }
},
scales: {
x: { display: false },
y: {
display: true,
position: 'right',
ticks: { color: '#94a3b8', font: { size: 10 } },
grid: { color: 'rgba(255,255,255,0.05)' }
}
}
}
});
}
// Get stats from the first active peer connection
const pc = Object.values(peerConnections)[0];
if (!pc) return;
try {
const stats = await pc.getStats();
let videoCodec = 'Unknown';
let audioCodec = 'Unknown';
// Scan for codec objects globally
stats.forEach(report => {
if (report.type === 'codec') {
if (report.mimeType.toLowerCase().includes('video')) videoCodec = report.mimeType.split('/')[1] || report.mimeType;
if (report.mimeType.toLowerCase().includes('audio')) audioCodec = report.mimeType.split('/')[1] || report.mimeType;
}
});
stats.forEach(report => {
if (report.type === 'outbound-rtp' && report.kind === 'video') {
const fps = report.framesPerSecond || 0;
@@ -355,14 +519,52 @@ setInterval(async () => {
document.getElementById('statsFps').innerText = fps;
document.getElementById('statsRes').innerText = res;
document.getElementById('statsBitrate').innerText = bitrate + ' kbps';
document.getElementById('statsVideoCodec').innerText = videoCodec;
// Update chart
if (bitrateChart) {
bitrateChart.data.datasets[0].data.shift();
bitrateChart.data.datasets[0].data.push(bitrate);
bitrateChart.update();
}
} else if (report.type === 'outbound-rtp' && report.kind === 'audio') {
document.getElementById('statsAudioCodec').innerText = audioCodec;
}
});
} catch (e) { console.error("Stats error", e); }
}, 1000);
// Initial load of sources & config
// Initial load: config + audio apps only (no portal prompt on startup)
window.electronAPI.getConfig().then(cfg => {
if (cfg.serverUrl) serverUrlInput.value = cfg.serverUrl;
if (cfg.serverPassword) serverPasswordInput.value = cfg.serverPassword;
});
getSourcesBtn.click();
// Fetch audio applications on startup (this only reads PipeWire, no Wayland portal)
(async () => {
    try {
        const audioApps = await window.electronAPI.getAudioApps();
        audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';

        // Always offer a full-desktop capture entry first.
        const allDesktopOption = document.createElement('option');
        allDesktopOption.value = 'all_desktop';
        allDesktopOption.text = 'All Desktop Audio (System Default)';
        audioSelect.appendChild(allDesktopOption);

        // Then one entry per PipeWire application currently emitting audio.
        for (const app of audioApps) {
            const option = document.createElement('option');
            option.value = app.name;
            option.text = `${app.name} (${app.mediaName})`;
            audioSelect.appendChild(option);
        }
    } catch (e) {
        console.error('Failed to load audio apps:', e);
        audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
    }
    // Show the source grid as ready (user can optionally click "Select Sources" for thumbnails)
    sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%; text-align:center; padding:1rem;">Click "Start Broadcast" to select a source, or use "Select Sources" for thumbnails.</div>';
    // Start button is always enabled — source selection happens via the portal
    startBtn.disabled = false;
})();

56
client/wayland-helper.py Executable file
View File

#!/usr/bin/env python3
"""Best-effort Wayland window-title discovery.

Wayland isolates window metadata from standard utilities, so this helper
combines an XWayland ``wmctrl`` listing with process-table scraping of
well-known GUI applications, then prints the result as a JSON array of
``{"title": ...}`` objects on stdout (parsed by the Electron main process).
"""
import json
import subprocess


def get_wayland_windows():
    """Collect window/application titles and print them as JSON.

    Returns the list of ``{"title": str}`` dicts that was printed; the list
    is never empty (a generic placeholder is appended as a last resort).
    """
    windows = []

    # 1. XWayland fallback: wmctrl can list windows managed via XWayland.
    try:
        wmctrl_out = subprocess.run(
            ['wmctrl', '-l'], capture_output=True, text=True, timeout=1
        ).stdout
        for line in wmctrl_out.splitlines():
            # wmctrl -l format: <win-id> <desktop> <host> <title...>
            parts = line.split(maxsplit=3)
            if len(parts) >= 4:
                windows.append({"title": parts[3]})
    except Exception:
        pass  # wmctrl missing or timed out — fall through to ps scraping

    # 2. Scrape the process table for common GUI apps.  Match by prefix
    # rather than exact line equality: `ps -eo comm=` truncates names to
    # 15 characters (e.g. "telegram-desktop" -> "telegram-deskto"), which
    # an exact match would miss.
    try:
        ps_out = subprocess.run(
            ['ps', '-eo', 'comm='], capture_output=True, text=True
        ).stdout
        running_procs = ps_out.lower().splitlines()
        common_apps = {
            'spotify': 'Spotify',
            'discord': 'Discord',
            'chrome': 'Google Chrome',
            'chromium': 'Chromium',
            'firefox': 'Firefox',
            'code': 'VS Code',
            'obsidian': 'Obsidian',
            'telegram': 'Telegram',
            'slack': 'Slack',
            'steam': 'Steam',
        }
        for proc, name in common_apps.items():
            is_running = any(line.startswith(proc) for line in running_procs)
            if is_running and not any(name in w["title"] for w in windows):
                windows.append({"title": name})
    except Exception:
        pass  # `ps` unavailable — keep whatever we already found

    # 3. Guarantee a non-empty result for the caller.
    if not windows:
        windows.append({"title": "Wayland Desktop / App"})

    print(json.dumps(windows))
    return windows


if __name__ == '__main__':
    get_wayland_windows()