690 lines
25 KiB
JavaScript
690 lines
25 KiB
JavaScript
// mediasoupClient is loaded via <script> tag in index.html (esbuild bundle)
const { Device } = mediasoupClient;

// --- DOM references (ids defined in index.html) ---
const serverUrlInput = document.getElementById('serverUrl');
const serverPasswordInput = document.getElementById('serverPassword');
const sourcesGrid = document.getElementById('sourcesGrid');
const audioSelect = document.getElementById('audioSelect');
const getSourcesBtn = document.getElementById('getSourcesBtn');
const startBtn = document.getElementById('startBtn');
const stopBtn = document.getElementById('stopBtn');
const localVideo = document.getElementById('localVideo');
const statusText = document.getElementById('statusText');
const statsPanel = document.getElementById('statsPanel');
const viewerCountDiv = document.getElementById('viewerCount');
const qualitySelect = document.getElementById('qualitySelect');

// --- Broadcast state ---
let socket;                        // socket.io connection to the SFU server (null when idle)
let activeStream;                  // MediaStream currently being broadcast (null when idle)
let selectedVideoSourceId = null;  // desktop source id chosen in the picker grid

// --- Mediasoup State ---
let device;          // mediasoup-client Device, loaded with the router's RTP capabilities
let sendTransport;   // WebRTC send transport carrying both producers
let videoProducer;   // mediasoup Producer for the screen video track
let audioProducer;   // mediasoup Producer for the captured audio track (may be null)

// Chart.js instance tracking
let bitrateChart = null;
|
// 1. Get Desktop Sources / Switch Video Source Mid-Stream
//
// While idle this button populates the source picker grid; while a broadcast
// is live it re-opens the capture portal and hot-swaps the video track.

/**
 * Replace the live broadcast's video track with a freshly captured one.
 * On Wayland, the compositor limits concurrent ScreenCast sessions, so the
 * old session MUST be stopped BEFORE requesting a new one.
 */
async function switchVideoSourceMidStream() {
  try {
    activeStream.getVideoTracks().forEach(t => {
      // Detach onended first so stopping the old track doesn't trigger stopSharing().
      t.onended = null;
      t.stop();
    });

    // Give the compositor time to tear down the old ScreenCast session
    await new Promise(r => setTimeout(r, 1000));

    const newStream = await navigator.mediaDevices.getUserMedia({
      audio: false,
      video: { mandatory: { chromeMediaSource: 'desktop' } }
    });

    const newVideoTrack = newStream.getVideoTracks()[0];
    if (!newVideoTrack) {
      // The old track is already stopped, so there is no live video left.
      // Tear the broadcast down instead of leaving a dead stream behind.
      stopSharing();
      return;
    }

    await newVideoTrack.applyConstraints({ frameRate: { ideal: 60 } });

    // Swap the track in the active stream
    const oldVideoTrack = activeStream.getVideoTracks()[0];
    if (oldVideoTrack) {
      activeStream.removeTrack(oldVideoTrack);
    }
    activeStream.addTrack(newVideoTrack);

    // Hot-swap on the mediasoup producer (no renegotiation needed!)
    if (videoProducer) {
      await videoProducer.replaceTrack({ track: newVideoTrack });
    }

    // Update local preview and re-arm the "user ended capture" hook.
    localVideo.srcObject = activeStream;
    newVideoTrack.onended = stopSharing;
  } catch (e) {
    console.error("Failed to switch video source:", e);
    stopSharing();
  }
}

/**
 * Populate the source picker grid (used while NOT broadcasting).
 * Clicking an item starts a local preview and enables the Start button;
 * a single available source is auto-selected.
 */
async function loadSourceList() {
  sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%;">Loading sources...</div>';
  startBtn.disabled = true;
  selectedVideoSourceId = null;

  try {
    const sources = await window.electronAPI.getSources();
    sourcesGrid.innerHTML = '';
    sources.forEach(source => {
      const item = document.createElement('div');
      item.className = 'source-item';

      const img = document.createElement('img');
      img.src = source.thumbnail;

      const label = document.createElement('span');
      label.innerText = source.name || `Screen ${source.id}`;
      label.title = source.name || `Screen ${source.id}`;

      item.appendChild(img);
      item.appendChild(label);

      item.addEventListener('click', () => {
        document.querySelectorAll('.source-item').forEach(i => i.classList.remove('selected'));
        item.classList.add('selected');
        selectedVideoSourceId = source.id;
        startPreview(source.id);
        startBtn.disabled = false;
      });

      sourcesGrid.appendChild(item);
    });

    if (sources.length === 1) {
      // Only one capturable source: auto-select it.
      sourcesGrid.classList.add('single-item');
      selectedVideoSourceId = sources[0].id;
      sourcesGrid.firstChild.classList.add('selected');
      startBtn.disabled = false;
    } else {
      sourcesGrid.classList.remove('single-item');
    }

    if (!selectedVideoSourceId) {
      startBtn.disabled = true;
    }
  } catch (e) {
    console.error(e);
    sourcesGrid.innerHTML = '<div style="color:red; width:100%;">Error loading sources</div>';
  }
}

getSourcesBtn.addEventListener('click', async () => {
  if (activeStream) {
    // --- Mid-Stream Video Source Switching ---
    await switchVideoSourceMidStream();
    return;
  }
  // --- Normal Source Selection (when not broadcasting) ---
  await loadSourceList();
});
|
|
|
|
// --- Preview Stream Logic ---
let previewStream = null;   // local-only preview capture; promoted to activeStream on Start

/**
 * Show a local preview of the given desktop source in the <video> element.
 * Stops any previous preview first. Passing a falsy id hides the preview
 * and shows the placeholder instead.
 * @param {string|null} videoSourceId - Electron desktopCapturer source id.
 */
async function startPreview(videoSourceId) {
  if (previewStream) {
    previewStream.getTracks().forEach(t => t.stop());
    previewStream = null;
  }

  if (!videoSourceId) {
    localVideo.style.display = 'none';
    const placeholder = document.getElementById('videoPlaceholder');
    if (placeholder) placeholder.style.display = 'block';
    return;
  }

  try {
    previewStream = await navigator.mediaDevices.getUserMedia({
      audio: false,
      video: {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: videoSourceId,
        }
      }
    });

    const videoTrack = previewStream.getVideoTracks()[0];
    if (videoTrack) {
      await videoTrack.applyConstraints({ frameRate: { ideal: 60 } });
    }

    localVideo.srcObject = previewStream;
    localVideo.style.display = 'block';
    const placeholder = document.getElementById('videoPlaceholder');
    if (placeholder) placeholder.style.display = 'none';
  } catch (e) {
    console.error("Failed to start preview stream:", e);
    // Don't leak a half-initialized capture session (e.g. when
    // applyConstraints throws after getUserMedia already succeeded).
    if (previewStream) {
      previewStream.getTracks().forEach(t => t.stop());
      previewStream = null;
    }
  }
}
|
|
|
|
// --- Audio Capture Helper ---

/**
 * Open a raw capture on the app's PipeWire virtual microphone.
 * Finds the audioinput device whose label contains "simplescreenshare"
 * (created by the main process) and captures it with all browser audio
 * processing disabled, for a bit-faithful stereo desktop feed.
 * @param {string} notFoundMsg - Warning logged when the virtual mic is missing.
 * @returns {Promise<MediaStream|null>} Audio-only stream, or null if the mic is absent.
 */
async function captureVirtualMicStream(notFoundMsg) {
  const devices = await navigator.mediaDevices.enumerateDevices();
  const virtMic = devices.find(d => d.kind === 'audioinput' && d.label.toLowerCase().includes('simplescreenshare'));

  if (!virtMic) {
    console.warn(notFoundMsg);
    return null;
  }

  return await navigator.mediaDevices.getUserMedia({
    audio: {
      deviceId: { exact: virtMic.deviceId },
      // Disable all processing: we want the desktop audio untouched.
      echoCancellation: { exact: false },
      autoGainControl: { exact: false },
      noiseSuppression: { exact: false },
      channelCount: 2,
      sampleRate: 48000
    },
    video: false
  });
}

/**
 * Resolve the audio MediaStream for the chosen capture target.
 * @param {string} targetAppName - 'none', 'all_desktop', or a PipeWire application name.
 * @param {string|null} videoSourceId - Currently unused; kept for call compatibility.
 * @returns {Promise<MediaStream|null>} null when no audio is requested or capture fails.
 */
async function getAudioStream(targetAppName, videoSourceId) {
  if (!targetAppName || targetAppName === 'none') return null;

  if (targetAppName === 'all_desktop') {
    // Whole-desktop capture: link the system monitor into the virtual mic.
    const linked = await window.electronAPI.linkMonitorAudio();
    if (!linked) {
      console.warn("Failed to link system monitor audio.");
      return null;
    }
    return await captureVirtualMicStream("Virtual mic device not found for monitor capture");
  }

  // Application specific (Pipewire)
  const linked = await window.electronAPI.linkAppAudio(targetAppName);
  if (!linked) {
    console.warn("Failed to link application audio.");
    return null;
  }
  return await captureVirtualMicStream("Virtual mic device not found in navigator enumeration");
}
|
|
|
|
// 2. Start Broadcast
startBtn.addEventListener('click', async () => {
  const url = serverUrlInput.value;
  const password = serverPasswordInput.value;
  const videoSourceId = selectedVideoSourceId;
  const targetAppName = audioSelect.value;

  if (!url || !password) {
    alert("Please fill out URL and Password.");
    return;
  }

  // Persist connection settings for the next session.
  window.electronAPI.saveConfig({ serverUrl: url, serverPassword: password });

  try {
    let stream;
    if (previewStream) {
      // Reuse the preview capture session instead of prompting again.
      stream = previewStream;
      previewStream = null;
    } else {
      const videoMandatory = { chromeMediaSource: 'desktop' };
      if (videoSourceId) {
        videoMandatory.chromeMediaSourceId = videoSourceId;
      }
      stream = await navigator.mediaDevices.getUserMedia({
        audio: false,
        video: { mandatory: videoMandatory }
      });
    }

    const videoTrack = stream.getVideoTracks()[0];
    // qualitySelect values are "bitrate|fps"; only the fps half is used here.
    const [, targetFps] = (qualitySelect.value || '8000000|60').split('|');
    if (videoTrack) await videoTrack.applyConstraints({ frameRate: { ideal: parseInt(targetFps, 10) } });

    if (targetAppName && targetAppName !== 'none') {
      const audioStream = await getAudioStream(targetAppName, videoSourceId);
      // getAudioStream may return null, and its stream may lack a track.
      const audioTrack = audioStream ? audioStream.getAudioTracks()[0] : null;
      if (audioTrack) {
        stream.addTrack(audioTrack);
      }
    }

    activeStream = stream;
    localVideo.srcObject = stream;
    localVideo.style.display = 'block';

    await connectAndBroadcast(url, password);

    // Flip the UI into "broadcasting" mode only after the SFU handshake succeeds.
    startBtn.style.display = 'none';
    stopBtn.style.display = 'inline-block';
    statsPanel.style.display = 'block';
    if (viewerCountDiv) viewerCountDiv.style.display = 'block';
    statusText.innerText = `Broadcasting to ${url}`;

    // Tear everything down if the user ends the capture from the OS/portal UI.
    if (videoTrack) videoTrack.onended = stopSharing;

  } catch (e) {
    console.error("Stream capture error:", e);
    alert("Failed to capture screen. See console for details.");
  }
});
|
|
|
|
// --- Mediasoup SFU Connection ---

/**
 * Connect to the SFU, authenticate as broadcaster, and start producing the
 * tracks of `activeStream` over a mediasoup send transport.
 * Resolves once producers are live; rejects on auth failure, setup failure,
 * or if the socket cannot connect at all.
 * @param {string} url - socket.io server URL.
 * @param {string} password - broadcaster password.
 */
async function connectAndBroadcast(url, password) {
  return new Promise((resolve, reject) => {
    // io() is available globally from socket.io-client script tag in index.html
    socket = io(url);

    // Promise wrapper around socket.io's emit-with-ack request pattern.
    // The server signals failure by acking `{ error: '...' }`.
    const request = (event, payload) => new Promise((res, rej) => {
      const ack = (data) => {
        if (data && data.error) rej(new Error(data.error));
        else res(data);
      };
      if (payload === undefined) socket.emit(event, ack);
      else socket.emit(event, payload, ack);
    });

    let everConnected = false;

    socket.on('connect', async () => {
      everConnected = true;
      try {
        // 1. Authenticate as broadcaster
        socket.emit('broadcaster', password);

        // 2. Get router RTP capabilities
        const rtpCapabilities = await request('getRouterRtpCapabilities');

        // 3. Create mediasoup Device and load capabilities
        device = new Device();
        await device.load({ routerRtpCapabilities: rtpCapabilities });

        // 4. Create send transport
        const transportParams = await request('createWebRtcTransport', { direction: 'send' });
        sendTransport = device.createSendTransport(transportParams);

        // Transport 'connect' event: DTLS handshake
        sendTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
          try {
            await request('connectTransport', {
              transportId: sendTransport.id,
              dtlsParameters
            });
            callback();
          } catch (e) {
            errback(e);
          }
        });

        // Transport 'produce' event: server creates the Producer
        sendTransport.on('produce', async ({ kind, rtpParameters }, callback, errback) => {
          try {
            const result = await request('produce', {
              transportId: sendTransport.id,
              kind,
              rtpParameters
            });
            callback({ id: result.id });
          } catch (e) {
            errback(e);
          }
        });

        // 5. Produce video
        const videoTrack = activeStream.getVideoTracks()[0];
        if (videoTrack) {
          const [targetBitrate] = (qualitySelect.value || '8000000|60').split('|');
          videoProducer = await sendTransport.produce({
            track: videoTrack,
            encodings: [{
              maxBitrate: parseInt(targetBitrate, 10),
            }],
            codecOptions: {
              videoGoogleStartBitrate: 1000
            }
          });

          videoProducer.on('transportclose', () => {
            videoProducer = null;
          });
        }

        // 6. Produce audio (if present)
        const audioTrack = activeStream.getAudioTracks()[0];
        if (audioTrack) {
          audioProducer = await sendTransport.produce({
            track: audioTrack,
            codecOptions: {
              opusStereo: true,
              opusDtx: true,
              opusMaxPlaybackRate: 48000,
              opusMaxAverageBitrate: 510000
            }
          });

          audioProducer.on('transportclose', () => {
            audioProducer = null;
          });
        }

        // 7. Track viewer count
        socket.on('viewerCount', (count) => {
          if (viewerCountDiv) viewerCountDiv.innerText = `Viewers: ${count}`;
        });

        // Get initial viewer count
        socket.emit('getViewerCount', (count) => {
          if (viewerCountDiv) viewerCountDiv.innerText = `Viewers: ${count}`;
        });

        resolve();
      } catch (e) {
        console.error('Mediasoup setup error:', e);
        reject(e);
      }
    });

    socket.on('authError', (msg) => {
      alert(msg);
      stopSharing();
      reject(new Error(msg));
    });

    socket.on('connect_error', (err) => {
      console.error('Socket connection error:', err);
      // BUG FIX: this handler previously only logged, so when the server was
      // unreachable the returned promise never settled and the caller's await
      // hung forever. Fail fast on the first error before any successful
      // connect, and stop socket.io's automatic retry loop.
      if (!everConnected) {
        socket.disconnect();
        reject(new Error(`Connection failed: ${err.message || err}`));
      }
    });
  });
}
|
|
|
|
/**
 * Stop broadcasting: close all mediasoup resources, stop capture tracks,
 * disconnect from the server, and reset the UI to its idle state.
 * Safe to call at any time; every step is guarded.
 */
function stopSharing() {
  // Close producers, then the transport they ride on.
  if (videoProducer) {
    videoProducer.close();
    videoProducer = null;
  }
  if (audioProducer) {
    audioProducer.close();
    audioProducer = null;
  }
  if (sendTransport) {
    sendTransport.close();
    sendTransport = null;
  }
  device = null;

  // Stop every capture track and drop the stream.
  if (activeStream) {
    for (const track of activeStream.getTracks()) {
      track.stop();
    }
    activeStream = null;
  }

  // Drop the signalling connection.
  if (socket) {
    socket.disconnect();
    socket = null;
  }

  // Restore the idle preview area.
  localVideo.style.display = 'none';
  const placeholder = document.getElementById('videoPlaceholder');
  if (placeholder) placeholder.style.display = 'block';

  // Restore the idle control panel.
  statsPanel.style.display = 'none';
  startBtn.style.display = 'inline-block';
  stopBtn.style.display = 'none';
  statusText.innerText = 'Not Broadcasting';
  if (viewerCountDiv) {
    viewerCountDiv.style.display = 'none';
    viewerCountDiv.innerText = 'Viewers: 0';
  }

  // Dispose the stats chart so a fresh one is built on the next broadcast.
  if (bitrateChart) {
    bitrateChart.destroy();
    bitrateChart = null;
  }
}

stopBtn.addEventListener('click', stopSharing);
|
|
|
// --- Dynamic Audio Switching ---
// Swap the broadcast's audio source live, without renegotiation. Only does
// anything while a broadcast is active; otherwise the selection is simply
// picked up when the broadcast starts.
audioSelect.addEventListener('change', async () => {
  if (!activeStream || !sendTransport) return;

  const targetAppName = audioSelect.value;
  try {
    const newAudioStream = await getAudioStream(targetAppName, selectedVideoSourceId);
    const newAudioTrack = newAudioStream ? newAudioStream.getAudioTracks()[0] : null;

    // Remove old track from local active stream
    const oldAudioTracks = activeStream.getAudioTracks();
    if (oldAudioTracks.length > 0) {
      oldAudioTracks.forEach(t => {
        t.stop();
        activeStream.removeTrack(t);
      });
    }

    if (newAudioTrack) {
      activeStream.addTrack(newAudioTrack);
    }

    // Hot-swap on the mediasoup audio producer
    if (audioProducer && newAudioTrack) {
      await audioProducer.replaceTrack({ track: newAudioTrack });
      // BUG FIX: if audio was previously muted via pause(), replaceTrack()
      // alone leaves the producer paused — resume so the new source is heard.
      if (audioProducer.paused) {
        await audioProducer.resume();
      }
    } else if (audioProducer && !newAudioTrack) {
      // Mute by pausing the producer
      await audioProducer.pause();
    } else if (!audioProducer && newAudioTrack) {
      // Need to create a new producer for audio
      audioProducer = await sendTransport.produce({
        track: newAudioTrack,
        codecOptions: {
          opusStereo: true,
          opusDtx: true,
          opusMaxPlaybackRate: 48000,
          opusMaxAverageBitrate: 510000
        }
      });
      audioProducer.on('transportclose', () => {
        audioProducer = null;
      });
    }

  } catch (e) {
    console.error("Failed to switch audio dynamically:", e);
  }
});
|
|
|
|
// --- Dynamic Quality Switching ---
// Apply a new "bitrate|fps" preset to a live broadcast: fps via track
// constraints, bitrate via the underlying RTCRtpSender's encoding parameters.
qualitySelect.addEventListener('change', async () => {
  if (!activeStream || !videoProducer) return;

  const [targetBitrate, targetFps] = qualitySelect.value.split('|');

  // Update frame rate on the video track
  const videoTrack = activeStream.getVideoTracks()[0];
  if (videoTrack) {
    await videoTrack.applyConstraints({ frameRate: { ideal: parseInt(targetFps, 10) } }).catch(e => console.error(e));
  }

  // Update max bitrate on the producer's encoding. The early return above
  // guarantees videoProducer is set here.
  try {
    const params = videoProducer.rtpSender.getParameters();
    if (params.encodings && params.encodings.length > 0) {
      params.encodings[0].maxBitrate = parseInt(targetBitrate, 10);
      await videoProducer.rtpSender.setParameters(params);
    }
  } catch (e) {
    console.error("Failed to update bitrate:", e);
  }
});
|
|
|
|
// --- Stats Monitoring Loop ---
// Previous outbound-rtp sample; used to derive a bitrate delta between ticks.
let lastBytesSent = 0;
let lastTimestamp = 0;

// Poll WebRTC stats once per second while broadcasting and render FPS,
// resolution, bitrate, and codecs into the stats panel + rolling chart.
setInterval(async () => {
  // No-op while idle; the chart is destroyed by stopSharing().
  if (!activeStream || !videoProducer) return;

  // Initialize chart if not present
  if (!bitrateChart) {
    const ctx = document.getElementById('bitrateChart').getContext('2d');
    bitrateChart = new Chart(ctx, {
      type: 'line',
      data: {
        // Fixed 20-sample rolling window.
        labels: Array(20).fill(''),
        datasets: [{
          label: 'Bitrate (kbps)',
          data: Array(20).fill(0),
          borderColor: '#aaaaaa',
          backgroundColor: 'rgba(170, 170, 170, 0.1)',
          borderWidth: 2,
          fill: true,
          tension: 0.4,
          pointRadius: 0
        }]
      },
      options: {
        responsive: true,
        maintainAspectRatio: false,
        // Animation off: the chart redraws every second.
        animation: false,
        plugins: {
          legend: { display: false }
        },
        scales: {
          x: { display: false },
          y: {
            display: true,
            position: 'right',
            ticks: { color: '#94a3b8', font: { size: 10 } },
            grid: { color: 'rgba(255,255,255,0.05)' }
          }
        }
      }
    });
  }

  try {
    const stats = await videoProducer.getStats();
    let videoCodec = 'Unknown';
    let audioCodec = 'Unknown';

    // First pass: extract codec names from 'codec' reports.
    stats.forEach(report => {
      if (report.type === 'codec') {
        if (report.mimeType.toLowerCase().includes('video')) videoCodec = report.mimeType.split('/')[1] || report.mimeType;
        if (report.mimeType.toLowerCase().includes('audio')) audioCodec = report.mimeType.split('/')[1] || report.mimeType;
      }
    });

    // Second pass: derive live numbers from the outbound video RTP report.
    stats.forEach(report => {
      if (report.type === 'outbound-rtp' && report.kind === 'video') {
        const fps = report.framesPerSecond || 0;
        const bytesSent = report.bytesSent || 0;
        const timestamp = report.timestamp;
        const res = `${report.frameWidth || 0}x${report.frameHeight || 0}`;

        // bytesDiff*8 bits over timeDiff milliseconds = kilobits/second.
        // First tick after start has no previous sample, so bitrate stays 0.
        let bitrate = 0;
        if (lastTimestamp && lastBytesSent) {
          const timeDiff = timestamp - lastTimestamp;
          const bytesDiff = bytesSent - lastBytesSent;
          bitrate = Math.round((bytesDiff * 8) / timeDiff);
        }
        lastBytesSent = bytesSent;
        lastTimestamp = timestamp;

        document.getElementById('statsFps').innerText = fps;
        document.getElementById('statsRes').innerText = res;
        document.getElementById('statsBitrate').innerText = bitrate + ' kbps';
        document.getElementById('statsVideoCodec').innerText = videoCodec;

        // Slide the 20-sample window forward.
        if (bitrateChart) {
          bitrateChart.data.datasets[0].data.shift();
          bitrateChart.data.datasets[0].data.push(bitrate);
          bitrateChart.update();
        }
      }
    });

    // Get audio codec from audio producer stats
    if (audioProducer) {
      const audioStats = await audioProducer.getStats();
      audioStats.forEach(report => {
        if (report.type === 'codec' && report.mimeType.toLowerCase().includes('audio')) {
          document.getElementById('statsAudioCodec').innerText = report.mimeType.split('/')[1] || report.mimeType;
        }
      });
    }
  } catch (e) { console.error("Stats error", e); }
}, 1000);
|
|
|
|
// --- Reusable Audio Dropdown Population ---

/**
 * Rebuild the audio-source <select> from a list of PipeWire applications,
 * keeping the user's current choice selected when it still exists.
 * @param {Array<{name: string, mediaName: string}>} audioApps
 */
function populateAudioSelect(audioApps) {
  const previousChoice = audioSelect.value;

  // Fixed entries first: "no audio", then whole-desktop capture.
  audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';

  const desktopOption = document.createElement('option');
  desktopOption.value = 'all_desktop';
  desktopOption.text = 'All Desktop Audio (System Default)';
  audioSelect.appendChild(desktopOption);

  // One entry per running application producing audio.
  for (const app of audioApps) {
    const appOption = document.createElement('option');
    appOption.value = app.name;
    appOption.text = `${app.name} (${app.mediaName})`;
    audioSelect.appendChild(appOption);
  }

  // Restore the prior selection if it survived the rebuild.
  const stillPresent = Array.from(audioSelect.options).some(o => o.value === previousChoice);
  if (stillPresent) {
    audioSelect.value = previousChoice;
  }
}

// Listen for live audio source updates from PipeWire monitor
window.electronAPI.onAudioAppsUpdated((apps) => {
  populateAudioSelect(apps);
});
|
|
|
|
// Initial load: config + audio apps only (no portal prompt on startup)
// Pre-fill the connection form from the persisted config.
window.electronAPI.getConfig().then((config) => {
  if (config.serverUrl) serverUrlInput.value = config.serverUrl;
  if (config.serverPassword) serverPasswordInput.value = config.serverPassword;
});

// Fetch audio applications on startup
(async () => {
  try {
    populateAudioSelect(await window.electronAPI.getAudioApps());
  } catch (e) {
    // Fall back to a video-only dropdown if the PipeWire query fails.
    console.error('Failed to load audio apps:', e);
    audioSelect.innerHTML = '<option value="none">No Audio (Video Only)</option>';
  }

  // Leave the source grid empty until the user asks for sources.
  sourcesGrid.innerHTML = '<div style="color:var(--text-secondary); width:100%; text-align:center; padding:1rem;"></div>';
  startBtn.disabled = false;
})();
|