forked from computertech/techdj
Switch listener streaming to MP3-only
script.js (241 changed lines)
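The listener side now points a plain <audio> element at a server-generated MP3 stream, so the backend needs an ffmpeg transcode endpoint that is not part of this diff. A minimal sketch of what such an endpoint could look like (the /stream.mp3 path, Express, and the 128 kbps bitrate are assumptions, not taken from this repo):

const { spawn } = require('child_process');
const express = require('express');

const app = express();
const listeners = new Set(); // open HTTP responses waiting for MP3 data

// One shared ffmpeg process: broadcaster chunks in on stdin, MP3 frames out on stdout.
const ffmpeg = spawn('ffmpeg', [
  '-hide_banner', '-loglevel', 'error',
  '-i', 'pipe:0',                        // compressed audio from the broadcaster
  '-codec:a', 'libmp3lame', '-b:a', '128k',
  '-f', 'mp3', 'pipe:1',                 // MP3 stream for listeners
]);

ffmpeg.stdout.on('data', (mp3Chunk) => {
  for (const res of listeners) res.write(mp3Chunk);
});

app.get('/stream.mp3', (req, res) => {
  res.setHeader('Content-Type', 'audio/mpeg');
  res.setHeader('Cache-Control', 'no-store');
  listeners.add(res);
  req.on('close', () => listeners.delete(res));
});

// Broadcaster audio (e.g. the payload of the 'audio_data' socket event) is fed with:
// ffmpeg.stdin.write(Buffer.from(chunk));

app.listen(3000);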
@@ -1848,11 +1848,12 @@ function startBroadcast() {
   console.log(`🎚️ Starting broadcast at ${qualitySelect.value}kbps`);
 
   const preferredTypes = [
-    'audio/webm;codecs=opus',
-    'audio/webm',
-    'audio/ogg;codecs=opus',
+    // Prefer MP4/AAC when available (broad device support)
+    'audio/mp4;codecs=mp4a.40.2',
+    'audio/mp4',
+    // Fallbacks
     'audio/webm',
     'audio/ogg',
   ];
   const chosenType = preferredTypes.find((t) => {
     try {
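The hunk above ends mid-probe; the selection presumably checks each candidate with MediaRecorder.isTypeSupported and keeps the first match. An illustrative sketch of that pattern (not the repo's exact code):

const preferredTypes = ['audio/mp4;codecs=mp4a.40.2', 'audio/mp4', 'audio/webm', 'audio/ogg'];

// First container/codec the recording engine accepts, or undefined if none match.
const chosenType = preferredTypes.find((t) => {
  try {
    return MediaRecorder.isTypeSupported(t);
  } catch (e) {
    return false; // browsers without MediaRecorder at all
  }
});

// chosenType is then passed as { mimeType: chosenType } when constructing the MediaRecorder.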
@@ -1993,9 +1994,9 @@ function startBroadcast() {
   document.getElementById('broadcast-status').textContent = '🔴 LIVE';
   document.getElementById('broadcast-status').classList.add('live');
 
-  // Notify server (include codec/container so listeners can configure SourceBuffer)
+  // Notify server that broadcast is active (listeners use MP3 stream)
   if (!socket) initSocket();
-  socket.emit('start_broadcast', { mimeType: currentStreamMimeType });
+  socket.emit('start_broadcast');
   socket.emit('get_listener_count');
 
   console.log('✅ Broadcasting started successfully!');
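With the mimeType payload gone, the server no longer has to forward codec details to listeners; it only needs to know that a broadcast is live. A hypothetical Socket.IO server handler consistent with the events used in this file (the 'listeners' room, the 'stop_broadcast' event, and the 'listener_count' reply name are assumptions):

const { Server } = require('socket.io');
const io = new Server(3001);

let broadcasting = false;

io.on('connection', (socket) => {
  socket.on('join_listener', () => socket.join('listeners'));

  socket.on('start_broadcast', () => {
    broadcasting = true;
    io.to('listeners').emit('broadcast_started');
  });

  socket.on('stop_broadcast', () => {
    broadcasting = false;
    io.to('listeners').emit('broadcast_stopped');
  });

  socket.on('get_listener_count', () => {
    const room = io.sockets.adapter.rooms.get('listeners');
    socket.emit('listener_count', room ? room.size : 0);
  });
});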
@@ -2153,7 +2154,7 @@ function toggleAutoStream(enabled) {
 // ========== LISTENER MODE ==========
 
 function initListenerMode() {
-  console.log('🎧 Initializing listener mode (MediaSource Pipeline)...');
+  console.log('🎧 Initializing listener mode (MP3 stream)...');
 
   // UI Feedback for listener
   const appContainer = document.querySelector('.app-container');
@@ -2224,8 +2225,8 @@ function initListenerMode() {
   }
 
   // Create a new hidden media element.
-  // Note: MSE (MediaSource) support is often more reliable on <video> than <audio>.
-  audio = document.createElement('video');
+  // For MP3 we can use a plain <audio> element.
+  audio = document.createElement('audio');
   audio.autoplay = false; // Don't autoplay - we use the Enable Audio button
   audio.muted = false;
   audio.controls = false;
@@ -2233,73 +2234,12 @@ function initListenerMode() {
   audio.setAttribute('playsinline', '');
   audio.style.display = 'none';
   document.body.appendChild(audio);
-  console.log('🆕 Created fresh media element (video) for listener');
+  console.log('🆕 Created fresh media element (audio) for listener');
 
-  // Initialize MediaSource for streaming binary chunks
-  const mediaSource = new MediaSource();
-  audio.src = URL.createObjectURL(mediaSource);
-
-  // CRITICAL: Call load() to initialize the MediaSource
-  // Without this, the audio element won't load the MediaSource until play() is called,
-  // which will fail with "no supported sources" if no data is buffered yet
+  // MP3 stream (server-side) — requires ffmpeg on the server.
+  audio.src = getMp3FallbackUrl();
   audio.load();
-  console.log('🎬 Audio element loading MediaSource...');
-
-  let sourceBuffer = null;
-  let audioQueue = [];
-  let chunksReceived = 0;
-  let lastStatusUpdate = 0;
-
-  mediaSource.addEventListener('sourceopen', () => {
-    console.log('📦 MediaSource opened');
-    const mimeType = window.currentStreamMimeType || currentStreamMimeType || 'audio/webm;codecs=opus';
-
-    if (!MediaSource.isTypeSupported(mimeType)) {
-      console.error(`❌ Browser does not support ${mimeType}`);
-      const statusEl = document.getElementById('connection-status');
-      if (statusEl) statusEl.textContent = '⚠️ WebM/Opus not supported - using MP3 fallback stream';
-
-      // Fallback to MP3 stream served by the backend (requires ffmpeg on server host)
-      const fallbackUrl = getMp3FallbackUrl();
-      console.log(`🎧 Switching to MP3 fallback: ${fallbackUrl}`);
-      audio.src = fallbackUrl;
-      audio.load();
-      return;
-    }
-
-    try {
-      sourceBuffer = mediaSource.addSourceBuffer(mimeType);
-      sourceBuffer.mode = 'sequence';
-
-      // Kick off first append if data is already in queue
-      if (audioQueue.length > 0 && !sourceBuffer.updating && mediaSource.readyState === 'open') {
-        sourceBuffer.appendBuffer(audioQueue.shift());
-      }
-
-      sourceBuffer.addEventListener('updateend', () => {
-        // Process next chunk in queue
-        if (audioQueue.length > 0 && !sourceBuffer.updating) {
-          sourceBuffer.appendBuffer(audioQueue.shift());
-        }
-
-        // Periodic cleanup of old buffer data to prevent memory bloat
-        // Keep the last 60 seconds of audio data
-        if (audio.buffered.length > 0 && !sourceBuffer.updating && mediaSource.readyState === 'open') {
-          const end = audio.buffered.end(audio.buffered.length - 1);
-          const start = audio.buffered.start(0);
-          if (end - start > 120) { // If buffer is > 2 mins
-            try {
-              sourceBuffer.remove(0, end - 60);
-            } catch (e) {
-              console.warn('Buffer cleanup skipped:', e.message);
-            }
-          }
-        }
-      });
-    } catch (e) {
-      console.error('❌ Failed to add SourceBuffer:', e);
-    }
-  });
+  console.log(`🎧 Listener source set to MP3 stream: ${audio.src}`);
 
   // Show enable audio button instead of attempting autoplay
   const enableAudioBtn = document.getElementById('enable-audio-btn');
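getMp3FallbackUrl() is defined elsewhere in script.js and does not appear in this diff; in the simplest case it would just build an absolute URL for the backend's MP3 endpoint, for example (the /stream.mp3 path and the cache-busting parameter are assumptions):

function getMp3FallbackUrl() {
  // Cache-busting timestamp so a reload after a dropped broadcast starts a fresh request.
  return `${window.location.origin}/stream.mp3?t=${Date.now()}`;
}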
@@ -2309,75 +2249,19 @@ function initListenerMode() {
     enableAudioBtn.style.display = 'flex';
   }
   if (statusEl) {
-    statusEl.textContent = '🔵 Click "Enable Audio" to start listening';
+    statusEl.textContent = '🔵 Click "Enable Audio" to start listening (MP3)';
   }
 
   // Store audio element and context for later activation
   window.listenerAudio = audio;
-  window.listenerMediaSource = mediaSource;
+  window.listenerMediaSource = null;
   window.listenerAudioEnabled = false; // Track if user has enabled audio
 
   // Initialize socket and join
   initSocket();
   socket.emit('join_listener');
 
-  socket.on('stream_mime', (data) => {
-    const mt = data && data.mimeType ? String(data.mimeType) : null;
-    if (mt && mt !== window.currentStreamMimeType) {
-      console.log(`📡 Stream mimeType announced: ${mt}`);
-      window.currentStreamMimeType = mt;
-    }
-  });
-
-  let hasHeader = false;
-
-  socket.on('audio_data', (data) => {
-    // We MUST have the header before we can do anything with broadcast chunks
-    const isHeaderDirect = data instanceof ArrayBuffer && data.byteLength > 1000; // Heuristic
-
-    hasHeader = true; // No header request needed for WebM relay
-
-    chunksReceived++;
-    listenerChunksReceived = chunksReceived;
-    audioQueue.push(data);
-
-    // JITTER BUFFER: Reduced to 1 segments (buffered) for WebM/Opus
-    const isHeader = false;
-
-    if (sourceBuffer && !sourceBuffer.updating && mediaSource.readyState === 'open') {
-      if (audioQueue.length >= 1) {
-        try {
-          const next = audioQueue.shift();
-          sourceBuffer.appendBuffer(next);
-
-          // Reset error counter on success
-          if (window.sourceBufferErrorCount) window.sourceBufferErrorCount = 0;
-        } catch (e) {
-          console.error('Buffer append error:', e);
-          window.sourceBufferErrorCount = (window.sourceBufferErrorCount || 0) + 1;
-
-          if (window.sourceBufferErrorCount >= 5) {
-            console.error('❌ Too many SourceBuffer errors - attempting recovery...');
-            const statusEl = document.getElementById('connection-status');
-            if (statusEl) statusEl.textContent = '⚠️ Stream error - reconnecting...';
-            audioQueue = [];
-            chunksReceived = 0;
-            window.sourceBufferErrorCount = 0;
-          }
-        }
-      }
-    }
-
-    // UI Update (only if audio is already enabled, don't overwrite the enable prompt)
-    const now = Date.now();
-    if (now - lastStatusUpdate > 1000 && window.listenerAudioEnabled) {
-      const statusEl = document.getElementById('connection-status');
-      if (statusEl) {
-        statusEl.textContent = `🟢 Connected - ${chunksReceived} chunks (${audioQueue.length} buffered)`;
-      }
-      lastStatusUpdate = now;
-    }
-  });
+  // No socket audio chunks needed in MP3-only mode.
 
   socket.on('broadcast_started', () => {
     const nowPlayingEl = document.getElementById('listener-now-playing');
@@ -2395,8 +2279,6 @@ function initListenerMode() {
   socket.on('broadcast_stopped', () => {
     const nowPlayingEl = document.getElementById('listener-now-playing');
     if (nowPlayingEl) nowPlayingEl.textContent = 'Stream ended';
-    chunksReceived = 0;
-    audioQueue = [];
   });
 
   socket.on('connect', () => {
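One consequence of MP3-only mode is that a listener who is connected when the broadcast ends is left holding a finished HTTP stream. A plausible follow-up, sketched here as an assumption rather than code from this commit, is to re-point the element when the next broadcast starts:

socket.on('broadcast_started', () => {
  const audio = window.listenerAudio;
  if (audio && window.listenerAudioEnabled) {
    audio.src = getMp3FallbackUrl(); // fresh URL so the element reconnects to the live stream
    audio.load();
    audio.play().catch((e) => console.warn('Re-play after broadcast restart failed:', e));
  }
});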
@@ -2491,66 +2373,18 @@ async function enableListenerAudio() {
   const volValue = volEl ? parseInt(volEl.value, 10) : 80;
   setListenerVolume(Number.isFinite(volValue) ? volValue : 80);
 
   // Check if we have buffered data
   const hasBufferedData = () => {
     return window.listenerAudio.buffered && window.listenerAudio.buffered.length > 0;
   };
 
-  // CRITICAL: Wait for buffered data before calling play()
-  // This prevents NotSupportedError when buffer is empty
-  if (!hasBufferedData()) {
-    console.log('⏳ Waiting for audio data to buffer before playback...');
-    if (audioText) audioText.textContent = 'BUFFERING...';
-
-    // Wait for data with timeout (max 5 seconds)
-    const waitForData = new Promise((resolve, reject) => {
-      const timeout = setTimeout(() => {
-        clearInterval(checkInterval);
-        reject(new Error('Timeout waiting for audio data'));
-      }, 5000);
-
-      const checkInterval = setInterval(() => {
-        if (hasBufferedData()) {
-          clearInterval(checkInterval);
-          clearTimeout(timeout);
-          console.log('✅ Audio data buffered, ready to play');
-          resolve();
-        }
-      }, 100);
-    });
-
-    try {
-      await waitForData;
-    } catch (e) {
-      console.warn('⚠️ Timeout waiting for buffer data:', e.message);
-    }
-  } else {
-    console.log('✅ Audio already has buffered data');
-  }
-
+  // MP3 stream: call play() immediately to capture the user gesture.
   if (audioText) audioText.textContent = 'STARTING...';
   console.log('▶️ Attempting to play audio...');
   const playPromise = window.listenerAudio.play();
-
-  // If no buffered data yet, show status but don't block playback
-  if (!hasBufferedData()) {
-    console.log('⏳ Waiting for audio data to buffer...');
-    const chunkCount = Number.isFinite(listenerChunksReceived) ? listenerChunksReceived : 0;
-    if (audioText) {
-      audioText.textContent = chunkCount > 0 ? 'BUFFERING...' : 'WAITING FOR STREAM...';
-    }
-
-    // Start a background checker to update UI
-    const checkInterval = setInterval(() => {
-      if (hasBufferedData()) {
-        clearInterval(checkInterval);
-        console.log('✅ Audio data buffered');
-        const chunkCount = Number.isFinite(listenerChunksReceived) ? listenerChunksReceived : 0;
-      } else if (audioText && chunkCount > 0 && audioText.textContent === 'WAITING FOR STREAM...') {
-        audioText.textContent = 'BUFFERING...';
-      }
-    }, 500);
-  } else {
-    console.log('✅ Audio already has buffered data');
+  // If not buffered yet, show buffering but don't block.
+  if (!hasBufferedData() && audioText) {
+    audioText.textContent = 'BUFFERING...';
   }
+
   await playPromise;
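The reordering in this hunk matters because browsers only treat play() as user-initiated when it is called synchronously inside the click handler, before any await. A stripped-down illustration of the pattern (not the app's exact wiring):

document.getElementById('enable-audio-btn').addEventListener('click', async () => {
  const audio = window.listenerAudio;

  // Kick off playback inside the gesture; the returned promise can be awaited afterwards.
  const playPromise = audio.play();

  try {
    await playPromise;
    console.log('🔊 Playback started');
  } catch (err) {
    console.warn('play() rejected:', err.name); // NotAllowedError, NotSupportedError, ...
  }
});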
@@ -2586,42 +2420,13 @@ async function enableListenerAudio() {
     if (error.name === 'NotAllowedError') {
       errorMsg = 'Browser blocked audio (NotAllowedError). Check permissions.';
     } else if (error.name === 'NotSupportedError') {
-      errorMsg = 'Format not supported or buffer empty (NotSupportedError).';
+      errorMsg = 'MP3 stream not supported or unavailable (NotSupportedError).';
     }
 
     stashedStatus.textContent = '⚠️ ' + errorMsg;
 
     if (error.name === 'NotSupportedError') {
-      // Two common causes:
-      // 1) WebM/Opus MSE isn't supported by this browser
-      // 2) The element cannot play yet (empty buffer / transient)
-      // Prefer a compatibility fallback to MP3 if available.
-      try {
-        const fallbackUrl = getMp3FallbackUrl();
-        console.log(`🎧 NotSupportedError -> switching to MP3 fallback: ${fallbackUrl}`);
-        window.listenerAudio.src = fallbackUrl;
-        window.listenerAudio.load();
-
-        // Retry immediately (still within the click gesture)
-        window.listenerAudio.play().then(() => {
-          stashedStatus.textContent = '🟢 Playing via MP3 fallback';
-          window.listenerAudioEnabled = true;
-        }).catch((e) => {
-          console.error('MP3 fallback play failed:', e);
-          stashedStatus.textContent = '⚠️ MP3 fallback failed. Is ffmpeg installed on the server?';
-        });
-      } catch (e) {
-        console.error('Failed to switch to MP3 fallback:', e);
-      }
-
-      // Also keep a background retry in case it was just a buffer timing issue.
-      console.log('🔄 Retrying playback in background once data arrives...');
-      const retryInterval = setInterval(() => {
-        if (window.listenerAudio.buffered && window.listenerAudio.buffered.length > 0) {
-          clearInterval(retryInterval);
-          window.listenerAudio.play().catch((e) => console.error('Background retry failed:', e));
-        }
-      }, 1000);
+      stashedStatus.textContent = '⚠️ MP3 stream failed. Is ffmpeg installed on the server?';
     }
   }
 }
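Both failure messages in this hunk point at a missing ffmpeg install, so the server could surface that problem at startup instead of at the first listener. A hypothetical check (not part of this commit):

const { spawnSync } = require('child_process');

function assertFfmpegAvailable() {
  const probe = spawnSync('ffmpeg', ['-version'], { encoding: 'utf8' });
  if (probe.error || probe.status !== 0) {
    console.error('⚠️ ffmpeg not found - the MP3 listener stream will not work');
    return false;
  }
  console.log('✅ ffmpeg available:', probe.stdout.split('\n')[0]);
  return true;
}

assertFfmpegAvailable();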