Add listener spectrum visualizer

3nd3r
2026-01-02 23:41:36 -06:00
parent 7141bd900e
commit c08ee70fe8
3 changed files with 98 additions and 5 deletions


@@ -1545,7 +1545,67 @@ let isBroadcasting = false;
 let autoStartStream = false;
 let listenerAudioContext = null;
 let listenerGainNode = null;
+let listenerAnalyserNode = null;
+let listenerMediaElementSourceNode = null;
+let listenerVuMeterRunning = false;
+let listenerChunksReceived = 0;
+
+function startListenerVUMeter() {
+    if (listenerVuMeterRunning) return;
+    listenerVuMeterRunning = true;
+    const draw = () => {
+        if (!listenerVuMeterRunning) return;
+        requestAnimationFrame(draw);
+        const canvas = document.getElementById('viz-listener');
+        if (!canvas || !listenerAnalyserNode) return;
+        const ctx = canvas.getContext('2d');
+        if (!ctx) return;
+        // Keep canvas sized correctly for DPI
+        const dpr = window.devicePixelRatio || 1;
+        const rect = canvas.getBoundingClientRect();
+        const targetW = Math.max(1, Math.floor(rect.width * dpr));
+        const targetH = Math.max(1, Math.floor(rect.height * dpr));
+        if (canvas.width !== targetW || canvas.height !== targetH) {
+            canvas.width = targetW;
+            canvas.height = targetH;
+        }
+        const analyser = listenerAnalyserNode;
+        const bufferLength = analyser.frequencyBinCount;
+        const dataArray = new Uint8Array(bufferLength);
+        analyser.getByteFrequencyData(dataArray);
+        const width = canvas.width;
+        const height = canvas.height;
+        const barCount = 32;
+        const barWidth = width / barCount;
+        ctx.fillStyle = '#0a0a12';
+        ctx.fillRect(0, 0, width, height);
+        // Listener uses the magenta hue (matches Deck B styling)
+        const hue = 280;
+        for (let i = 0; i < barCount; i++) {
+            const freqIndex = Math.floor(Math.pow(i / barCount, 1.5) * bufferLength);
+            const value = (dataArray[freqIndex] || 0) / 255;
+            const barHeight = value * height;
+            const lightness = 30 + (value * 50);
+            const gradient = ctx.createLinearGradient(0, height, 0, height - barHeight);
+            gradient.addColorStop(0, `hsl(${hue}, 100%, ${lightness}%)`);
+            gradient.addColorStop(1, `hsl(${hue}, 100%, ${Math.min(lightness + 20, 80)}%)`);
+            ctx.fillStyle = gradient;
+            ctx.fillRect(i * barWidth, height - barHeight, barWidth - 2, barHeight);
+        }
+    };
+    draw();
+}

 let currentStreamMimeType = null;

 function getMp3FallbackUrl() {
@@ -2351,17 +2411,36 @@ async function enableListenerAudio() {
     }

     // 3. Bridge Audio Element to AudioContext if not already connected
-    if (window.listenerAudio && !window.listenerAudio._connectedToContext) {
+    if (window.listenerAudio) {
         try {
-            const sourceNode = listenerAudioContext.createMediaElementSource(window.listenerAudio);
             if (!listenerGainNode) {
                 listenerGainNode = listenerAudioContext.createGain();
                 listenerGainNode.gain.value = 0.8;
                 listenerGainNode.connect(listenerAudioContext.destination);
             }
-            sourceNode.connect(listenerGainNode);
+            if (!listenerAnalyserNode) {
+                listenerAnalyserNode = listenerAudioContext.createAnalyser();
+                listenerAnalyserNode.fftSize = 256;
+            }
+            if (!listenerMediaElementSourceNode) {
+                listenerMediaElementSourceNode = listenerAudioContext.createMediaElementSource(window.listenerAudio);
+            }
+            // Ensure a clean, single connection chain:
+            // media element -> analyser -> gain -> destination
+            try { listenerMediaElementSourceNode.disconnect(); } catch (_) { }
+            try { listenerAnalyserNode.disconnect(); } catch (_) { }
+            listenerMediaElementSourceNode.connect(listenerAnalyserNode);
+            listenerAnalyserNode.connect(listenerGainNode);
             window.listenerAudio._connectedToContext = true;
-            console.log('🔗 Connected audio element to AudioContext');
+            console.log('🔗 Connected audio element to AudioContext (with analyser)');
+            // Start visualizer after the graph exists
+            startListenerVUMeter();
         } catch (e) {
             console.warn('⚠️ Could not connect to AudioContext:', e.message);
         }
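
For context, the pattern this diff sets up is the standard Web Audio spectrum recipe: a media element feeds an AnalyserNode, the analyser feeds a gain node and the destination, and a requestAnimationFrame loop copies getByteFrequencyData() into canvas bars. Below is a minimal standalone sketch of that wiring, separate from the commit itself; the element IDs (player, spectrum) and variable names are illustrative placeholders, not identifiers from this codebase.

const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
const audioEl = document.getElementById('player');    // hypothetical <audio> element
const canvas = document.getElementById('spectrum');   // hypothetical <canvas> element
const ctx2d = canvas.getContext('2d');

// One-time graph: media element -> analyser -> gain -> destination.
// createMediaElementSource may only be called once per element.
const source = audioCtx.createMediaElementSource(audioEl);
const analyser = audioCtx.createAnalyser();
analyser.fftSize = 256;                                // 128 frequency bins
const gain = audioCtx.createGain();
source.connect(analyser);
analyser.connect(gain);
gain.connect(audioCtx.destination);

const bins = new Uint8Array(analyser.frequencyBinCount);
let running = false;

function drawSpectrum() {
    requestAnimationFrame(drawSpectrum);
    analyser.getByteFrequencyData(bins);               // 0..255 magnitude per bin
    ctx2d.clearRect(0, 0, canvas.width, canvas.height);
    const barWidth = canvas.width / bins.length;
    for (let i = 0; i < bins.length; i++) {
        const barHeight = (bins[i] / 255) * canvas.height;
        ctx2d.fillRect(i * barWidth, canvas.height - barHeight, barWidth - 1, barHeight);
    }
}

// Autoplay policies require a user gesture before the context can run.
audioEl.addEventListener('play', () => {
    audioCtx.resume();
    if (!running) { running = true; drawSpectrum(); }
});

The commit's version differs mainly in caching the source, analyser, and gain nodes in module-level globals so repeated calls to enableListenerAudio() reuse the same MediaElementSource, plus the DPI-aware canvas sizing and hue gradient used for the bars.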