Tags: javascript, typescript, audio, web-audio-api

Audio API center visualisation on frequency


I am working on an audio visualizer for the web that also lets the user "tune" the raw-signal visualizer to a frequency, a feature of many hardware oscilloscopes. Basically, when the user centers on 440 Hz and the input is a 440 Hz sine wave, the wave should stay still on the canvas and not move left or right. My plan was to move the graph to the left according to the frequency (440 Hz = 1/440 s to the left per second, because the wave should repeat every 1/440 s), but this does not seem to work.
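The idea in code (a sketch, where tunedFrequency and pixelsPerSecond are placeholders for values my app already knows):

// Shift the trace left by the current phase of the tuned frequency,
// so the same point of each cycle always lands on the same x position.
const period = 1 / tunedFrequency;          // 440 Hz -> 1/440 s
const phase = context.currentTime % period; // seconds into the current cycle
const offsetX = phase * pixelsPerSecond;    // horizontal shift in pixels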

I could not find the units used by the AnalyserNode's time-domain data. I guess it's in milliseconds, but I am not certain.

"use strict";
// Oscillator instead of mic for debugging
const USE_OSCILLATOR = true;
// Compatibility
if (!window.AudioContext)
    window.AudioContext = window.webkitAudioContext;
if (!navigator.getUserMedia)
    navigator.getUserMedia =
        navigator.mozGetUserMedia ||
            navigator.webkitGetUserMedia ||
            navigator.msGetUserMedia;
// Main
class App {
    constructor(visualizerElement, optionsElement) {
        this.visualizerElement = visualizerElement;
        this.optionsElement = optionsElement;
        // HTML elements
        this.canvas = document.createElement("canvas");
        // Context
        this.context = new AudioContext({
            // Low latency
            latencyHint: "interactive",
        });
        this.canvasCtx = this.canvas.getContext("2d", {
            // Low latency
            desynchronized: true,
            alpha: false,
        });
        // Audio nodes
        this.audioAnalyser = this.context.createAnalyser();
        this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
        this.audioInputStream = null;
        this.audioInputNode = null;
        if (this.canvasCtx === null)
            throw new Error("2D rendering Context not supported by browser.");
        this.updateCanvasSize();
        window.addEventListener("resize", () => this.updateCanvasSize());
        this.drawVisualizer();
        this.visualizerElement.appendChild(this.canvas);
        if (USE_OSCILLATOR) {
            let oscillator = this.context.createOscillator();
            oscillator.type = "sine";
            oscillator.frequency.setValueAtTime(440, this.context.currentTime);
            oscillator.connect(this.audioAnalyser);
            oscillator.start();
        }
        else {
            navigator.getUserMedia({ audio: true }, (stream) => {
                this.audioInputStream = stream;
                this.audioInputNode = this.context.createMediaStreamSource(stream);
                this.audioInputNode.channelCountMode = "explicit";
                this.audioInputNode.channelCount = 1;
                this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
                this.audioInputNode.connect(this.audioAnalyser);
            }, (err) => console.error(err));
        }
    }
    updateCanvasSize() {
        var _a;
        this.canvas.width = window.innerWidth;
        this.canvas.height = window.innerHeight;
        // Flip the y-axis and center the origin vertically (down-leveled optional chaining)
        (_a = this.canvasCtx) === null || _a === void 0 ? void 0 : _a.setTransform(1, 0, 0, -1, 0, this.canvas.height * 0.5);
    }
    drawVisualizer() {
        if (this.canvasCtx === null)
            return;
        const ctx = this.canvasCtx;
        ctx.fillStyle = "black";
        ctx.fillRect(0, -0.5 * this.canvas.height, this.canvas.width, this.canvas.height);
        // Draw FFT
        this.audioAnalyser.getByteFrequencyData(this.audioBuffer);
        const step = this.canvas.width / this.audioBuffer.length;
        const scale = this.canvas.height / (2 * 255);
        ctx.beginPath();
        ctx.moveTo(-step, this.audioBuffer[0] * scale);
        this.audioBuffer.forEach((sample, index) => {
            ctx.lineTo(index * step, scale * sample);
        });
        ctx.strokeStyle = "white";
        ctx.stroke();
        // Get the highest dominant frequency
        let highestFreqHalfHz = 0;
        {
            /**
             * Highest frequency, in 0.5 Hz units
             */
            let highestFreq = NaN;
            let highestFreqAmp = NaN;
            let remSteps = NaN;
            for (let i = this.audioBuffer.length - 1; i >= 0; i--) {
                const sample = this.audioBuffer[i];
                if (sample > 20 && (isNaN(highestFreqAmp) || sample > highestFreqAmp)) {
                    highestFreq = i;
                    highestFreqAmp = sample;
                    if (isNaN(remSteps))
                        remSteps = 500;
                }
                if (!isNaN(remSteps)) {
                    if (remSteps-- < 0)
                        break;
                }
            }
            if (!isNaN(highestFreq)) {
                ctx.beginPath();
                ctx.moveTo(highestFreq * step, 0);
                ctx.lineTo(highestFreq * step, scale * 255);
                ctx.strokeStyle = "green";
                ctx.stroke();
                highestFreqHalfHz = highestFreq;
            }
        }
        // Draw Audio
        this.audioAnalyser.getByteTimeDomainData(this.audioBuffer);
        {
            const bufferSize = this.audioBuffer.length;
            const offsetY = -this.canvas.height * 0.5;
            // I don't know what I am doing here:
            const offsetX = highestFreqHalfHz == 0
                ? 0
                : bufferSize -
                    Math.round(((this.context.currentTime * 1000) % (1 / 440)) % bufferSize);
            // Draw the audio graph with the given offset
            ctx.beginPath();
            ctx.moveTo(-step, this.audioBuffer[0] * scale + offsetY);
            for (let i = 0; i < bufferSize; i++) {
                const index = (offsetX + i) % bufferSize;
                const sample = this.audioBuffer[index];
                ctx.lineTo(i * step, scale * sample + offsetY);
            }
            ctx.strokeStyle = "white";
            ctx.stroke();
        }
    }
}
window.addEventListener("load", () => {
    const app = new App(document.getElementById("visualizer"), document.getElementById("options"));
    requestAnimationFrame(draw);
    function draw() {
        requestAnimationFrame(draw);
        app.drawVisualizer();
    }
});
/* CSS */
html {
    background: black;
}
body {
    width: 100vw;
    height: 100vh;
    margin: 0;
    overflow: hidden;
}

#visualizer {
    position: fixed;
    inset: 0;
}
<!-- HTML -->
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Equalizer</title>
</head>
<body>
    <div id="visualizer"></div>
    <div id="options"></div>
</body>
</html>

The above snippet was generated from TypeScript; you can find the source here. If it worked as intended, the oscillating time-domain graph (bottom) would not be moving.


Solution

  • I was able to solve this problem thanks to Raymond Toy's comment and my maths teacher (thank you, Mr. Klein). The solution is Math.round((this.context.currentTime % iv) * sampleRate), where iv is the period of the frequency (1/Hz): the current time modulo one period gives the phase within the cycle, and multiplying by the sample rate converts that phase from seconds to a sample offset. My original attempt failed because it mixed units, multiplying currentTime by 1000 (milliseconds) while the period 1/440 was in seconds, and it never converted the result into samples. The wave is still not perfectly centered because the FFT's frequency estimate is not very accurate, so in the following example I forced the detected frequency to be the specified one.
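    In isolation, the fix looks like this (a sketch; context is the AudioContext, bufferSize is the length of the time-domain buffer, and the numbers assume a 48000 Hz sample rate):

    // Period of the tuned frequency: 1/440 s ≈ 2.27 ms ≈ 109 samples at 48 kHz
    const iv = 1 / 440;
    // Phase within the current cycle, converted from seconds to samples
    const offsetX = Math.round((context.currentTime % iv) * context.sampleRate) % bufferSize;
    // Reading the time-domain buffer shifted by offsetX keeps the wave
    // phase-locked on screen instead of drifting.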

    "use strict";
    // Oscillator instead of mic for debugging
    const USE_OSCILLATOR = true;
    const OSCILLATOR_HZ = 1000;
    // Compatibility
    if (!window.AudioContext)
        window.AudioContext = window.webkitAudioContext;
    if (!navigator.getUserMedia)
        navigator.getUserMedia =
            navigator.mozGetUserMedia ||
                navigator.webkitGetUserMedia ||
                navigator.msGetUserMedia;
    // Main
    class App {
        constructor(visualizerElement, optionsElement) {
            this.visualizerElement = visualizerElement;
            this.optionsElement = optionsElement;
            // HTML elements
            this.canvas = document.createElement("canvas");
            // Context
            this.context = new AudioContext({
                // Low latency
                latencyHint: "interactive",
            });
            this.canvasCtx = this.canvas.getContext("2d", {
                // Low latency
                desynchronized: true,
                alpha: false,
            });
            // Audio nodes
            this.audioAnalyser = this.context.createAnalyser();
            this.audioBuffer = new Uint8Array(0);
            this.audioInputStream = null;
            this.audioInputNode = null;
            if (this.canvasCtx === null)
                throw new Error("2D rendering Context not supported by browser.");
            this.updateCanvasSize();
            window.addEventListener("resize", () => this.updateCanvasSize());
            this.drawVisualizer();
            this.visualizerElement.appendChild(this.canvas);
            this.audioAnalyser.fftSize = 2048;
            this.audioAnalyser.maxDecibels = -10;
            // fftSize (= 2 * frequencyBinCount) samples, enough for the time-domain data
            this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount * 2);
            this.audioFilter = this.context.createBiquadFilter();
            this.audioFilter.type = "bandpass";
            this.audioFilter.frequency.value = 900;
            this.audioFilter.Q.value = 20;
            this.audioAmplifier = this.context.createGain();
            this.audioAmplifier.gain.value = 5;
            this.audioFilter.connect(this.audioAmplifier);
            this.audioAmplifier.connect(this.audioAnalyser);
            if (USE_OSCILLATOR) {
                let oscillator = this.context.createOscillator();
                oscillator.type = "sine";
                oscillator.frequency.setValueAtTime(OSCILLATOR_HZ, this.context.currentTime);
                oscillator.connect(this.audioFilter);
                oscillator.start();
            }
            else {
                navigator.getUserMedia({ audio: true }, (stream) => {
                    this.audioInputStream = stream;
                    this.audioInputNode = this.context.createMediaStreamSource(stream);
                    this.audioInputNode.channelCountMode = "explicit";
                    this.audioInputNode.channelCount = 1;
                    this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
                    this.audioInputNode.connect(this.audioFilter);
                }, (err) => console.error(err));
            }
        }
        updateCanvasSize() {
            var _a;
            this.canvas.width = window.innerWidth;
            this.canvas.height = window.innerHeight;
            // Flip the y-axis and center the origin vertically (down-leveled optional chaining)
            (_a = this.canvasCtx) === null || _a === void 0 ? void 0 : _a.setTransform(1, 0, 0, -1, 0, this.canvas.height * 0.5);
        }
        drawVisualizer() {
            if (this.canvasCtx === null)
                return;
            const ctx = this.canvasCtx;
            ctx.globalAlpha = 0.5;
            ctx.fillStyle = "black";
            ctx.fillRect(0, -0.5 * this.canvas.height, this.canvas.width, this.canvas.height);
            ctx.globalAlpha = 1;
            // Draw FFT
            this.audioAnalyser.getByteFrequencyData(this.audioBuffer);
            const scale = this.canvas.height / (2 * 255);
            const { frequencyBinCount } = this.audioAnalyser;
            const { sampleRate } = this.context;
            {
                const step = this.canvas.width / frequencyBinCount;
                ctx.beginPath();
                ctx.moveTo(-step, this.audioBuffer[0] * scale);
                for (let index = 0; index < frequencyBinCount; index++) {
                    ctx.lineTo(index * step, scale * this.audioBuffer[index]);
                }
                ctx.strokeStyle = "white";
                ctx.stroke();
            }
            // Get the highest dominant frequency
            const step = this.canvas.width / frequencyBinCount;
            let highestFreqHz = 0;
            {
                /**
                 * Highest frequency index in the buffer
                 */
                let highestFreqIndex = NaN;
                let highestFreqAmp = NaN;
                let remSteps = NaN;
                for (let i = frequencyBinCount - 1; i >= 0; i--) {
                    const sample = this.audioBuffer[i];
                    if (sample > 30) {
                        if (isNaN(highestFreqAmp)) {
                            highestFreqIndex = i;
                            highestFreqAmp = sample;
                        }
                        else {
                            if (sample > highestFreqAmp) {
                                highestFreqIndex = i;
                                highestFreqAmp = sample;
                            }
                        }
                        //if (isNaN(remSteps)) remSteps = 100;
                    }
                    if (!isNaN(remSteps)) {
                        if (remSteps-- < 0)
                            break;
                    }
                }
                if (!isNaN(highestFreqIndex)) {
                    // Force the exact value (optional; compensates for the FFT's coarse resolution):
                    highestFreqIndex =
                        (OSCILLATOR_HZ * (2 * frequencyBinCount)) / sampleRate;
                    ctx.beginPath();
                    ctx.moveTo(highestFreqIndex * step, 0);
                    ctx.lineTo(highestFreqIndex * step, scale * 255);
                    ctx.strokeStyle = "green";
                    ctx.stroke();
                    highestFreqHz =
                        (highestFreqIndex * sampleRate) / (2 * frequencyBinCount);
                    window.HZ = highestFreqHz;
                }
            }
            // Draw Audio
            this.audioAnalyser.getByteTimeDomainData(this.audioBuffer);
            {
                const iv = highestFreqHz == 0 ? 0 : 1 / highestFreqHz;
                const bufferSize = this.audioBuffer.length;
                const offsetY = -this.canvas.height / 2.4;
                const startIndex = Math.round(iv * sampleRate);
                const step = this.canvas.width / (this.audioBuffer.length - startIndex);
                const scale = this.canvas.height / (3 * 255);
                const offsetX = highestFreqHz == 0
                    ? 0
                    : Math.round((this.context.currentTime % iv) * sampleRate) %
                        bufferSize;
                // Draw the audio graph with the given offset
                ctx.beginPath();
                ctx.moveTo(-step, this.audioBuffer[startIndex - offsetX] * scale + offsetY);
                for (let i = startIndex; i < bufferSize; i += 4) {
                    const index = (i - offsetX) % bufferSize;
                    const sample = this.audioBuffer[index];
                    ctx.lineTo((i - startIndex) * step, scale * sample + offsetY);
                }
                ctx.strokeStyle = "white";
                ctx.stroke();
            }
        }
    }
    window.addEventListener("load", () => {
        const app = new App(document.getElementById("visualizer"), document.getElementById("options"));
        requestAnimationFrame(draw);
        function draw() {
            requestAnimationFrame(draw);
            app.drawVisualizer();
        }
    });
    /* CSS */
    html {
        background: black;
    }
    body {
        width: 100vw;
        height: 100vh;
        margin: 0;
        overflow: hidden;
    }
    
    #visualizer {
        position: fixed;
        inset: 0;
    }
    <!-- HTML -->
    <!DOCTYPE html>
    <html lang="en">
    <head>
        <meta charset="UTF-8">
        <meta name="viewport" content="width=device-width, initial-scale=1.0">
        <title>Equalizer</title>
    </head>
    <body>
        <div id="visualizer"></div>
        <div id="options"></div>
    </body>
    </html>
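
    Why forcing the detected frequency helps (a back-of-the-envelope check, assuming a 48000 Hz context; context and analyser stand for the app's AudioContext and AnalyserNode):

    // Frequency resolution of the analyser:
    const binWidthHz = context.sampleRate / analyser.fftSize; // 48000 / 2048 ≈ 23.4 Hz
    // The nearest bin to a 1000 Hz tone can be off by up to half a bin (~11.7 Hz),
    // and even a small frequency error makes the phase-locked trace drift slowly.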