Tags: html, audio, web-audio-api

Is it possible to create a multichannel `MediaElementAudioSourceNode` from a multichannel `HTMLAudioElement`?


It's possible to load a multichannel WAV file (e.g. first-order ambisonics) using an HTML `<audio>` element and to create a `MediaElementAudioSourceNode` from that element.

Regarding the `channelCount` property of that `MediaElementAudioSourceNode`, the documentation states that it depends on the `HTMLMediaElement` the node was created from, but when I try this the channel count is always 2.

Is it possible to create a node whose channel count matches the number of channels of the loaded file?
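
For reference, a minimal sketch of what I tried (the file name is a placeholder for any multichannel WAV):

    const ctx = new AudioContext();
    const el = new Audio('multichannel.wav'); // placeholder: a 4-channel file
    const source = ctx.createMediaElementSource(el);
    console.log(source.channelCount); // always logs 2 here, regardless of the file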

Best, N


Solution

  • To answer my own question: all channels are in fact loaded and played back; you just need to adjust a few settings. I set the channel count to 4 and the channel interpretation to `discrete`, then used a channel splitter to route each channel individually to an analyser, et voilà, I can see all the different waveforms. Example below; an alternative decoding approach is sketched after it.

    <html>
      <body>
        <h1>Multitrack Experiment</h1>
    
        <p>This is a little experiment to see if all channels of a multichannel audio file are played back,
        which they are.</p>
        
        <p>The audio file contains four different waveforms on four channels. (0 -> sine, 1 -> saw, 2 -> square, 3 -> noise)</p>
        
        <audio src="tracktest.wav"></audio>
        
        <button>
          <span>Play/Pause</span>
        </button>
        <br/>
        <br/>
        
        <canvas id="oscilloscope"></canvas>
    
        <script>
        // for legacy browsers
        //const AudioContext = window.AudioContext || window.webkitAudioContext;
    
        let playing = false;
        
        const audioContext = new AudioContext();
        // get the audio element
        const audioElement = document.querySelector('audio');
        
        // pass it into the audio context and
        // configure the media element source node so that all four
        // channels come through (otherwise it still reports 2 channels);
        // also set the channel interpretation to 'discrete' so the
        // channels are kept separate instead of being remixed
        const track = audioContext.createMediaElementSource(audioElement);
        track.channelCount = 4;
        track.channelInterpretation = 'discrete';
    
        // just for monitoring purposes
        track.connect(audioContext.destination);
    
        // split the channels so each one can be analysed individually
        const splitter = audioContext.createChannelSplitter(4);
        track.connect(splitter);
            
        const analyser = audioContext.createAnalyser();
        analyser.fftSize = 2048;
    
        // uncomment the different options to see the different results ...
        splitter.connect(analyser, 0, 0);
        //splitter.connect(analyser, 1, 0);
        //splitter.connect(analyser, 2, 0);
        //splitter.connect(analyser, 3, 0);
    
        // select our play button
        const playButton = document.querySelector('button');
    
        playButton.addEventListener('click', function() {

          // check if context is in suspended state (autoplay policy)
          if (audioContext.state === 'suspended') {
            audioContext.resume();
          }

          console.log(track);

          // play or pause the track depending on state
          if (!playing) {
            console.log("play");
            audioElement.play();
            playing = true;
          } else {
            console.log("stop");
            audioElement.pause();
            playing = false;
          }

        }, false);
    
        const bufferLength = analyser.frequencyBinCount;
        const dataArray = new Uint8Array(bufferLength);
        analyser.getByteTimeDomainData(dataArray);
    
    
        // Get a canvas defined with ID "oscilloscope"
        const canvas = document.getElementById("oscilloscope");
        const canvasCtx = canvas.getContext("2d");
    
        // draw an oscilloscope of the current audio source
    
        function draw() {

          requestAnimationFrame(draw);

          analyser.getByteTimeDomainData(dataArray);

          canvasCtx.fillStyle = "rgb(200, 200, 200)";
          canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

          canvasCtx.lineWidth = 2;
          canvasCtx.strokeStyle = "rgb(0, 0, 0)";

          canvasCtx.beginPath();

          const sliceWidth = canvas.width * 1.0 / bufferLength;
          let x = 0;

          for (let i = 0; i < bufferLength; i++) {

            const v = dataArray[i] / 128.0;
            const y = v * canvas.height / 2;

            if (i === 0) {
              canvasCtx.moveTo(x, y);
            } else {
              canvasCtx.lineTo(x, y);
            }

            x += sliceWidth;
          }

          canvasCtx.lineTo(canvas.width, canvas.height / 2);
          canvasCtx.stroke();
        }
    
        draw();
    
        </script>
      </body>
    </html>
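
For completeness (this is not part of the answer above): if streaming playback through an `<audio>` element isn't required, another way to get a node whose channel count matches the file is to fetch and decode it into an `AudioBuffer`, whose `numberOfChannels` always reflects the file's actual channel count. A rough sketch, assuming the same `tracktest.wav`:

    // decode the whole file up front instead of streaming it;
    // buffer.numberOfChannels matches the file's channel count
    const ctx = new AudioContext();
    fetch('tracktest.wav')
      .then((response) => response.arrayBuffer())
      .then((data) => ctx.decodeAudioData(data))
      .then((buffer) => {
        console.log(buffer.numberOfChannels); // 4 for the test file above
        const source = ctx.createBufferSource();
        source.buffer = buffer;               // all channels preserved
        source.connect(ctx.destination);
        source.start();
      });

The trade-off is memory: the whole file is decoded into RAM up front, whereas the `MediaElementAudioSourceNode` approach streams it.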