I wrote the following code to draw a sound wave from an AudioBuffer, but all I get is a canvas with a straight horizontal line:
// Build the audio graph: an AudioContext plus an AnalyserNode from a helper.
const { audioContext, analyser } = this.getAudioContext();
const source = audioContext.createBufferSource();
source.buffer = audioBuffer;
// NOTE(review): the source is wired straight to the destination here and the
// analyser is never inserted into the chain — see the answer below.
source.connect(audioContext.destination);
// Canvas and its 2D context used to draw the waveform.
const canvas = document.getElementById('canvas');
const canvasCtx = canvas.getContext("2d");
// One byte per time-domain sample; getByteTimeDomainData (below) fills it
// with values 0-255, where 128 corresponds to zero amplitude.
let sinewaveDataArray = new Uint8Array(analyser.fftSize);
// Draw one frame of the time-domain waveform and schedule the next frame.
const drawSinewave = function() {
  // Pull the latest time-domain samples into the shared byte array.
  analyser.getByteTimeDomainData(sinewaveDataArray);
  requestAnimationFrame(drawSinewave);

  // Wipe the previous frame.
  canvasCtx.fillStyle = 'white';
  canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

  canvasCtx.lineWidth = 2;
  canvasCtx.strokeStyle = "black";
  canvasCtx.beginPath();

  // Horizontal distance between two consecutive samples.
  const step = canvas.width * 1.0 / analyser.fftSize;
  let xPos = 0;
  for (let i = 0; i < analyser.fftSize; i++) {
    // Scale the byte (0-255) so that 128 (silence) maps to 1.0 ...
    const level = sinewaveDataArray[i] / 128.0;
    // ... which puts a silent signal at the vertical midpoint.
    const yPos = level * canvas.height / 2;
    if (i === 0) {
      canvasCtx.moveTo(xPos, yPos);
    } else {
      canvasCtx.lineTo(xPos, yPos);
    }
    xPos += step;
  }
  // Close the trace at the midpoint of the right-hand edge.
  canvasCtx.lineTo(canvas.width, canvas.height / 2);
  canvasCtx.stroke();
};
// NOTE(review): calling start() outside a user gesture can be blocked by
// browser autoplay policies — the answer below moves it into a click handler.
source.start();
drawSinewave();
// Build the Web Audio objects used above: an AudioContext plus an
// AnalyserNode created from it. Returns { audioContext, analyser }.
getAudioContext = () => {
  // Prefer the standard constructor, fall back to the WebKit-prefixed one
  // (older Safari). Held in a const to avoid the implicit global the
  // original created by assigning to the bare `AudioContext` identifier,
  // which also clobbered the built-in of the same name.
  const AudioContextCtor = window.AudioContext || window.webkitAudioContext;
  const audioContext = new AudioContextCtor();
  const analyser = audioContext.createAnalyser();
  return { audioContext, analyser };
};
The end result I am looking for is something like the first image clip here: https://meyda.js.org/
Any idea what I am missing?
I think two little changes are necessary to make it work.
source.start()
needs to be executed in response to a user event. A simple button with a click handler can be used to achieve that. This is necessary to deal with the autoplay policy which is present in some browsers.
// Resume the (possibly suspended) context and start playback only after a
// user gesture, so the browser's autoplay policy permits audible output.
aReferenceToAButton.addEventListener('click', async () => {
await audioContext.resume();
source.start();
});
Last but not least, you need to pipe the signal through the AnalyserNode.
// connect() returns the node it connected to, so calls chain:
// source -> analyser -> destination.
source
.connect(analyser)
.connect(audioContext.destination);
I hope this helps.