javascript, web-audio-api, audio-recording

I implemented recording with the JavaScript Web Audio API, but the sound quality is poor


I have built a sing-along application using the JavaScript Web Audio API. It works perfectly on iOS Safari and Chrome, but the sound quality is poor on Android Chrome. To solve this, I tried changing the audio deviceId, but that didn't help. Does anyone have information that might help?

One doubt: after recording, I send the file to the server and play it back on another page. I am wondering whether this step is causing the problem.
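
For reference, the upload step is roughly like this sketch (simplified; the /upload endpoint and the "audio" field name are placeholders, not my actual code):

function uploadRecording(blob) {
    // Simplified upload sketch; endpoint and field name are placeholders.
    var formData = new FormData();
    formData.append("audio", blob, "recording.wav");
    return fetch("/upload", { method: "POST", body: formData });
}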

This is my code

function captureUserMedia(mediaConstraints, successCallback) {
    navigator.mediaDevices.getUserMedia(mediaConstraints)
        .then(successCallback)
        .catch(function (error) {
            console.error("getUserMedia failed:", error);
        });
}

function record() {
    if (getParameterByName("startSec").length !== 0) {
        masterSound.currentTime = parseFloat(getParameterByName("startSec"));
    }
    masterSound.play();
    if (document.querySelectorAll(".record")[0].getAttribute("status") == "off") {
        document.querySelectorAll(".record")[0].setAttribute("status", "on");
        document.querySelectorAll(".record")[0].classList.add("stoped");
        var mediaConstraints;
        const devices = navigator.mediaDevices.enumerateDevices()
        devices.then((value) => {
            // mediaConstraints = {
            //     audio: {
            //         deviceId: {
            //             exact: value[0].deviceId
            //         }
            //     },
            //     video: false
            // };
            mediaConstraints = {
                audio: true,
                video: false,
            };
            captureUserMedia(mediaConstraints, onMediaSuccess);
        });

    } else {
        document.querySelectorAll(".record")[0].setAttribute("status", "off");
        document.querySelectorAll(".record")[0].classList.remove("stoped");
        // MediaStream.stop() is deprecated; stop the recorder and release its tracks instead.
        mediaRecorder.stop();
        mediaRecorder.stream.getTracks().forEach(function (track) {
            track.stop();
        });
        masterSound.pause();
    }
}

function onMediaSuccess(stream) {
    var audio = document.createElement('audio');
    audio.controls = true;
    audio.muted = true;
    audio.srcObject = stream;
    audio.play();
    var audiosContainer = document.querySelectorAll(".audio_wrapper")[0];
    audiosContainer.appendChild(audio);
    audiosContainer.appendChild(document.createElement('hr'));
    mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'audio/wav';
    mediaRecorder.stream = stream;
    mediaRecorder.recorderType = MediaRecorderWrapper;
    mediaRecorder.audioChannels = 1;
    mediaRecorder.start();
    mediaRecorder.ondataavailable = function (blob) {
        audioFile = blob;
        var blobURL = URL.createObjectURL(blob);
        document.querySelectorAll(".append_audio")[0].setAttribute("src", blobURL);

        function blobToFile(theBlob, fileName) {
            theBlob.lastModifiedDate = new Date();
            theBlob.name = fileName;
            return theBlob;
        }

        submit();

        function submit() {
            var audioTest = new Audio(URL.createObjectURL(blob));
            audioTest.play();
        }
    };
} 

Solution

  • When trying to build high-quality audio with getDisplayMedia, in the past I've passed in MediaStreamConstraints that remove some of the default processing on the input track:

    stream = await navigator.mediaDevices.getDisplayMedia({
        video: true,
        audio: {
            channels: 2,
            autoGainControl: false,
            echoCancellation: false,
            noiseSuppression: false
        }
    });
    

    I'm still learning WebRTC myself, so I'm not sure whether these same properties can be passed when using getUserMedia and its MediaStreamConstraints, but I thought I'd share in case it's helpful. It also sounds like this might be related to the available devices. Good luck!
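
    In case it's useful, here is a rough, untested sketch of the same idea with getUserMedia: autoGainControl, echoCancellation, and noiseSuppression are standard MediaTrackConstraints (the standard name for the channel constraint is channelCount), though support varies by browser and device:

    // Rough sketch: request a microphone track with the default processing disabled.
    // Constraint support varies by browser/device, so treat these as hints.
    async function captureRawMic() {
        const stream = await navigator.mediaDevices.getUserMedia({
            video: false,
            audio: {
                channelCount: 2,
                autoGainControl: false,
                echoCancellation: false,
                noiseSuppression: false
            }
        });
        return stream;
    }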