Search code examples
javascript · reactjs · audio · electron · navigator

How to differentiate between System Audio and Microphone Audio in Javascript?


I'm developing an electron app with react. I have a functionality in which I want to record audio for both microphone and system and send them to backend in different variables.

For that, I have used the Navigator object from JS, but it is giving me combined audio of microphone and system. I want to save them differently in different variables.

For example:

const microphoneAudio = [microphone audio]
const systemAudio = [system audio]

Any help would be appreciated. Thank you


Solution

  • Here you can find how to differentiate system audio and microphone audio. Hope it will help you!

    /**
     * Record SYSTEM (desktop/tab) audio and upload it via sendAudioToAPI.
     *
     * NOTE: the original used getUserMedia({ audio }), which captures the
     * MICROPHONE — that is why mic and system audio were combined. System
     * audio must come from a display capture (getDisplayMedia). In Electron,
     * make sure the main process wires this up (e.g. via
     * session.setDisplayMediaRequestHandler / desktopCapturer) so the call
     * can succeed. Side effect: exposes the recorder on window.systemAudio.
     */
    function recordSystemAudio() {
        navigator.mediaDevices
            .getDisplayMedia({
                audio: { echoCancellation: true },
                // Most implementations refuse audio-only display capture;
                // request video too, then stop its track below.
                video: true,
            })
            .then((stream) => {
                // We only want audio — drop the mandatory video track.
                stream.getVideoTracks().forEach((track) => track.stop());

                // Route the capture through an AudioContext so the recorder
                // receives a pure-audio MediaStream.
                const audioContext = new AudioContext();
                const source = audioContext.createMediaStreamSource(stream);
                const destination = audioContext.createMediaStreamDestination();
                source.connect(destination);

                const tabStream = destination.stream;
                const tabMediaRecorder = new MediaRecorder(tabStream);

                // Exposed globally so the rest of the app can stop/inspect it.
                window.systemAudio = tabMediaRecorder;

                const chunks = [];

                // Only accumulate here. The original rebuilt a Blob from ALL
                // chunks on every dataavailable tick and re-sent it, so the
                // backend received overlapping, duplicated audio every 2 s.
                tabMediaRecorder.addEventListener('dataavailable', (event) => {
                    chunks.push(event.data);
                });

                // Build and send the file ONCE, when recording stops.
                tabMediaRecorder.addEventListener('stop', () => {
                    // MediaRecorder's default container is WebM/Opus, not
                    // WAV — label the file with its real format.
                    const audioBlob = new Blob(chunks, { type: 'audio/webm' });
                    const audioFile = new File([audioBlob], 'system-audio.webm', {
                        type: 'audio/webm',
                    });
                    sendAudioToAPI(audioFile);
                });

                // Emit a chunk every 2 seconds.
                tabMediaRecorder.start(2000);
            })
            .catch((error) => {
                // Never swallow capture failures silently — permission
                // denial is the most common failure mode here.
                console.error('Failed to capture system audio:', error);
            });
    }
    
    
    /**
     * Record MICROPHONE audio and upload it via sendMicrophoneAudioToAPI.
     *
     * Fixes over the original:
     *  - `mediaRecorder` was assigned without const/let, creating an
     *    implicit global (a ReferenceError in strict mode / ES modules).
     *  - A Blob of ALL accumulated chunks was rebuilt and re-sent on every
     *    dataavailable tick, duplicating audio at the backend; we now send
     *    the complete recording once, on 'stop'.
     *  - The file was labeled 'audio.wav' although MediaRecorder emits
     *    WebM/Opus by default.
     *  - The awaited getUserMedia had no error handling.
     *
     * Side effect: exposes the recorder on window.microPhoneAudio.
     */
    const microPhoneAudio = async () => {
        try {
            const stream = await navigator.mediaDevices.getUserMedia({
                audio: true,
            });

            const mediaRecorder = new MediaRecorder(stream);
            window.microPhoneAudio = mediaRecorder;

            const audioChunks = [];
            mediaRecorder.addEventListener('dataavailable', (event) => {
                audioChunks.push(event.data);
            });

            mediaRecorder.addEventListener('stop', () => {
                const audioBlob = new Blob(audioChunks, { type: 'audio/webm' });
                const audioFile = new File([audioBlob], 'microphone-audio.webm', {
                    type: 'audio/webm',
                });
                sendMicrophoneAudioToAPI(audioFile);
            });

            // Emit a chunk every 2 seconds; listeners are registered above,
            // before start, so no early chunk can be missed.
            mediaRecorder.start(2000);
        } catch (error) {
            // Permission denial / missing device must not fail silently.
            console.error('Failed to capture microphone audio:', error);
        }
    };