Tags: node.js, firebase, express, ffmpeg, firebase-storage

Saving Video From URL as MP3 in Firebase Storage


I am working on a problem that has stumped me for the past couple of days. I am using Node.js with Express (v4.18.2) to eventually create a Firebase deployment that can take in a video URL and save an mp3 audio file to Firebase Storage. I have made some progress, but am still unsuccessful in some areas.

In the eventual deployment I cannot save the file locally using fs, but for this example I have shown that it works with fs: I am successfully saving a local .mp3 file.

First a few functions I have:

const axios = require('axios');
const fs = require('fs');
const { PassThrough } = require('stream');
const ffmpeg = require('fluent-ffmpeg');
const admin = require('firebase-admin'); // already initialized elsewhere; serviceAccount is my config object

async function downloadVideo(videoUrl) {
  try {
    const response = await axios.get(videoUrl, {
      responseType: 'stream',
    });

    if (response.status === 200) {
      return response.data;
    } else {
      throw new Error('Failed to fetch the video');
    }
  } catch (error) {
    throw new Error('Error fetching the video: ' + error.message);
  }
}
async function extractAudioFromVideo(videoUrl) {
  try {
    const videoStream = await downloadVideo(videoUrl);

    // Create a PassThrough stream to pipe the video data
    const passThrough = new PassThrough();
    videoStream.pipe(passThrough);

    const outputFile = 'output.mp3';
    const outputStream = fs.createWriteStream(outputFile);

    return new Promise((resolve, reject) => {
      const audioBuffers = [];

      passThrough.on('data', chunk => {
        audioBuffers.push(chunk);
        outputStream.write(chunk); // Write chunks to a local file
      });

      passThrough.on('error', err => {
        reject(err);
      });

      ffmpeg()
        .input(passThrough)
        .output('/dev/null') // Null output as a placeholder
        .outputOptions('-vn') // Extract audio only
        .noVideo()
        .audioQuality(0)
        .audioCodec('libmp3lame') // Set audio codec
        .format('mp3')
        .on('end', () => {
          const audioBuffer = Buffer.concat(audioBuffers);
          if (audioBuffer.length > 0) {
            resolve(audioBuffer);
          } else {
            reject(new Error('Empty audio buffer'));
          }
        })
        .on('error', err => reject(err))
        .run();
    });
  } catch (error) {
    throw new Error('Error extracting audio: ' + error.message);
  }
}
async function saveAudioToFirebase(audioBuffer, fileName) {
  try {
    let storage = admin.storage();
    let storageRef = storage.bucket(serviceAccount.storage_bucket_content);
    const file = storageRef.file(fileName); // Specify the desired file name here

    const renamedFileName = fileName.replace(/\.[^/.]+$/, '.mp3'); // Change the file extension to .mp3

    await file.save(audioBuffer, {
      metadata: {
        contentType: 'audio/mpeg', // Adjust the content type as needed
      },
    });

    await file.setMetadata({
      contentType: 'audio/mpeg',
    });

    await file.move(renamedFileName); // Rename the file with the .mp3 extension

    console.log('Audio saved to Firebase Storage.');
  } catch (error) {
    console.error('Error saving audio to Firebase Storage:', error);
  }
}
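
For reference, the Express wiring would look roughly like this (the /convert route and request shape below are placeholders for illustration, not my actual endpoint):

const express = require('express');
const app = express();
app.use(express.json());

// Placeholder route: accept a video URL, extract the audio, and upload it
app.post('/convert', async (req, res) => {
  try {
    const { videoUrl, fileName } = req.body; // e.g. { videoUrl: 'https://...', fileName: 'video.mp4' }
    const audioBuffer = await extractAudioFromVideo(videoUrl);
    await saveAudioToFirebase(audioBuffer, fileName);
    res.status(200).send('Audio saved to Firebase Storage');
  } catch (error) {
    res.status(500).send(error.message);
  }
});

app.listen(3000);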

What works:

  • Downloading the video via Axios
  • Saving to Firebase Storage (no initializing or pointer issues with Firebase)
  • Outputting a local .mp3 file called "output.mp3"
  • I am able to log the result of extractAudioFromVideo and get a buffer logged in my terminal

What doesn't work:

  • Saving a file to Firebase Storage that is actually an .mp3. The uploaded file says ".mp3" in the URL and has a content type of 'audio/mpeg', but it is in fact still an .mp4: it still contains video and plays as video in the browser window.

I am willing to use other libraries like tmp if suggested, as long as the solution works.


Solution

  • For those who are looking for a solution to this problem: I believe there is still a way to do this with PassThrough (a rough sketch of that variant is at the end of this answer), but I will provide my solution using the 'os' Node.js package.

    const fs = require('fs');
    const fetch = require('node-fetch');
    const ffmpeg = require('fluent-ffmpeg');
    const { tmpdir } = require('os');
    const { join } = require('path');

    function createTempFile(data, name) {
      const tmpFilePath = join(tmpdir(), name); // Define your temporary file path

      // Write data to the temporary file synchronously
      if (data) {
        fs.writeFileSync(tmpFilePath, data);
      } else {
        fs.openSync(tmpFilePath, 'w');
      }

      return tmpFilePath;
    }

    async function videoToMp3(videoUrl) {
      try {
        const response = await fetch(videoUrl);
        const buffer = await response.buffer();
        const temporaryFilePath = createTempFile(buffer, 'id_input.mp4');

        return new Promise((resolve, reject) => {
          const temporaryFilePath2 = join(tmpdir(), 'id_output.mp3');

          ffmpeg()
            .input(temporaryFilePath)
            .toFormat('mp3')
            .on('end', () => {
              resolve(temporaryFilePath2);
            })
            .on('error', (err, stdout, stderr) => {
              // console.error('Error converting to MP3:', err.message);
              // console.error('FFmpeg stdout:', stdout);
              // console.error('FFmpeg stderr:', stderr);
              reject(new Error(`Error converting to MP3: ${err}`));
            })
            .save(temporaryFilePath2);
        });
      } catch (error) {
        throw new Error('Error extracting audio: ' + error.message);
      }
    }
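
    To finish the original goal, the temp-file path that videoToMp3 resolves with can then be uploaded to Firebase Storage. A rough usage sketch, assuming admin and serviceAccount are set up the same way as in the question:

    async function convertAndUpload(videoUrl, destination) {
      const mp3Path = await videoToMp3(videoUrl);

      // Upload the local mp3 and set its content type
      const bucket = admin.storage().bucket(serviceAccount.storage_bucket_content);
      await bucket.upload(mp3Path, {
        destination, // e.g. 'audio/my-file.mp3'
        metadata: { contentType: 'audio/mpeg' },
      });

      // Clean up the temporary input/output files
      fs.unlinkSync(mp3Path);
      fs.unlinkSync(join(tmpdir(), 'id_input.mp4'));

      console.log('Audio saved to Firebase Storage.');
    }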
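
    As for the PassThrough route mentioned above, a rough (untested) sketch would collect ffmpeg's converted mp3 output rather than the raw downloaded bytes. Note that piping an MP4 into ffmpeg can fail when the moov atom sits at the end of the file, which is one reason the temp-file approach above is more reliable:

    const { PassThrough } = require('stream');

    function extractAudioToBuffer(videoStream) {
      return new Promise((resolve, reject) => {
        const chunks = [];
        const audioOut = new PassThrough();

        // Collect the converted mp3 bytes, not the original video bytes
        audioOut.on('data', chunk => chunks.push(chunk));
        audioOut.on('end', () => resolve(Buffer.concat(chunks)));
        audioOut.on('error', reject);

        ffmpeg(videoStream) // e.g. the stream returned by axios/fetch
          .noVideo()
          .audioCodec('libmp3lame')
          .format('mp3') // mp3 can be written to a pipe, unlike mp4
          .on('error', reject)
          .pipe(audioOut, { end: true });
      });
    }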