Tags: javascript, node.js, ffmpeg, live-streaming

How do I add audio with FFmpeg when recording video from the browser and streaming to YouTube/Twitch?


I am working on a web application that lets the user stream video from their browser and simultaneously livestream to both YouTube and Twitch using FFmpeg. The application works fine when I don't send any audio, but I get the error below when I try to record both video and audio. I am new to FFmpeg, so any help would be greatly appreciated. Here is my repo if needed: https://github.com/toshvelaga/livestream

Here is my Node.js server with FFmpeg:

const child_process = require('child_process') // To be used later for running FFmpeg
const express = require('express')
const http = require('http')
const WebSocketServer = require('ws').Server
const NodeMediaServer = require('node-media-server')
const app = express()
const cors = require('cors')
const path = require('path')
const logger = require('morgan')
require('dotenv').config()

app.use(logger('dev'))
app.use(cors())

app.use(express.json({ limit: '200mb' }))
app.use(
  express.urlencoded({ limit: '200mb', extended: true, parameterLimit: 50000 })
)

var authRouter = require('./routes/auth')
var compareCodeRouter = require('./routes/compareCode')

app.use('/', authRouter)
app.use('/', compareCodeRouter)

if (process.env.NODE_ENV === 'production') {
  // serve static content
  // npm run build
  app.use(express.static(path.join(__dirname, 'client/build')))

  app.get('*', (req, res) => {
    res.sendFile(path.join(__dirname, 'client/build', 'index.html'))
  })
}

const PORT = process.env.PORT || 8080

app.listen(PORT, () => {
  console.log(`Server is starting on port ${PORT}`)
})

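// Note: the Express API listens on PORT above, while a separate HTTP
// server on port 3000 (below) carries the WebSocket connection for video data.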
const server = http.createServer(app).listen(3000, () => {
  console.log('Listening on PORT 3000...')
})


const wss = new WebSocketServer({
  server: server,
})

wss.on('connection', (ws, req) => {
  const ffmpeg = child_process.spawn('ffmpeg', [
    // works fine when I use this, but problems arise when I need audio
    // '-f',
    // 'lavfi',
    // '-i',
    // 'anullsrc',

    '-i',
    '-',

    '-f',
    'flv',
    '-c',
    'copy',
    `${process.env.TWITCH_STREAM_ADDRESS}`,
    '-f',
    'flv',
    '-c',
    'copy',
    `${process.env.YOUTUBE_STREAM_ADDRESS}`,
    // '-f',
    // 'flv',
    // '-c',
    // 'copy',
    // `${process.env.FACEBOOK_STREAM_ADDRESS}`,
  ])

  ffmpeg.on('close', (code, signal) => {
    console.log(
      'FFmpeg child process closed, code ' + code + ', signal ' + signal
    )
    ws.terminate()
  })

  ffmpeg.stdin.on('error', (e) => {
    console.log('FFmpeg STDIN Error', e)
  })

  ffmpeg.stderr.on('data', (data) => {
    console.log('FFmpeg STDERR:', data.toString())
  })

  ws.on('message', (msg) => {
    console.log('DATA', msg)
    // guard the write so a crashed FFmpeg process doesn't raise EPIPE here
    if (ffmpeg.stdin.writable) {
      ffmpeg.stdin.write(msg)
    }
  })

  ws.on('close', (e) => {
    console.log('kill: SIGINT')
    ffmpeg.kill('SIGINT')
  })
})

const config = {
  rtmp: {
    port: 1935,
    chunk_size: 60000,
    gop_cache: true,
    ping: 30,
    ping_timeout: 60,
  },
  http: {
    port: 8000,
    allow_origin: '*',
  },
}

var nms = new NodeMediaServer(config)
nms.run()
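As an aside, when debugging a pipeline like this it can help to confirm what the browser is actually sending before adjusting FFmpeg flags. Every WebM/Matroska stream begins with the EBML magic bytes 0x1A 0x45 0xDF 0xA3, so the first WebSocket message can be sanity-checked with a small helper (hypothetical, not part of the repo):

// Hypothetical helper: true if a Buffer starts with the WebM/Matroska
// EBML magic bytes 0x1A 0x45 0xDF 0xA3.
const isWebM = (chunk) =>
  chunk.length >= 4 &&
  chunk[0] === 0x1a &&
  chunk[1] === 0x45 &&
  chunk[2] === 0xdf &&
  chunk[3] === 0xa3

// e.g. log isWebM(msg) for the first message inside ws.on('message', ...)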

Here is my frontend code that records the video/audio and sends it to the server:

import React, { useState, useEffect, useRef } from 'react'
import Navbar from '../../components/Navbar/Navbar'
import './Dashboard.css'

const CAPTURE_OPTIONS = {
  audio: true,
  video: true,
}

function Dashboard() {
  const [mute, setMute] = useState(false)
  const videoRef = useRef()
  const ws = useRef()
  const mediaStream = useUserMedia(CAPTURE_OPTIONS)

  let liveStream
  let liveStreamRecorder

  if (mediaStream && videoRef.current && !videoRef.current.srcObject) {
    videoRef.current.srcObject = mediaStream
  }

  const handleCanPlay = () => {
    videoRef.current.play()
  }

  useEffect(() => {
    ws.current = new WebSocket(
      window.location.protocol.replace('http', 'ws') + // http: -> ws:, https: -> wss:
        '//' +
        'localhost:3000'
    )

    ws.current.onopen = () => {
      console.log('WebSocket Open')
    }

    return () => {
      ws.current.close()
    }
  }, [])

  const startStream = () => {
    liveStream = videoRef.current.captureStream(30) // 30 FPS
    liveStreamRecorder = new MediaRecorder(liveStream, {
      mimeType: 'video/webm;codecs=h264',
      videoBitsPerSecond: 3 * 1024 * 1024,
    })
    liveStreamRecorder.ondataavailable = (e) => {
      ws.current.send(e.data)
      console.log('send data', e.data)
    }
    // Start recording, and dump data every second
    liveStreamRecorder.start(1000)
  }
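
  // Note: with a 1000 ms timeslice the recorder emits WebM chunks that are
  // only decodable as one continuous byte stream (later chunks carry no
  // container header), which is why the server pipes every message straight
  // into FFmpeg's stdin.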

  const stopStream = () => {
    liveStreamRecorder.stop()
    ws.current.close()
  }

  const toggleMute = () => {
    setMute(!mute)
  }

  return (
    <>
      <Navbar />
      <div style={{ marginTop: '5rem' }} className='main'>
        <div id='container'>
          <video
            ref={videoRef}
            onCanPlay={handleCanPlay}
            autoPlay
            playsInline
            muted={mute}
          />
        </div>
        <div className='button-container'>
          <button onClick={startStream}>Go Live</button>
          <button onClick={stopStream}>Stop Recording</button>
          <button>Share Screen</button>
          <button onClick={toggleMute}>Mute</button>
        </div>
      </div>
    </>
  )
}

const useUserMedia = (requestedMedia) => {
  const [mediaStream, setMediaStream] = useState(null)

  useEffect(() => {
    async function enableStream() {
      try {
        const stream = await navigator.mediaDevices.getUserMedia(requestedMedia)
        setMediaStream(stream)
      } catch (err) {
        console.log(err)
      }
    }

    if (!mediaStream) {
      enableStream()
    } else {
      return function cleanup() {
        // stop every track (audio and video) so the devices are released
        mediaStream.getTracks().forEach((track) => {
          track.stop()
        })
      }
    }
  }, [mediaStream, requestedMedia])

  return mediaStream
}

export default Dashboard
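
One compatibility note on the recorder above: 'video/webm;codecs=h264' is generally only supported in Chromium-based browsers. A small sketch (the pickMimeType helper is hypothetical, not part of the repo) that probes for a recordable type via the standard MediaRecorder.isTypeSupported() check:

// Hypothetical helper: return the first mimeType this browser can record.
const pickMimeType = () => {
  const candidates = [
    'video/webm;codecs=h264', // Chromium: H.264 video, cheapest to repackage for FLV
    'video/webm;codecs=vp8,opus',
    'video/webm',
  ]
  return candidates.find((t) => MediaRecorder.isTypeSupported(t)) || ''
}

If the browser falls back to VP8/Opus, the server-side re-encode shown in the solution below becomes mandatory, since FLV can carry neither codec.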

Solution

  • So I got the audio to work after a bit of trial and error with FFmpeg. I'm not sure this is the optimal approach, but it works for the time being. The key change is re-encoding instead of '-c copy': the browser's MediaRecorder typically wraps its audio as Opus inside WebM, and the FLV container used for RTMP cannot carry Opus, so the audio has to be transcoded to AAC.

    Here is the full file: https://github.com/toshvelaga/livestream/blob/main/server/server.js

    const ffmpeg = child_process.spawn('ffmpeg', [
      '-i',
      '-',

      // video codec config: low latency, adaptive bitrate
      '-c:v',
      'libx264',
      '-preset',
      'veryfast',
      '-tune',
      'zerolatency',

      // audio codec config: sampling frequency (11025, 22050, 44100), bitrate 64 kbit/s
      '-c:a',
      'aac',
      '-strict',
      '-2',
      '-ar',
      '44100',
      '-b:a',
      '64k',

      // force overwrite
      '-y',

      // used for audio sync
      '-use_wallclock_as_timestamps',
      '1',
      '-async',
      '1',

      // '-filter_complex', 'aresample=44100', // resample audio to 44100 Hz, needed if input is not 44100
      // '-strict', 'experimental',
      '-bufsize',
      '1000',
      '-f',
      'flv',
      `${process.env.TWITCH_STREAM_ADDRESS}`,

      // video codec config: low latency, adaptive bitrate
      '-c:v',
      'libx264',
      '-preset',
      'veryfast',
      '-tune',
      'zerolatency',

      // audio codec config: sampling frequency (11025, 22050, 44100), bitrate 64 kbit/s
      '-c:a',
      'aac',
      '-strict',
      '-2',
      '-ar',
      '44100',
      '-b:a',
      '64k',

      // force overwrite
      '-y',

      // used for audio sync
      '-use_wallclock_as_timestamps',
      '1',
      '-async',
      '1',

      '-f',
      'flv',
      `${process.env.YOUTUBE_STREAM_ADDRESS}`,
    ])
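
    As a possible follow-up (a sketch, not something from the repo): the arguments above run two separate x264/AAC encodes, one per destination. FFmpeg's tee muxer can encode once and duplicate the encoded stream to both RTMP endpoints, which roughly halves the encoding cost:

    const ffmpeg = child_process.spawn('ffmpeg', [
      '-i',
      '-',

      // single video/audio encode shared by every destination
      '-c:v', 'libx264', '-preset', 'veryfast', '-tune', 'zerolatency',
      '-c:a', 'aac', '-ar', '44100', '-b:a', '64k',

      // the tee muxer requires explicit stream mapping
      '-map', '0:v', '-map', '0:a',
      '-f', 'tee',
      `[f=flv]${process.env.TWITCH_STREAM_ADDRESS}|[f=flv]${process.env.YOUTUBE_STREAM_ADDRESS}`,
    ])

    One caveat: by default a single failing tee output ends the whole stream; writing an output as [f=flv:onfail=ignore]... keeps the other destination alive if one ingest drops.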