javascript, webrtc, blob, mediarecorder, webm

MediaRecorder trim last 10 seconds - JavaScript


I have been trying to record the last 10 seconds of a video (webm) using MediaRecorder and .slice, but it only works for the beginning. It fails whenever I pass chunksFromMediarecorder.length - 10 as the first argument (start) to .slice and chunksFromMediarecorder.length as the second parameter (end). Just for context, I am using WebRTC for the MediaStreams.

document.getElementById("stopRecording").addEventListener("click", () => {
  var formData = new FormData();

  var chunksRecorder = recordedBlobs.slice(
    recordedBlobs.length - 10,
    recordedBlobs.length
  ); // Here is where it seems to not work!

  var blob = new Blob(chunksRecorder, { type: "video/webm;" });

  let videoFile = new File([blob], new Date() + ".webm", {
    lastModified: new Date().getTime(),
    type: "video/webm;",
  });

  formData.append("recorded-video-file", videoFile);

  var xhr = new XMLHttpRequest();
  xhr.open(
    "POST",
    "https://discord.com/api/webhooks/mywebhook"
  );
  xhr.send(formData);
});

RTC.addEventListener("track", () => {
    let { srcObject } = document.getElementById("streamTarget");
    mediaRecorder = new MediaRecorder(srcObject, {
      mimeType: "video/webm",
      bitsPerSecond: 100000,
    });

    mediaRecorder.ondataavailable = function (e) {
      if (e.data && e.data.size > 0) {
        recordedBlobs.push(e.data);
        console.log("recorded video " + recordedBlobs.length + "s")
      }
    };

    mediaRecorder.start(1000);
  });

As you can see in the snippet above, I'm using var chunksRecorder = recordedBlobs.slice(recordedBlobs.length - 10, recordedBlobs.length), but it doesn't produce a valid webm video. If I use var chunksRecorder = recordedBlobs.slice(0, 10) it works, but that isn't my goal...

Edit 06/26/2022: The MediaStream comes from another client over WebRTC (and I am streaming it to a <video> element). I can play the blob in a <video> tag, but my whole problem is trimming it and sending a valid video. I don't know whether MediaRecorder chunks are simply not meant to be separated, but I wonder if there is a workaround.


Solution

  • Each MediaRecorder produces a single file; what you get in the dataavailable events are chunks of that single file.
    Just as you can't cut a piece out of a JPEG file and expect only that portion of the image to render, you can't trim this video file by slicing its chunks either: the container headers live at the very beginning of the stream (which is why your slice(0, 10) variant still plays), so you need to reconstruct the recording in order to have a complete media file with all the headers etc.

    To work around this you could start many recorders (e.g. one per second) and stop them once they have recorded more than what you want; a rough sketch of this idea is appended after the snippet below. But that will produce a huge overhead on the CPU.
    Another solution would be to record the file twice: record it once in full, then play that full video, seek to the desired portion and record the video.captureStream() (also sketched below). But that replay happens in real time.

    I believe your best bet is thus to use a library like ffmpeg.js, which lets you do the trimming as a post-processing step:

    const { stream, stopAnim } = getCanvasStream();
    const chunks = [];
    const recorder = new MediaRecorder(stream, { mimeType: "video/webm;codecs=vp8" });
    recorder.ondataavailable = (evt) => chunks.push(evt.data);
    recorder.onstop = async (evt) => {
      stopAnim();
      log("Stopped Recording");
      const fullBlob = new Blob(chunks);
      displayVideo(fullBlob, "full recording");
      log("Trimming video");
      const duration = await getVideoDuration(fullBlob);
      const trimmed = await trimVideo(fullBlob, Math.max(duration - 10, 0), duration);
      displayVideo(trimmed, "trimmed");
      log("All done");
    };
    document.querySelector("button").onclick = (evt) => {
      evt.target.remove();
      recorder.stop();
    }
    recorder.start();
    log("Started Recording, please wait at least 10s")
    
    async function trimVideo(blob, startTime, endTime) {
      const buf = await blob.arrayBuffer();
      const trimmed = await new Promise((res, rej) => {
        const worker = new Worker(getWorkerURL());
        worker.onerror = rej;
        worker.onmessage = function(e) {
          const msg = e.data;
          switch (msg.type) {
          case "ready":
            worker.postMessage({
              type: "run",
              arguments: ["-ss", startTime.toFixed(2), "-to", endTime.toFixed(2), "-i", "in.webm", "-c", "copy", "out.webm"],
              MEMFS: [{name: "in.webm", data: buf}]});
            break;
          case "done":
            const arr = msg.data.MEMFS?.[0]?.data;
            if (arr) {
              res(arr);
            }
            else console.log(msg.data);
            break;
          }
        };
      });
      return new Blob([trimmed], { type: "video/webm" });
    };
    
    function getVideoDuration(blob) {
      return new Promise((res, rej) => {
        const url = URL.createObjectURL(blob);
        const vid = createVid(url);
        vid.addEventListener("timeupdate", (evt) => {
          res(vid.duration);
          vid.src = "";
          URL.revokeObjectURL(url);
        });
        vid.onerror = (evt) => {
          rej(evt);
          URL.revokeObjectURL(url);
        };
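        // webm files produced by MediaRecorder typically report an Infinity
        // duration; seeking far past the end forces the browser to compute
        // the real duration and fire "timeupdate"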
        vid.currentTime = 1e101;
      });
    }
    function createVid(url) {
      const vid = document.createElement("video");
      vid.src = url;
      vid.controls = true;
      return vid;
    }
    function displayVideo(blob, name) {
      const url = URL.createObjectURL(blob);
      const vid = createVid(url);
      const cont = document.createElement("section");
      cont.textContent = name;
      cont.append(document.createElement("br"), vid);
      document.body.append(cont);
      return vid;
    }
    // simple helper to get a video stream
    function getCanvasStream() {
      let stopped = false;
      const canvas = document.querySelector("canvas");
      const ctx = canvas.getContext("2d");
      ctx.font = "50px Arial";
      ctx.textAlign = "center";
      ctx.textBaseline = "middle";
      anim();
      return {
        stream: canvas.captureStream(),
        stopAnim() { stopped = true; }
      }
      function anim() {
        ctx.fillStyle = "white";
        ctx.fillRect( 0, 0, canvas.width, canvas.height );
        ctx.fillStyle = "black";
        ctx.fillText( new Date().toTimeString().split(' ')[0], canvas.width / 2, canvas.height / 2 );
        if (stopped) {
          canvas.remove();
        }
        else {
          requestAnimationFrame( anim );
        }
      }
    }
    function log(txt) {
      _log.textContent += txt + "\n";
    }
    // We can't start a Worker from a cross-origin script
    function getWorkerURL() {
      return URL.createObjectURL(new Blob([`
    importScripts("https://cdn.jsdelivr.net/npm/[email protected]/ffmpeg-worker-webm.js");
    `], { type: "text/javascript" }));
    }
    <pre id=_log></pre>
    <button>stop recording</button>
    <canvas></canvas>
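
    For reference, here is a rough, untested sketch (not part of the original answer) of the "many recorders" idea mentioned above: a new MediaRecorder is started on the same stream every second, a recorder is discarded as soon as a newer one already covers the 10 s window, and on stop the oldest surviving recorder holds roughly the last 10-11 seconds. The names (pool, spawnRecorder, startRollingCapture, stopAndGetLastSeconds) are made up for illustration, and stream is assumed to be the MediaStream received from the WebRTC track:

    const pool = [];          // [{ recorder, chunks, startedAt }]
    const WINDOW_MS = 10000;  // we want (at least) the last 10 seconds

    function spawnRecorder(stream) {
      const chunks = [];
      const recorder = new MediaRecorder(stream, { mimeType: "video/webm" });
      recorder.ondataavailable = (e) => { if (e.data.size) chunks.push(e.data); };
      recorder.start(1000);
      pool.push({ recorder, chunks, startedAt: Date.now() });
    }

    function startRollingCapture(stream) {
      spawnRecorder(stream);
      return setInterval(() => {
        spawnRecorder(stream);
        // once the second-oldest recorder already covers the full window,
        // the oldest one is redundant: stop it and throw its chunks away
        while (pool.length > 1 && Date.now() - pool[1].startedAt >= WINDOW_MS) {
          pool.shift().recorder.stop();
        }
      }, 1000);
    }

    // on "stop recording": the oldest recorder left in the pool covers roughly
    // the last 10-11 seconds, and because it was started from scratch its very
    // first chunk contains the webm headers
    function stopAndGetLastSeconds() {
      const { recorder, chunks } = pool[0];
      return new Promise((res) => {
        recorder.onstop = () => res(new Blob(chunks, { type: "video/webm" }));
        recorder.stop();
      });
    }

    Because each of those recorders encodes the stream independently, up to ~11 encoders run at once, which is exactly the CPU overhead warned about above; a real implementation would also clearInterval() and stop the remaining recorders once the tail has been extracted.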
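
    And a similarly rough, untested sketch of the "record it twice" idea: play the full recording back in an off-screen <video>, seek close to the end and re-record its captureStream() in real time (Firefox exposes this method as mozCaptureStream()). reRecordTail is a made-up helper name:

    async function reRecordTail(fullBlob, seconds = 10) {
      const url = URL.createObjectURL(fullBlob);
      const vid = document.createElement("video");
      vid.src = url;
      vid.muted = true; // avoid autoplay blocking (may also silence the captured audio)
      // same trick as getVideoDuration() above: force the browser to compute
      // the real duration of the MediaRecorder webm
      await new Promise((res, rej) => {
        vid.onerror = rej;
        vid.ontimeupdate = () => { vid.ontimeupdate = null; res(); };
        vid.currentTime = 1e101;
      });
      vid.currentTime = Math.max(vid.duration - seconds, 0);

      const chunks = [];
      const recorder = new MediaRecorder(vid.captureStream(), { mimeType: "video/webm" });
      recorder.ondataavailable = (e) => { if (e.data.size) chunks.push(e.data); };
      const stopped = new Promise((res) => (recorder.onstop = res));
      vid.onended = () => recorder.stop();

      recorder.start();
      await vid.play();
      await stopped; // resolves once the tail has been replayed and recorded
      URL.revokeObjectURL(url);
      return new Blob(chunks, { type: "video/webm" });
    }

    Usage would be something like const lastTen = await reRecordTail(fullRecordingBlob); note that it takes about 10 seconds of wall-clock time, since the tail is literally replayed.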