I've made a simple setup: I get the webcam / phone camera stream, pass it on, and draw it onto an HTML 2D canvas.
But I've been having trouble figuring out how to show the stream with a delay of a few seconds — like a delay mirror.
I tried playing with ctx.globalAlpha = 0.005;
but this gives me a ghosting effect rather than actually delaying the stream.
Any idea how this can be achieved?
https://codepen.io/farisk/pen/LvmGGQ
// Shared state: the backing <canvas> we draw into and the hidden <video>
// element that receives the webcam stream.
var width = 0;
var height = 0;

var canvas = document.createElement('canvas');
var ctx = canvas.getContext('2d');
document.body.appendChild(canvas);

var video = document.createElement('video');
var track;
video.setAttribute('autoplay', true);
// Expose the video element globally for debugging in the console.
window.vid = video;
// Request the user's camera and route its stream into the hidden <video>
// element. The live video track is kept in `track` so the canvas click
// handler can stop and later restart the camera.
function getWebcam() {
  navigator.mediaDevices
    .getUserMedia({ video: true })
    .then(function (stream) {
      // The stream can be assigned directly; wrapping its video tracks in
      // a new MediaStream was redundant.
      video.srcObject = stream;
      video.play();
      // Grab the video track specifically (getTracks()[0] could be an
      // audio track if audio were ever added to the constraints).
      track = stream.getVideoTracks()[0];
    })
    .catch(function (e) {
      // .catch — unlike a second .then argument — also surfaces errors
      // thrown inside the success handler above.
      console.error('Rejected!', e);
    });
}
getWebcam();

// Render-loop state shared with loop()/startLoop() and the
// loadedmetadata handler below.
var rotation = 0;
var loopFrame;
var centerX;
var centerY;
var twoPI = Math.PI * 2;
// Draw the current video frame onto the canvas on every animation frame.
// NOTE: a low globalAlpha here only produces ghosting (old frames fading
// out slowly), not a true delayed stream.
function loop() {
  loopFrame = requestAnimationFrame(loop);
  ctx.drawImage(video, 0, 0, width, height);
  // Removed the stray ctx.restore(): there was no matching ctx.save(),
  // so the call was a silent no-op left over from an earlier experiment.
}
// Kick off the render loop; the returned handle is stored in loopFrame
// so the animation could be cancelled via cancelAnimationFrame(loopFrame).
function startLoop() {
  loopFrame = requestAnimationFrame(loop);
}
// Once the stream's intrinsic dimensions are known, size the canvas to
// match the video and start rendering.
video.addEventListener('loadedmetadata', function () {
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  width = canvas.width;
  height = canvas.height;
  centerX = width / 2;
  centerY = height / 2;
  startLoop();
});
// Clicking the canvas toggles the camera: stop the live track if we have
// one, otherwise re-request the webcam.
canvas.addEventListener('click', function () {
  if (!track) {
    getWebcam();
    return;
  }
  if (typeof track.stop === 'function') {
    track.stop();
  }
  track = null;
});
/* Let the video and canvas shrink to fit the page width while keeping
   their intrinsic aspect ratio. */
video,
canvas {
max-width: 100%;
height: auto;
}
You can buffer your stream and play back what you've buffered.
To achieve this with a MediaStream, you can make use of the MediaRecorder API along with the MediaSource API.
The basic idea is to record your stream as it goes and, for each new chunk produced, append it into the MediaSource's buffer.
Then we just have to wait, with the delayed video paused, for the delay we want before starting its playback.
// Show a delayed "mirror" of a MediaStream: record it with MediaRecorder,
// feed the recorded chunks into a MediaSource, and start playback of the
// delayed <video> only after `delay` ms so it always lags the live one.
(async () => {
  const delay = 3000; // ms between live and delayed playback
  const mimeType = `video/webm; codecs="vp8"`;
  const stream = await getStream();
  document.getElementById("realtime").srcObject = stream;

  const mediaSource = new MediaSource();
  const delayed = document.getElementById("delayed");
  delayed.src = URL.createObjectURL(mediaSource);
  // A SourceBuffer can only be created once the MediaSource is open.
  await new Promise((res) =>
    mediaSource.addEventListener("sourceopen", res, { once: true })
  );
  const sourceBuffer = mediaSource.addSourceBuffer(mimeType);
  const recorder = new MediaRecorder(stream, { mimeType });

  // appendBuffer() throws an InvalidStateError if called while the
  // SourceBuffer is still processing a previous append, so queue the
  // chunks and flush the queue each time the buffer goes idle again.
  const chunks = [];
  const flush = () => {
    if (sourceBuffer.updating || chunks.length === 0) return;
    sourceBuffer.appendBuffer(chunks.shift());
  };
  sourceBuffer.addEventListener("updateend", flush);
  recorder.ondataavailable = async ({ data }) => {
    if (mediaSource.readyState !== "open" || !data.size) {
      return;
    }
    chunks.push(await data.arrayBuffer());
    flush();
  };

  delayed.pause(); // hold back the autoplay attribute until the delay elapses
  recorder.start(50); // emit a recorded chunk every 50 ms
  setTimeout(() => delayed.play(), delay);
})();
// StackSnippet only: getUserMedia is blocked inside snippets, so we
// synthesize a MediaStream from a <canvas> animation that paints the
// current wall-clock time.
function getStream() {
  const canvas = document.createElement("canvas");
  const ctx = canvas.getContext("2d");
  ctx.font = "30px sans-serif";
  ctx.textAlign = "center";
  ctx.textBaseline = "middle";

  const drawClock = () => {
    const { width, height } = canvas;
    ctx.fillStyle = "white";
    ctx.fillRect(0, 0, width, height);
    ctx.fillStyle = "black";
    const timeText = new Date().toTimeString().split(" ")[0];
    ctx.fillText(timeText, width / 2, height / 2);
    requestAnimationFrame(drawClock);
  };
  drawClock();

  return canvas.captureStream();
}
<!-- Live preview, and the delayed copy fed from the MediaSource. -->
Realtime:<br>
<video id="realtime" autoplay muted controls></video><br> Delayed:<br>
<video id="delayed" autoplay muted controls></video>
And here it is as a fiddle, since StackSnippets are not very gUM-friendly.