I am currently developing a component that lets you record webcam videos and upload them directly to Amazon S3. For that purpose I use the RecordRTC library and Amazon S3 storage. I have discovered a strange issue, and I am not sure whether it has to do with RecordRTC blobs or with the Amazon configuration. When the file size is over 1 MB, the Amazon server hangs and after 20 seconds returns a timeout error. Could anyone help me figure this out? Here is my code for the recorder component (p() is the same as console.log()):
// Resolve the vendor-prefixed getUserMedia implementation (legacy API).
navigator.record_function = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
if (navigator.record_function) {
navigator.record_function(videoObj, function (stream) {
    // Preview the live camera stream in the <video> element.
    video.src = window.URL.createObjectURL(stream);
    video.play();
    $("#stop_recording").click(function () {
        // stream.stop() is deprecated; stop each track individually
        // where the newer API is available.
        if (stream.getTracks) {
            stream.getTracks().forEach(function (track) {
                track.stop();
            });
        } else {
            stream.stop();
        }
    });
    // init recorders
    audio_recorder = RecordRTC(stream, {type: "audio", bufferSize: 16384});
    video_recorder = RecordRTC(stream, videoOptions);
}, errBack);
}
$("#start_recording").click(function () {
    // Guard: the getUserMedia callback is asynchronous, so the recorders
    // may not exist yet if the user clicks "start" immediately.
    if (!video_recorder || !audio_recorder) {
        return;
    }
    // record the audio and video
    video_recorder.startRecording();
    audio_recorder.startRecording();
});
And here is the code for the uploader component:
// Populate the POST parameters.
// NOTE: S3 ignores any form field that follows the "file" part, so every
// parameter must be appended BEFORE the file itself.
fd.append('key', "users/" + Main.current_user.id + "/test.webm");
fd.append('AWSAccessKeyId', msg.access_key_id);
fd.append('acl', 'private');
fd.append('policy', msg.policy);
fd.append('signature', msg.signature);
// BUG FIX: '$Content-Type' is a Flash/Uploadify placeholder and was being
// sent to S3 as a literal string. Send the blob's real MIME type instead.
// The signed policy must permit it, e.g. ["starts-with", "$Content-Type", ""].
fd.append('Content-Type', file.type || 'video/webm');
fd.append('x-amz-server-side-encryption', 'AES256');
fd.append('success_action_status', '201');
fd.append('name', 'test.webm');
fd.append('Filename', 'test.webm');
fd.append("file", file);
xhr.open('POST', 'https://' + msg.bucket + '.s3.amazonaws.com', true);
// Log upload progress events for debugging the stalled-transfer issue.
xhr.upload.addEventListener('progress', function (e) {
    p(e);
}, false);
xhr.onreadystatechange = function () {
    p(xhr.readyState);
};
xhr.send(fd);
$("#stop_recording").click(function () {
    // Stop both recorders; each one hands its Blob back asynchronously
    // through the stopRecording callback.
    audio_recorder.stopRecording(function () {
        var blob = audio_recorder.getBlob();
        p(blob);
        // Audio upload is intentionally disabled for now:
        // VideoUploader.upload_user_audio(blob);
    });
    video_recorder.stopRecording(function () {
        var blob = video_recorder.getBlob();
        p(blob);
        VideoUploader.upload_user_video(blob);
    });
});
The error message on timeout is:
Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
I appreciate any help I can get, I am really lost here. Thanks in advance.
I have managed to find a pretty... strange solution for this issue. It appears the problem comes from the way RecordRTC saves the blob object, which makes it impossible to upload in Firefox 35 on Mac. I could not find the code in RecordRTC that causes the problem — the Blob seems to be generated correctly — but the workaround that worked for me was re-encoding the Blob into a new Blob via a FileReader round-trip.
video_recorder.stopRecording(function () {
    // Workaround (Firefox 35 / Mac): round-trip the RecordRTC Blob
    // through a FileReader and rebuild it before uploading, otherwise
    // the S3 POST stalls.
    var originalBlob = video_recorder.getBlob();
    var reader = new FileReader();
    reader.onload = function (e) {
        var reencodedBlob = new Blob([e.target.result], {type: "video/webm"});
        VideoUploader.upload_user_video(reencodedBlob);
    };
    reader.readAsArrayBuffer(originalBlob);
});
As for why that happens, I have no idea, but other projects that use the same technology are affected as well: http://videobooth.herokuapp.com/ (the same exact problem in the same exact browser — the upload hangs). Maybe this information could be useful for people working on RecordRTC, as I think this workaround could be made into a patch.
Solution is tested on Mac Firefox 35 and Mac Chrome 40.