RecordRTC video upload to Amazon S3 timeout error - javascript

I am currently developing a component that lets you record webcam videos and upload them directly to Amazon S3. For that purpose I use the RecordRTC library and Amazon S3 storage. I have discovered a strange issue, and I am not sure whether it has to do with RecordRTC blobs or with the Amazon configuration: when the file size is over 1 MB, the Amazon server hangs and after 20 seconds returns a timeout error. Could anyone help me figure this out? Here is my code for the recorder component (p() is the same as console.log()):
navigator.record_function = (navigator.getUserMedia ||
                             navigator.webkitGetUserMedia ||
                             navigator.mozGetUserMedia ||
                             navigator.msGetUserMedia);
if (navigator.record_function) {
    navigator.record_function(videoObj, function (stream) {
        video.src = window.URL.createObjectURL(stream);
        video.play();
        $("#stop_recording").click(function () {
            stream.stop();
        });
        // init recorders
        audio_recorder = RecordRTC(stream, {type: "audio", bufferSize: 16384});
        video_recorder = RecordRTC(stream, videoOptions);
    }, errBack);
}
$("#start_recording").click(function () {
    // record the audio and video
    video_recorder.startRecording();
    audio_recorder.startRecording();
});
And for the uploader component:
// Populate the POST parameters.
fd.append('key', "users/" + Main.current_user.id + "/test.webm");
fd.append('AWSAccessKeyId', msg.access_key_id);
fd.append('acl', 'private');
fd.append('policy', msg.policy);
fd.append('signature', msg.signature);
fd.append('Content-Type', '$Content-Type');
fd.append('x-amz-server-side-encryption', 'AES256');
fd.append('success_action_status', '201');
fd.append('name', 'test.webm');
fd.append('Filename', 'test.webm');
fd.append("file", file);
xhr.open('POST', 'https://' + msg.bucket + '.s3.amazonaws.com', true);
xhr.upload.addEventListener('progress', function (e) {
    p(e);
}, false);
xhr.onreadystatechange = function () {
    p(xhr.readyState);
};
xhr.send(fd);
$("#stop_recording").click(function () {
// stop recorders
audio_recorder.stopRecording(function () {
var audio_blob = audio_recorder.getBlob();
p(audio_blob);
// VideoUploader.upload_user_audio(audio_blob);
}
);
video_recorder.stopRecording(function () {
var video_blob = video_recorder.getBlob();
p(video_blob);
VideoUploader.upload_user_video(video_blob);
});
});
The error message on timeout is:
Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
I appreciate any help I can get; I am really lost here. Thanks in advance.

I have managed to find a pretty... strange solution for this issue. It appears the problem comes from the way RecordRTC saves the blob object, which makes it impossible to upload in Firefox 35 on Mac. I could not find the code in RecordRTC that causes the problem, and the Blob seems to be generated correctly, but the workaround that worked for me was encoding the Blob again into a new Blob via a FileReader.
video_recorder.stopRecording(function () {
    var video_blob = video_recorder.getBlob();
    var arrayBuffer;
    var fileReader = new FileReader();
    fileReader.onload = function (ex) {
        arrayBuffer = this.result;
        video_blob = new Blob([arrayBuffer], {type: "video/webm"});
        VideoUploader.upload_user_video(video_blob);
    };
    fileReader.readAsArrayBuffer(video_blob);
});
As for why that happens, I have no idea, but other projects that use the same technology are affected as well, e.g. http://videobooth.herokuapp.com/ (the exact same problem in the exact same browser; the upload hangs). Maybe this information could be useful for people working on RecordRTC, as I think this workaround could be made into a patch.
The solution is tested on Mac Firefox 35 and Mac Chrome 40.
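As an aside, on current browsers the same re-encoding can be written without a FileReader, using Blob.prototype.arrayBuffer(). A minimal sketch of the equivalent workaround, assuming the same VideoUploader helper as above:
video_recorder.stopRecording(function () {
    var video_blob = video_recorder.getBlob();
    // Blob.arrayBuffer() returns a Promise resolving to the raw bytes
    video_blob.arrayBuffer().then(function (arrayBuffer) {
        // re-wrap the bytes in a fresh Blob, as in the FileReader workaround
        video_blob = new Blob([arrayBuffer], {type: "video/webm"});
        VideoUploader.upload_user_video(video_blob);
    });
});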

Related

I developed a recorder using the JavaScript Web Audio API, but the sound quality is poor

I have created a sing-along application using the JavaScript Web Audio API. It works perfectly on iOS Safari and Chrome, but the sound quality is poor on Android Chrome. To solve this, I tried changing the audio deviceId, but it still didn't work. Does anyone have information that might help?
One doubt: after recording, I pass the file to the server and play it on another page. I am wondering if this is causing the problem.
This is my code
function captureUserMedia(mediaConstraints, successCallback) {
    navigator.mediaDevices.getUserMedia(mediaConstraints).then(successCallback)["catch"](function (e) {
        console.error(e);
    });
}
function record() {
    if (getParameterByName("startSec").length !== 0) {
        masterSound.currentTime = getParameterByName("startSec");
    }
    masterSound.play();
    if (document.querySelectorAll(".record")[0].getAttribute("status") == "off") {
        document.querySelectorAll(".record")[0].setAttribute("status", "on");
        document.querySelectorAll(".record")[0].classList.add("stoped");
        var mediaConstraints;
        const devices = navigator.mediaDevices.enumerateDevices();
        devices.then((value) => {
            // mediaConstraints = {
            //     audio: {
            //         deviceId: {
            //             exact: value[0].deviceId
            //         }
            //     },
            //     video: false
            // };
            mediaConstraints = {
                audio: true,
                video: false,
            };
            captureUserMedia(mediaConstraints, onMediaSuccess);
        });
    } else {
        document.querySelectorAll(".record")[0].setAttribute("status", "off");
        document.querySelectorAll(".record")[0].classList.remove("stoped");
        mediaRecorder.stream.stop();
        masterSound.pause();
    }
}
function onMediaSuccess(stream) {
    var audio = document.createElement('audio');
    audio.controls = true;
    audio.files = true;
    audio.muted = true;
    audio.srcObject = stream;
    audio.play();
    var audiosContainer = document.querySelectorAll(".audio_wrapper")[0];
    audiosContainer.appendChild(audio);
    audiosContainer.appendChild(document.createElement('hr'));
    mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'audio/wav';
    mediaRecorder.stream = stream;
    mediaRecorder.recorderType = MediaRecorderWrapper;
    mediaRecorder.audioChannels = 1;
    mediaRecorder.start();
    mediaRecorder.ondataavailable = function (blob) {
        audioFile = blob;
        var blobURL = URL.createObjectURL(blob);
        document.querySelectorAll(".append_audio")[0].setAttribute("src", blobURL);
        function blobToFile(theBlob, fileName) {
            theBlob.lastModifiedDate = new Date();
            theBlob.name = fileName;
            return theBlob;
        }
        submit();
        function submit() {
            var audioTest = new Audio(URL.createObjectURL(blob));
            audioTest.play();
        }
    };
}
When trying to build high-quality audio with getDisplayMedia, in the past I've passed in MediaStreamConstraints that remove some of the default processing on the input track:
stream = await navigator.mediaDevices.getDisplayMedia({
    video: true,
    audio: {
        channels: 2,
        autoGainControl: false,
        echoCancellation: false,
        noiseSuppression: false
    }
});
I'm still learning WebRTC myself, so I'm not sure whether these same properties can be passed when using getUserMedia and MediaStreamConstraints, but I thought I'd share in case it's helpful. It sounds like this might also be about the available devices. Good luck!
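For what it's worth, a minimal sketch of the same processing-disabling constraints passed to getUserMedia instead (these three constraint names are standard for audio tracks; whether a given device honors them is browser-dependent):
async function getRawMicStream() {
    // disable the browser's default input processing on the microphone
    return navigator.mediaDevices.getUserMedia({
        video: false,
        audio: {
            autoGainControl: false,
            echoCancellation: false,
            noiseSuppression: false
        }
    });
}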
Had a similar issue where we were getting complaints about very low sound/gain (barely audible) with our HTML/JS recording client when running on Chrome on some Android devices.
Ended up buying an older Samsung phone (Galaxy A8) to easily replicate the issue.
The culprit was echoCancellation being set to false. With it disabled, we had a very low volume on the recorded audio. The solution was to set echoCancellation to true.
We ended up removing the constraint altogether and relied on each browser's defaults (echoCancellation is enabled by default on Chrome, Safari, and Firefox).
Worth mentioning that autoGainControl and noiseSuppression inherit the value of echoCancellation; more exactly, if you only set audio: {echoCancellation: true}, the other two constraints will also be set to true.
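In other words, given that inheritance behavior, a single constraint is enough; a minimal sketch:
navigator.mediaDevices.getUserMedia({
    // echoCancellation: true implicitly enables autoGainControl and
    // noiseSuppression as well, unless they are overridden explicitly
    audio: { echoCancellation: true },
    video: false
}).then(function (stream) {
    // hand the stream to the recorder as usual
});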

Detecting WebAudio sound end

I'm using JavaScript to build a radio and I'm trying to give it different functionalities, but I need to be able to detect the end of a song to make it work. Is there any way to do this?
I've tried different methods I've found online, like .ended and such, but I don't think those work without using the HTML audio tag. So I tried to make an audio tag that uses the same data for the source that my JS radio uses and to get the file length, so I could stop my sourceNode at the end time and make a new one, but I keep getting null returned as the data, so that doesn't work either.
I want to do something like:
context.onended = function() {
    $.ajax({
        url: '../scripts/radio.php',
        data: {
            attr1: 'value1'
        },
        success: function(data) {
            console.log(data);
            fileChosen = true;
            setupAudioNodes();
            var request = new XMLHttpRequest();
            request.addEventListener("progress", updateProgress);
            request.addEventListener("load", transferComplete);
            request.addEventListener("error", transferFailed);
            request.addEventListener("abort", transferCanceled);
            request.open('GET', data, true);
            request.responseType = 'arraybuffer';
            // When loaded, decode the data
            request.onload = function() {
                $("#title").html("Title");
                $("#album").html("Goes");
                $("#artist").html("Here");
                onWindowResize();
                $("#title, #artist, #album").css("visibility", "visible");
                // decode the data
                context.decodeAudioData(request.response, function(buffer) {
                    // when the audio is decoded, play the sound
                    sourceNode.buffer = buffer;
                    sourceNode.start(0);
                    $("#freq, body").addClass("animateHue");
                // on error
                }, function(e) {
                    console.log(e);
                });
            };
            request.send();
        }
    });
};
I want this to run at the end of a song and play the next file, which would work if I could get the end time of the currently playing song.
To fix the above issue, I added the .ended handler inside the function where the source node is set up:
function setupAudioNodes() {
    // set up an analyser
    analyser = context.createAnalyser();
    // create a buffer source node
    sourceNode = context.createBufferSource();
    // connect source to analyser as link
    sourceNode.connect(analyser);
    // and connect source to destination
    sourceNode.connect(context.destination);
    // start updating
    rafID = window.requestAnimationFrame(updateVisualization);
    // I had to place the handler for the .ended event inside the function
    // where sourceNode is set up.
    sourceNode.onended = function() {
        sourceNode.stop(0);
        playFirst();
    };
}
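For completeness, here is a sketch of what a chaining playFirst() could look like. The playlist array and track-advancing logic are hypothetical (the question does not show them), and each track needs a fresh buffer source, since an AudioBufferSourceNode can only be started once:
var playlist = ['song1.mp3', 'song2.mp3'];  // hypothetical track list
var trackIndex = 0;

function playFirst() {
    var request = new XMLHttpRequest();
    request.open('GET', playlist[trackIndex % playlist.length], true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        context.decodeAudioData(request.response, function(buffer) {
            setupAudioNodes();  // creates a fresh sourceNode with onended attached
            sourceNode.buffer = buffer;
            sourceNode.start(0);
            trackIndex++;
        });
    };
    request.send();
}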

How to send a getUserMedia recorded stream to a Node.js server in real time

I am able to send a stream blob to a Node.js server using socket.io, but the video player blinks while the blob data is updated. I want it to run smoothly. How can I send the data without the video player blinking? Here is my server code:
var express = require("express");
var app = express();
var http = require("http").Server(app);
var io = require("socket.io")(http);
var fs = require("fs");

app.use(express.static('public'));

app.get("/", function(req, res) {
    res.sendFile(__dirname + "/public/index.html");
});

io.on("connection", function(socket) {
    console.log("A user is connected");
    socket.on("send", function(data) {
        console.log(data);
        socket.emit("data", data);
    });
    socket.on("disconnect", function() {
        console.log("A user is disconnected");
    });
});

http.listen(3000, function() {
    console.log("Server is started at port 3000\nTo close use Ctrl+C");
});
And here is my client-side code:
<html>
<head>
    <title>Testing</title>
    <script src="socket.io/socket.io.js"></script>
    <script type="text/javascript" src="MediaStreamRecorder.js"></script>
</head>
<body>
    <video autoplay="true" id="video"></video>
    <script type="text/javascript">
        var socket = io();
        window.URL.createObjectURL = window.URL.createObjectURL || window.URL.webkitCreateObjectURL || window.URL.mozCreateObjectURL || window.URL.msCreateObjectURL;
        socket.on("data", function(data) {
            var binaryData = [];
            binaryData.push(data);
            videoElement = document.getElementById('video');
            videoElement.src = window.URL.createObjectURL(new Blob(binaryData, {type: "video/webm"}));
        });
        var mediaConstraints = {
            video: true
        };
        navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
        navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);
        function onMediaSuccess(stream) {
            var arrayOfStreams = [stream];
            var medias = new MediaStreamRecorder(stream);
            medias.ondataavailable = function(blob) {
                socket.emit("send", blob);
            };
            medias.start();
        }
        function onMediaError(e) {
            console.error('media error', e);
        }
    </script>
</body>
</html>
I have changed the timeslice value in the MediaStreamRecorder API to 500, so data is sent to the server every 500 milliseconds, but the video still blinks on the webpage. I need this to be real time. Any help will be appreciated.
You're using socket.io, which uses a TCP connection. If you want to make your application real-time, something like Skype, you must use a UDP connection.
If one frame lags behind, your application should ignore it, but in your case you're using a TCP connection, so frames will always be delivered in order. Firewalls are also an issue for TCP connections. You said that you tried setting the timeslice to 500 ms, which is too big for a real-time application; that's why your video is blinking.
If you're willing to give up the TCP connection, I have a small solution for this. I've tried it and it is working fine.
https://www.youtube.com/watch?v=ieBtXwHvoNk
Just one problem: in this case you cannot send your packets over the WAN directly, as you can easily do with websockets. You have to implement STUN/TURN or something like that on your server.
If you still have some doubts, see this GitHub issue and read all the replies: https://github.com/socketio/socket.io/issues/1175
I hope this helps.
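The UDP-like transport hinted at above is an unreliable, unordered WebRTC data channel. A minimal sketch of configuring one (signaling is omitted, and pc stands in for an already-signaled RTCPeerConnection):
var pc = new RTCPeerConnection({
    iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
});
// ordered: false + maxRetransmits: 0 gives UDP-like semantics:
// late or lost chunks are dropped instead of blocking newer ones
var channel = pc.createDataChannel('video', {
    ordered: false,
    maxRetransmits: 0
});
channel.onopen = function() {
    // send recorded chunks here instead of socket.emit("send", blob)
};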
But, it is blinking while updating the blob data in video player.
You are changing the .src of the <video> element.
URL.createObjectURL() and MediaStreamRecorder are not necessary.
Instead of changing the .src of the <video> element, you can pass the MediaStream object once and set the <video> element's .srcObject to the passed MediaStream.
// Minimal version:
videoElement = document.getElementById("video");

function onMediaSuccess(stream) {
    if (videoElement.srcObject === null) {
        videoElement.srcObject = stream;
    }
}

// Self-contained demo, using captureStream() on a <video> element
// in place of getUserMedia as the stream source:
var videoElement = document.getElementById("video");
videoElement.oncanplay = function() {
    videoElement.play();
};

var media = document.createElement("video");
media.src = "https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4";
media.oncanplay = function() {
    media.play();
    var stream = media.captureStream();
    onMediaSuccess(stream);
};

function onMediaSuccess(stream) {
    if (videoElement.srcObject === null) {
        videoElement.srcObject = stream;
    }
}

<video id="video"></video>
If the requirement is to send a Blob, you can use MediaSource: convert the Blob to an ArrayBuffer using FileReader or fetch(), and append the ArrayBuffer to a SourceBuffer; see Unable to stream video over a websocket to Firefox.
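A minimal sketch of that MediaSource approach, assuming chunks arrive as WebM Blobs from a single recorder session (the codec string is an assumption and must match the recorder's output; appends are queued because a SourceBuffer accepts only one append at a time):
var videoElement = document.getElementById("video");
var mediaSource = new MediaSource();
var sourceBuffer = null;
var queue = [];

videoElement.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener("sourceopen", function() {
    sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8,vorbis"');
    sourceBuffer.addEventListener("updateend", appendNext);
});

socket.on("data", function(blob) {
    // Response.arrayBuffer() converts the Blob; FileReader works as well
    new Response(blob).arrayBuffer().then(function(buffer) {
        queue.push(buffer);
        appendNext();
    });
});

function appendNext() {
    if (sourceBuffer && !sourceBuffer.updating && queue.length) {
        sourceBuffer.appendBuffer(queue.shift());
    }
}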

Calling getUserMedia with new constraints causes black screen (MediaStream.ended=true)

On my Nexus 4 (Android 4.4.4) I am trying to switch between the 'user' facing camera and the 'environment' facing camera.
Accessing either one directly works.
Switching between them by making another call to navigator.getUserMedia() with new constraints fails. The failure results in a black video screen and MediaStream.ended=true.
Why is MediaStream.ended=true on my second call to getUserMedia?
In my view I dynamically create buttons for the number of video sources, two in this case. Clicking a button calls camera.getUserMedia() and passes in a media source:
camera.getUserMedia = function(source) {
    var constraints = {
        video: true,
        audio: false
    };
    if (source) {
        constraints.video = {optional: [{
            sourceId: source.id
        }]};
    }
    navigator.getMedia(
        constraints,
        function(stream) {
            var vendorURL = window.URL || window.webkitURL;
            video.src = vendorURL.createObjectURL(stream);
            video.play();
            streaming = true;
        },
        function(err) {
            ...
        }
    );
};
I solved this problem by storing the stream on the camera object and calling stop() on it before binding the new stream to the video element. I'm not really sure what exactly is happening, though (maybe somebody can add the explanation in the comments).
camera.getUserMedia = function(source) {
    if (camera.stream) {
        camera.stream.stop();
    }
    ...
    navigator.getMedia(
        constraints,
        function(stream) {
            camera.stream = stream;
            ...
        },
        function(err) {
            ...
        }
    );
};
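As an aside, MediaStream.stop() has since been removed from browsers; on current implementations the equivalent cleanup stops each track individually. A minimal sketch:
if (camera.stream) {
    // MediaStream.stop() is deprecated; stop each track instead
    camera.stream.getTracks().forEach(function(track) {
        track.stop();
    });
}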

WebRTC works in Chrome but not Firefox

I read several other questions on related issues, but none answered my question. I have an odd issue where I am able to use WebRTC to audio chat from Chrome to Firefox but not Firefox to Chrome.
Basically, when a user wishes to audio chat, he/she clicks a button, #audioChatBtn, which uses getUserMedia() to set up a stream. The thing is, clicking #audioChatBtn in Firefox doesn't fire the onaddstream callback in Chrome, but clicking the button in Chrome fires onaddstream in Firefox. Thus, I can audio chat from Chrome to Firefox but not the other way around. I have been trying to figure this out for several hours, but I'm hoping maybe someone here has an answer.
Relevant source:
var configuration = {
    'iceServers': [
        { url: 'stun:stun.l.google.com:19302' },
        { url: 'stun:stun1.l.google.com:19302' },
        { url: 'stun:stun2.l.google.com:19302' },
        { url: 'stun:stun3.l.google.com:19302' },
        { url: 'stun:stun4.l.google.com:19302' }
    ]
};
var pc = new RTCPeerConnection(configuration);
var myStream = null;
var currentAudioIndex = 0; // Number of created channels
var myAudioEnabled = false;

// send any ice candidates to the other peer
pc.onicecandidate = function (evt) {
    if (evt.candidate)
        $(document).trigger("persistState", { mode: 'rtc', 'candidate': evt.candidate });
};

// let the 'negotiationneeded' event trigger offer generation
pc.onnegotiationneeded = function () {
    pc.createOffer(localDescCreated, logError);
};

// once the remote stream arrives, play it in an audio element
pc.onaddstream = function (evt) {
    console.log('creating and binding audio');
    var idx = (currentAudioIndex++);
    var audioElement = $('#audio' + idx);
    if (audioElement.length == 0) {
        var audio = $('<audio id="audio' + idx + '" autoplay>');
        $('body').append(audio);
        audioElement = $('#audio' + idx);
    }
    var audioObject = audioElement[0];
    attachMediaStream(audioObject, evt.stream);
};

function localDescCreated(desc) {
    pc.setLocalDescription(desc, function () {
        $(document).trigger("persistState", { mode: 'rtc', 'sdp': pc.localDescription });
    }, logError);
}

function logError(e) {
    bootbox.alert("Audio chat could not be started.");
}

function hasGetUserMedia() {
    return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
              navigator.mozGetUserMedia || navigator.msGetUserMedia);
}

server.onPersist = function(msg) {
    if (msg.mode == "rtc") {
        if (msg.sdp)
            pc.setRemoteDescription(new RTCSessionDescription(msg.sdp), function () {
                // if we received an offer, we need to answer
                if (pc.remoteDescription.type == 'offer')
                    pc.createAnswer(localDescCreated, logError);
            }, logError);
        else
            pc.addIceCandidate(new RTCIceCandidate(msg.candidate));
    }
};

// On click, start audio chat from this user.
$('#audioChatBtn').click(function() {
    if (!hasGetUserMedia()) {
        bootbox.alert('Audio conferencing is not supported by your browser. (Currently only supported by Chrome, Firefox, and Opera web browsers.)');
        return;
    }
    if (myAudioEnabled) {
        myStream.stop();
        displayAlert('Streaming closed', 'Audio chat is off');
        $('#audioChatBtn').removeClass('btn-success').addClass('btn-primary');
    } else {
        getUserMedia({ video: false, audio: true }, function (localMediaStream) {
            myStream = localMediaStream;
            pc.addStream(localMediaStream);
            displayAlert('Streaming...', 'Audio chat is enabled');
            $('#audioChatBtn').removeClass('btn-primary').addClass('btn-success');
        }, logError);
    }
    myAudioEnabled = !myAudioEnabled;
});
What I've tried
Tried using 'optional': [{ 'DtlsSrtpKeyAgreement': 'true' }] in the configuration after reading this question
Tried creating a new RTCPeerConnection() for each request
Tried using native browser functions instead of adapter.js
Explored the Web Audio API instead of getUserMedia()
Firefox does not currently support onnegotiationneeded, because we currently don't support renegotiation of an existing connection. All addStream/addTrack calls and a single createDataChannel (if you want to use them) need to be done before createOffer() or createAnswer(). You can call createDataChannel() after you connect, if you created one before createOffer().
Adding a stream after the peers are connected won't work.
An (annoying) alternative is to create a new set of PeerConnections to replace the old ones (using a DataChannel in the old pair as a signaling channel for lower latency).
Resolving this is high on our priority list, but it will take a few more releases.
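In practice, for the code above, that means acquiring the stream and calling addStream before the offer is created. A minimal sketch of a Firefox-compatible ordering, reusing the question's own helpers (signaling glue omitted):
getUserMedia({ video: false, audio: true }, function (localMediaStream) {
    myStream = localMediaStream;
    // add the stream BEFORE creating the offer; Firefox will not
    // renegotiate an already-established connection
    pc.addStream(localMediaStream);
    pc.createOffer(localDescCreated, logError);
}, logError);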
After a lot of debugging, I came to realize that the bug has nothing to do with my code but with Firefox's implementation of WebRTC. Firefox doesn't trigger the onnegotiationneeded callback, so I have to fire it hackily using a timeout (and hope that the stream information has been relayed to the remote client before the function runs). Obviously this is a Firefox bug; I will report it, hoping that they fix it in the next build.
getUserMedia({ video: false, audio: true }, function (localMediaStream) {
    myStream = localMediaStream;
    pc.addStream(localMediaStream);
    displayAlert('Streaming...', 'Audio chat is enabled');
    $('#audioChatBtn').removeClass('btn-primary').addClass('btn-success');
    // Need this for Firefox
    if (webrtcDetectedBrowser == 'firefox')
        setTimeout(pc.onnegotiationneeded, 5000);
}, logError);
