Calling getUserMedia with new constraints causes black screen (MediaStream.ended=true) - javascript

On my Nexus 4 (Android 4.4.4) I am trying to switch between the 'user'-facing camera and the 'environment'-facing camera.
Accessing either one directly works.
Switching between them by making another call to navigator.getUserMedia() with new constraints fails. The failure results in a black video and MediaStream.ended=true.
Why is MediaStream.ended=true on my second call to getUserMedia?
In my view I dynamically create a button for each video source, two in this case. Clicking a button calls camera.getUserMedia() and passes in a media source:
camera.getUserMedia = function(source){
    var constraints = {
        video: true,
        audio: false
    };
    if(source){
        constraints.video = {optional: [{
            sourceId: source.id
        }]};
    }
    navigator.getMedia(
        constraints,
        function(stream) {
            var vendorURL = window.URL || window.webkitURL;
            video.src = vendorURL.createObjectURL(stream);
            video.play();
            streaming = true;
        },
        function(err) {
            ...
        }
    );
};

I have solved this problem by storing the stream on the camera object and calling stop() on it before binding a new stream to the video element. I'm not entirely sure what is happening under the hood, though (maybe somebody can add the explanation in the comments).
camera.getUserMedia = function(source){
    if(camera.stream){
        camera.stream.stop();
    }
    ...
    navigator.getMedia(
        constraints,
        function(stream) {
            camera.stream = stream;
            ...
        },
        function(err) {
            ...
        }
    );
};
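Note that MediaStream.stop() has since been removed from browsers; a rough sketch of the same fix on current browsers would stop each track on the old stream instead:
camera.getUserMedia = function(source){
    if(camera.stream){
        // Stop every track so the device is released before the next request.
        camera.stream.getTracks().forEach(function(track){
            track.stop();
        });
    }
    // ... build constraints and call getUserMedia as before ...
};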

Related

I developed the recording using the JavaScript web audio API, but the sound quality is poor

I have created a sing-along application using the JavaScript Web Audio API. It worked perfectly on iOS Safari and Chrome, but the sound quality was poor on Android Chrome. To solve this, I tried changing the audio deviceId, but that didn't help. Does someone have information that might help?
One doubt: after recording, I pass the file to the server and play it on another page. I am wondering if this is causing the problem.
This is my code:
function captureUserMedia(mediaConstraints) {
    navigator.mediaDevices.getUserMedia(mediaConstraints).then(onMediaSuccess)["catch"]();
}

function record() {
    if (getParameterByName("startSec").length !== 0) {
        masterSound.currentTime = getParameterByName("startSec");
    }
    masterSound.play();
    if (document.querySelectorAll(".record")[0].getAttribute("status") == "off") {
        document.querySelectorAll(".record")[0].setAttribute("status", "on");
        document.querySelectorAll(".record")[0].classList.add("stoped");
        var mediaConstraints;
        const devices = navigator.mediaDevices.enumerateDevices();
        devices.then((value) => {
            // mediaConstraints = {
            //     audio: {
            //         deviceId: {
            //             exact: value[0].deviceId
            //         }
            //     },
            //     video: false
            // };
            mediaConstraints = {
                audio: true,
                video: false,
            };
            captureUserMedia(mediaConstraints, onMediaSuccess);
        });
    } else {
        document.querySelectorAll(".record")[0].setAttribute("status", "off");
        document.querySelectorAll(".record")[0].classList.remove("stoped");
        mediaRecorder.stream.stop();
        masterSound.pause();
    }
}

function onMediaSuccess(stream) {
    var audio = document.createElement('audio');
    audio.controls = true;
    audio.files = true;
    audio.muted = true;
    audio.srcObject = stream;
    audio.play();
    var audiosContainer = document.querySelectorAll(".audio_wrapper")[0];
    audiosContainer.appendChild(audio);
    audiosContainer.appendChild(document.createElement('hr'));
    mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'audio/wav';
    mediaRecorder.stream = stream;
    mediaRecorder.recorderType = MediaRecorderWrapper;
    mediaRecorder.audioChannels = 1;
    mediaRecorder.start();
    mediaRecorder.ondataavailable = function (blob) {
        audioFile = blob;
        var blobURL = URL.createObjectURL(blob);
        document.querySelectorAll(".append_audio")[0].setAttribute("src", blobURL);
        function blobToFile(theBlob, fileName) {
            theBlob.lastModifiedDate = new Date();
            theBlob.name = fileName;
            return theBlob;
        }
        submit();
        function submit() {
            var audioTest = new Audio(URL.createObjectURL(blob));
            audioTest.play();
        }
    };
}
When trying to build high-quality audio with getDisplayMedia, in the past I've passed in MediaStreamConstraints that remove some of the default processing on the input track:
stream = await navigator.mediaDevices.getDisplayMedia({
    video: true,
    audio: {
        channels: 2,
        autoGainControl: false,
        echoCancellation: false,
        noiseSuppression: false
    }
});
I'm still learning WebRTC myself, so I'm not sure if these same properties can be passed when using getUserMedia and MediaConstraints, but I thought I'd share in case helpful. It sounds like this might also be about available devices. Good luck!
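For what it's worth, echoCancellation, autoGainControl and noiseSuppression are also standard MediaTrackConstraints for getUserMedia audio, so a comparable, untested sketch for microphone capture might look like the following (inside an async function; channelCount is the constraint name for channels there):
// Sketch: request a microphone track with the default processing disabled.
// Browsers treat these values as hints unless wrapped in { exact: ... }.
const micStream = await navigator.mediaDevices.getUserMedia({
    video: false,
    audio: {
        channelCount: 2,
        autoGainControl: false,
        echoCancellation: false,
        noiseSuppression: false
    }
});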
Had a similar issue where we were getting complaints about very low sound/gain - barely audible - with our HTML/JS recording client when running on Chrome on some Android devices.
Ended up buying an older Samsung phone (Galaxy A8) to easily replicate the issue.
The culprit was echoCancellation being set to false. With it disabled, we had very low volume on the recorded audio. The solution was to set echoCancellation to true.
We ended up removing the constraint altogether and relied on each browser's defaults (echoCancellation is enabled by default on Chrome, Safari, Firefox).
Worth mentioning that autoGainControl and noiseSuppression inherit the value of echoCancellation: more precisely, if you only set audio: {echoCancellation: true}, the other two constraints will also be set to true.
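A minimal sketch of the two options described here - relying on browser defaults, or setting echoCancellation explicitly - might look like this (inside an async function; variable names are illustrative):
// Option 1: rely on the browser's defaults; echoCancellation (and with it
// autoGainControl and noiseSuppression) stays enabled on Chrome/Safari/Firefox.
const defaultStream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });

// Option 2: set echoCancellation explicitly; per the note above, the other
// two processing constraints follow the same value.
const explicitStream = await navigator.mediaDevices.getUserMedia({
    audio: { echoCancellation: true },
    video: false
});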

Multiple webcams per browser page

We have a laptop with one built-in webcam and 2 external USB webcams. I would like to receive images from all three webcams at the same time. I know that since 2018 this is possible.
I am using the following code to work with one camera, but how do I display the image from all three cameras at once?
<script>
    var video = null;
    var canvas = null;
    var canvasContext = null;
    var webimage = null;
    var statusLabel = null;

    function initVideo() {
        video = document.getElementById("monitor");
        statusLabel = document.getElementById("LBLSTATUS");
        navigator.webkitGetUserMedia({video: true}, gotStream, noStream);
    }

    function setStatus(aStatus) {
        statusLabel.innerHTML = aStatus;
    }

    function gotStream(stream) {
        video.onerror = function () {
            stream.stop();
            streamError();
        };
        video.srcObject = stream;
        //video.src = webkitURL.createObjectURL(stream); -> Deprecated
    }

    function noStream() {
        setStatus('No camera available.');
    }

    function streamError() {
        setStatus('Camera error.');
    }
</script>
You need to get all three cameras' deviceIds. Once you have those, make three calls to getUserMedia, each with the respective camera's id.
navigator.mediaDevices.getUserMedia({
    video: {
        deviceId: {
            exact: videoSource
        },
    },
}).then(function(video) {
    const localVidElem = document.getElementById('localVideo1');
    localVidElem.srcObject = video;
})
<video id="localVideo1"></video>
Make sure you have the correct deviceId, then assign the resulting stream to the video element's srcObject. For more info, please read the official WebRTC docs.
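As a rough, untested sketch of how that might be wired up - assuming three video elements with ids localVideo1 through localVideo3, and that camera permission has already been granted so enumerateDevices() returns usable deviceIds:
navigator.mediaDevices.enumerateDevices().then(function(devices) {
    // Keep only cameras and open one stream per device.
    const cameras = devices.filter(function(d) { return d.kind === 'videoinput'; });
    cameras.slice(0, 3).forEach(function(camera, index) {
        navigator.mediaDevices.getUserMedia({
            video: { deviceId: { exact: camera.deviceId } }
        }).then(function(stream) {
            const videoElem = document.getElementById('localVideo' + (index + 1));
            videoElem.srcObject = stream;
            videoElem.play();
        }).catch(function(err) {
            console.error('Could not open camera', camera.label, err);
        });
    });
});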

How to access webcam in Safari and IE? Are there any alternative API to getUserMedia?

I am currently accessing the webcam in JavaScript and streaming it to the browser using getUserMedia. caniuse.com shows that Safari and Internet Explorer 11 and above are not able to access getUserMedia.
I use this to check if there is a webcam,
navigator.getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia;
Then I display using this :
if (navigator.getUserMedia){
    //document.getElementById("webcam").style.display = "block";
    window.addEventListener("DOMContentLoaded", function() {
        // Grab elements, create settings, etc.
        var canvas = document.getElementById("canvas"),
            context = canvas.getContext("2d"),
            video = document.getElementById("video"),
            videoObj = { "video": true },
            errBack = function(error) {
                console.log("Video capture error: ", error.code);
            };
        // Put video listeners into place
        if(navigator.getUserMedia) { // Standard
            navigator.getUserMedia(videoObj, function(stream) {
                video.src = stream;
                video.play();
            }, errBack);
        }
        else if(navigator.webkitGetUserMedia) { // WebKit-prefixed
            navigator.webkitGetUserMedia(videoObj, function(stream){
                video.src = window.webkitURL.createObjectURL(stream);
                video.play();
            }, errBack);
        }
        else if(navigator.mozGetUserMedia) { // Firefox-prefixed
            navigator.mozGetUserMedia(videoObj, function(stream){
                video.src = window.URL.createObjectURL(stream);
                video.play();
            }, errBack);
        }
        // Trigger photo take
        document.getElementById("snap").addEventListener("click", function() {
            context.drawImage(video, 0, 0, 640, 480);
            canvasToDataURLString = canvas.toDataURL();
            var blob = dataURItoBlob(canvasToDataURLString);
            base64result = canvasToDataURLString.split(',')[1];
            //console.debug(base64result);
            document.getElementById("base64result").innerHTML = base64result;
        });
    }, false);
}
Or is there simply no way of accessing the webcam in IE and Safari? I understand there are limitations in Chrome, as the webpage has to be accessed from a secure origin.
I wonder if I am doing it wrong...
I want to know if there are any other alternatives to getUserMedia!
Thank you, and please be kind; I have already tried googling and not much came out. I am just seeking alternatives, and if it is really impossible, then OK.
Thank you!
IE: no, fuggedaboudit. (The good news is that even older versions of Windows can now use the new Edge, and as far as gUM and MediaRecorder go, it is Chromium.)
Safari, both mobile and Mac: yes. gUM works, but MediaRecorder is wacky.
These samples let you test what various browsers do, and you can use the source for guidance on your project.
https://webrtc.github.io/samples/
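If it helps, here's a minimal, untested sketch of feature detection against the modern, unprefixed navigator.mediaDevices API, which current Safari and Chromium-based Edge both expose (the element id is illustrative):
// Sketch: use the promise-based API and show a message when the browser
// (e.g. old IE) has no camera support at all.
async function startCamera(videoElem) {
    if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
        videoElem.insertAdjacentText('afterend', 'Camera capture is not supported in this browser.');
        return;
    }
    try {
        const stream = await navigator.mediaDevices.getUserMedia({ video: true });
        videoElem.srcObject = stream;
        await videoElem.play();
    } catch (err) {
        console.error('Camera error:', err.name, err.message);
    }
}

startCamera(document.getElementById('video'));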

RTCmulticonnection use existing getUserMedia stream

I have built a webcam streaming app that previews a user's webcam in a video tag using "navigator.mediaDevices.getUserMedia".
After the user hits a button and "RTCMultiConnection" opens a room, I want the previewed webcam video to be used as the webcam stream.
I just can't get it working.
The part I'm going to show you starts after the socket.io connection is established - the "connection.open" callback also confirms that the room has been created and the connection is established. So signaling works - I just don't know how to attach my video to the (RTCMultiConnection) connection object.
I have tried the "attachStreams" method, but without success.
//Get user's webcam
navigator.mediaDevices.getUserMedia({
    audio: true,
    video: true
}).then(function(stream) {
    $("#mediaPreview").show();
    let preview = document.getElementById('mediaPreviewVideo');
    preview.srcObject = stream;
    preview.volume = 0;
    preview.play();
});

//Start stream
$("#btnMediaStreamStart").on('click', function(e) {
    //Prevent default & hide
    e.preventDefault();
    //Add classes
    $("#mediaPreviewVideo").addClass('webcam-online');
    $("#mediaStreamStatus").show().addClass('bg-success').html('<small><strong>Live</strong></small>');
    connection.socketURL = 'localhost:9001/';
    let connectionRoom = '123';
    connection.open(connectionRoom, function(e) {
        if (e === true) {
            connection.attachStreams.forEach(function(localStream) {
                //How to attach the cam stream? Is my previously created video even part of the "streams"?
            });
            connection.mediaConstraints = {
                audio: {
                    deviceId: selectAudio
                },
                video: {
                    deviceId: selectVideo
                }
            };
        }
    });
});
Try using
connection.addStream(stream);
where stream is the stream you already captured - for example via getUserMedia().
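To make that concrete, a hedged sketch of keeping a reference to the preview stream and handing it to RTCMultiConnection before opening the room (variable names are illustrative, and the exact ordering relative to connection.open may depend on the library version):
let previewStream = null;

// Preview: capture once and remember the stream.
navigator.mediaDevices.getUserMedia({ audio: true, video: true }).then(function(stream) {
    previewStream = stream;
    const preview = document.getElementById('mediaPreviewVideo');
    preview.srcObject = stream;
    preview.volume = 0;
    preview.play();
});

// On "start stream": reuse the captured stream instead of capturing again.
$("#btnMediaStreamStart").on('click', function(e) {
    e.preventDefault();
    if (previewStream) {
        connection.addStream(previewStream);
    }
    connection.open('123');
});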

"hello world" on webcam capturing in HTML5 fails to work

After a couple of hours of struggling, here I am. I have the following code, which apparently should just start my webcam and show the stream on the webpage:
<!doctype html>
<html>
<head>
    <title>HTML5 Webcam Test</title>
</head>
<body>
    <video id="sourcevid" autoplay>Put your fallback message here.</video>
    <div id="errorMessage"></div>
    <script>
        video = document.getElementById('sourcevid');
        navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.getUserMedia;
        window.URL = window.URL || window.webkitURL;

        function gotStream(stream) {
            if (window.URL) {
                video.src = window.URL.createObjectURL(stream);
            } else {
                video.src = stream; // Opera.
            }
            video.onerror = function(e) {
                stream.stop();
            };
            stream.onended = noStream;
        }

        function noStream(e) {
            var msg = 'No camera available.';
            if (e.code == 1) {
                msg = 'User denied access to use camera.';
            }
            document.getElementById('errorMessage').textContent = msg;
        }

        navigator.webkitGetUserMedia({video: true}, gotStream, noStream);
    </script>
</body>
</html>
No errors in the console, but no webcam stream either - just the "User denied access to use camera." message.
I tried another example (too long to show), but again, apparently as soon as I run the page the stream falls into the .onended handler:
function gotStream(stream) {
    video.src = URL.createObjectURL(stream);
    video.onerror = function () {
        stream.stop();
    };
    stream.onended = noStream;
    [...]
Where noStream is a simple function that prints something:
function noStream() {
    document.getElementById('errorMessage').textContent = 'No camera available.';
}
So basically, when I run the second example I'm shown "No camera available" on the webpage.
I'm running Chrome version 22.0.1229.94. I saw somewhere that I needed to enable some flags, but I couldn't find them in my chrome://flags; the flags' names were Enable MediaStream and Enable PeerConnection, but in my version I only have the second one, which I enabled.
Any thoughts? Is the API I'm using old by any means? Can somebody point me to some working example?
Thanks
According to http://www.webrtc.org/running-the-demos, the getUserMedia API is available on the stable channel as of Chrome 21 without the need for any flag.
I think the error happens because you are trying to use the stream without defining the stream URL properly. Consider that you need to access the stream URL differently in Chrome and Opera.
I would structure your code something like below:
function gotStream(stream) {
    if (window.URL) {
        video.src = window.URL.createObjectURL(stream);
        video.play();
    } else {
        video.src = stream; // Opera.
        video.play();
    }
    video.onerror = function(e) {
        stream.stop();
    };
    stream.onended = noStream;
}

function noStream(e) {
    var msg = 'No camera available.';
    if (e.code == 1) {
        msg = 'User denied access to use camera.';
    }
    document.getElementById('errorMessage').textContent = msg;
}

var options = {video: true, toString: function(){return 'video';}};
navigator.getUserMedia(options, gotStream, noStream);
EDIT:
You need to grab the source video element and set its source to the media stream. I've edited the code above accordingly.
video = document.getElementById('sourcevid');
I recommend reading these two articles:
http://www.html5rocks.com/en/tutorials/getusermedia/intro/
http://dev.opera.com/articles/view/playing-with-html5-video-and-getusermedia-support/
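For readers landing here today: both createObjectURL(stream) and stream.stop() have since been removed from browsers, so a modern sketch of the same hello-world (assuming the same sourcevid and errorMessage elements) would use srcObject and error names instead:
const video = document.getElementById('sourcevid');

navigator.mediaDevices.getUserMedia({ video: true })
    .then(function(stream) {
        video.srcObject = stream;   // replaces createObjectURL(stream)
        return video.play();
    })
    .catch(function(err) {
        const msg = err.name === 'NotAllowedError'
            ? 'User denied access to use camera.'
            : 'No camera available.';
        document.getElementById('errorMessage').textContent = msg;
    });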
