We have a laptop with one built-in webcam and two external USB webcams. I would like to receive images from all three webcams at the same time; I know this has been possible since 2018.
I am using the following code to work with one camera, but how do I display the images from three cameras at once?
<script>
var video = null;
var canvas = null;
var canvasContext = null;
var webimage = null;
var statusLabel = null;
function initVideo() {
video = document.getElementById("monitor");
statusLabel = document.getElementById("LBLSTATUS");
navigator.webkitGetUserMedia({video:true}, gotStream, noStream);
}
function setStatus(aStatus) {
statusLabel.innerHTML = aStatus;
}
function gotStream(stream) {
video.onerror = function () {
stream.getTracks().forEach(function (t) { t.stop(); }); // MediaStream.stop() is deprecated
streamError();
};
video.srcObject = stream;
//video.src = webkitURL.createObjectURL(stream); -> Deprecated
}
function noStream() {
setStatus('No camera available.');
}
function streamError() {
setStatus('Camera error.');
}
</script>
You need to get all 3 cameras' deviceIds. Once you have those, make 3 calls to getUserMedia, each with the respective camera's deviceId.
navigator.mediaDevices.getUserMedia({
video: {
deviceId:{
exact: videoSource
},
},
}).then(function (stream) {
    const localVidElem = document.getElementById('localVideo1');
    localVidElem.srcObject = stream;
});
<video id="localVideo1"></video>
Make sure that you have the correct deviceId and then assign the resulting stream to the element's srcObject. For more info about that, please read the official WebRTC docs.
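For completeness, here is a minimal sketch of the whole flow. It assumes the page already contains three video elements with the ids localVideo1, localVideo2 and localVideo3 (the ids are illustrative), and note that device labels may be empty until the user has granted camera permission at least once:

navigator.mediaDevices.enumerateDevices().then(function (devices) {
    // keep only cameras; microphones and speakers are filtered out
    var cameras = devices.filter(function (d) { return d.kind === 'videoinput'; });
    // open each of the (up to) three cameras and attach it to its own element
    cameras.slice(0, 3).forEach(function (camera, i) {
        navigator.mediaDevices.getUserMedia({
            video: { deviceId: { exact: camera.deviceId } }
        }).then(function (stream) {
            // 'localVideo1'..'localVideo3' are assumed to exist in the page
            document.getElementById('localVideo' + (i + 1)).srcObject = stream;
        }).catch(function (err) {
            console.error('Could not open camera', camera.label, err);
        });
    });
});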
I have created a sing-along application using the JavaScript Web Audio API. It worked perfectly on iOS Safari and Chrome, but the sound quality was poor on Android Chrome. To solve this, I tried changing the audio deviceId, but that didn't work either. Does anyone have information that might help?
One doubt: after recording, I pass the file to the server and play it on another page. I am wondering if this is causing the problem.
This is my code
function captureUserMedia(mediaConstraints) {
    navigator.mediaDevices.getUserMedia(mediaConstraints)
        .then(onMediaSuccess)
        .catch(function (err) { console.error(err); }); // the original empty ["catch"]() silently swallowed errors
}
function record() {
if (getParameterByName("startSec").length !== 0) {
masterSound.currentTime = getParameterByName("startSec");
}
masterSound.play();
if (document.querySelectorAll(".record")[0].getAttribute("status") == "off") {
document.querySelectorAll(".record")[0].setAttribute("status", "on");
document.querySelectorAll(".record")[0].classList.add("stoped");
var mediaConstraints;
const devices = navigator.mediaDevices.enumerateDevices()
devices.then((value) => {
// mediaConstraints = {
// audio: {
// deviceId: {
// exact: value[0].deviceId
// }
// },
// video: false
// };
mediaConstraints = {
audio: true,
video: false,
};
captureUserMedia(mediaConstraints);
});
} else {
document.querySelectorAll(".record")[0].setAttribute("status", "off");
document.querySelectorAll(".record")[0].classList.remove("stoped");
mediaRecorder.stream.getTracks().forEach(function (track) { track.stop(); }); // MediaStream.stop() is deprecated
masterSound.pause();
}
}
function onMediaSuccess(stream) {
var audio = document.createElement('audio');
audio.controls = true;
audio.muted = true; // muted so the live monitor doesn't feed back into the mic
audio.srcObject = stream;
audio.play();
var audiosContainer = document.querySelectorAll(".audio_wrapper")[0];
audiosContainer.appendChild(audio);
audiosContainer.appendChild(document.createElement('hr'));
mediaRecorder = new MediaStreamRecorder(stream);
mediaRecorder.mimeType = 'audio/wav';
mediaRecorder.stream = stream;
mediaRecorder.recorderType = MediaRecorderWrapper;
mediaRecorder.audioChannels = 1;
mediaRecorder.start();
mediaRecorder.ondataavailable = function (blob) {
audioFile = blob;
var blobURL = URL.createObjectURL(blob);
document.querySelectorAll(".append_audio")[0].setAttribute("src", blobURL);
function blobToFile(theBlob, fileName) {
theBlob.lastModifiedDate = new Date();
theBlob.name = fileName;
return theBlob;
}
submit();
function submit() {
var audioTest = new Audio(URL.createObjectURL(blob));
audioTest.play();
}
};
}
When trying to build high-quality audio with getDisplayMedia, in the past I've passed in MediaStreamConstraints that remove some of the default processing on the input track:
stream = await navigator.mediaDevices.getDisplayMedia(
{
video: true,
audio:
{
channelCount: 2, // 'channelCount' is the standard constraint name
autoGainControl: false,
echoCancellation: false,
noiseSuppression: false
}
}
);
I'm still learning WebRTC myself, so I'm not sure if these same properties can be passed when using getUserMedia and MediaConstraints, but I thought I'd share in case helpful. It sounds like this might also be about available devices. Good luck!
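For what it's worth, echoCancellation, noiseSuppression, autoGainControl and channelCount are standard audio track constraints, so the same idea should carry over to getUserMedia. A minimal sketch (inside an async function):

const micStream = await navigator.mediaDevices.getUserMedia({
    audio: {
        channelCount: 2,
        autoGainControl: false,
        echoCancellation: false,
        noiseSuppression: false
    }
});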
Had a similar issue where we were getting complaints about very low sound/gain (barely audible) with our HTML/JS recording client when running on Chrome on some Android devices.
Ended up buying an older Samsung phone (Galaxy A8) to easily replicate the issue.
The culprit was echoCancellation being set to false. With it disabled, we had very low volume on the recorded audio. The solution was to set echoCancellation to true.
We ended up removing the constraint altogether and relied on each browser's defaults (echoCancellation is enabled by default on Chrome, Safari, Firefox).
Worth mentioning that autoGainControl and noiseSuppression inherit the value of echoCancellation; more exactly, if you only set audio: {echoCancellation: true}, the other two constraints will also be set to true.
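So if you want explicit control rather than relying on that inheritance, a minimal sketch spelling out all three constraints:

navigator.mediaDevices.getUserMedia({
    audio: {
        echoCancellation: true,
        autoGainControl: true,  // would otherwise inherit echoCancellation's value
        noiseSuppression: true  // same here
    },
    video: false
}).then(function (stream) {
    // record from the stream as usual
});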
The JavaScript error is: Unhandled Promise Rejection: NotAllowedError: The request is not allowed by the user agent or the platform in the current context, possibly because the user denied permission.
My setup works across other browsers, desktop and mobile.
The way it works is:
have a flag first_audio_played = false;
add a touch event listener that plays some audio, and sets first_audio_played = true; (then removes the touch listener)
all subsequent audio checks if(first_audio_played) some_other_audio.play();
This way, only the first audio played requires direct user input; after that, all audio is free to be triggered by in-game events, timing, etc. A minimal sketch of this unlock pattern follows below.
This appears to be the "rule" for audio across most browsers. Is the iOS "rule" that every audio needs to be triggered by user input, or is there some other step I'm missing?
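For reference, a minimal sketch of the unlock pattern described above (the file name is made up for illustration):

var first_audio_played = false;
var unlock_sound = new Audio('unlock-click.mp3'); // any short, quiet sound

function unlockAudio() {
    unlock_sound.play(); // the first play must come from a user gesture
    first_audio_played = true;
    document.removeEventListener('touchend', unlockAudio);
}
document.addEventListener('touchend', unlockAudio);

// all later audio is gated on the flag:
function playIfUnlocked(sound) {
    if (first_audio_played) sound.play();
}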
For my JavaScript game, sounds stopped working on iOS recently. They all have readyState=4, but only the sound I played on tap works; the others won't play. One option is to play all the sounds on the first tap. But the solution that works for me for now is to load all the sounds via Ajax as array buffers and use decodeAudioData(). Then, once you've played one sound from a user tap (on an element other than the body), they all play whenever you want.
Here is my working code; the second approach is at the bottom (the if(0) branch disables the first). When I tap to play sound2, sound1 starts working as well.
<html>
<body>
<div id=all style='font-size:160%;background:#DDD' onclick="log('clicked');playSound(myAudio)">
Sound1 should be playing every couple seconds.
<br />Tap here to play sound1.
</div>
<div id=debug style='font-size:120%;' onclick="playSound(myAudio2)">
Tap here to play the sound2.
</div>
<script>
var url = "http://curtastic.com/drum.wav"
var url2 = "http://curtastic.com/gold.wav"
var myAudio, myAudio2
if(0)
{
var playSound = function(sound)
{
log("playSound() readyState="+sound.readyState)
log("gold readyState="+myAudio2.readyState)
sound.play()
}
var loadSound = function(url, callback)
{
var audio = new Audio(url)
audio.addEventListener('canplaythrough', function()
{
log('canplaythrough');
if(callback)
callback()
}, false)
audio.load()
if(audio.readyState > 3)
{
log('audio.readyState > 3');
if(callback)
callback()
}
return audio
}
myAudio = loadSound(url, startInterval)
myAudio2 = loadSound(url2)
}
else
{
var playSound = function(sound)
{
log("playSound()")
var source = audioContext.createBufferSource()
if(source)
{
source.buffer = sound
if(!source.start)
source.start = source.noteOn
if(source.start)
{
var gain = audioContext.createGain()
source.connect(gain)
gain.connect(audioContext.destination)
source.start()
}
}
}
var loadSound = function(url, callback)
{
log("start loading sound "+url)
var ajax = new XMLHttpRequest()
ajax.open("GET", url, true)
ajax.responseType = "arraybuffer"
ajax.onload = function()
{
audioContext.decodeAudioData(
ajax.response,
function(buffer)
{
log("loaded sound "+url)
log(buffer)
callback(buffer)
},
function(error)
{
log(error)
}
)
}
ajax.send()
}
var AudioContext = window.AudioContext || window.webkitAudioContext
var audioContext = new AudioContext()
loadSound(url, function(r) {myAudio = r; startInterval()})
loadSound(url2, function(r) {myAudio2 = r})
}
function startInterval()
{
log("startInterval()")
setInterval(function()
{
playSound(myAudio)
}, 2000)
}
function log(m)
{
console.log(m)
debug.innerHTML += m+"<br />"
}
</script>
</body>
</html>
You can use either [WKWebViewConfiguration setMediaTypesRequiringUserActionForPlayback:WKAudiovisualMediaTypeNone] or [UIWebView setMediaPlaybackRequiresUserAction:NO], depending on your WebView class (or the Swift equivalents).
On my Nexus 4 (Android 4.4.4) I am trying to switch between the 'user'-facing camera and the 'environment'-facing camera.
Accessing either one directly works.
Switching between them by making another call to navigator.getUserMedia() with new constraints fails. The failure results in a black video and MediaStream.ended=true.
Why is MediaStream.ended=true on my second call to getUserMedia?
In my view I dynamically create buttons for the number of video sources (two in this case). Clicking a button calls camera.getUserMedia() and passes in a media source:
camera.getUserMedia = function(source){
var constraints = {
video: true,
audio: false
};
if(source){
constraints.video = {optional: [{
sourceId: source.id
}]};
}
navigator.getMedia(
constraints,
function(stream) {
var vendorURL = window.URL || window.webkitURL;
video.src = vendorURL.createObjectURL(stream);
video.play();
streaming = true;
},
function(err) {
...
}
);
};
I have solved this problem by storing the stream on the camera object, and then, before binding a new stream to the video element, calling stop on the old one. I'm not really sure what exactly is happening, though (maybe somebody can add the explanation in the comments).
camera.getUserMedia = function(source){
if(camera.stream){
camera.stream.stop();
}
...
navigator.getMedia(
constraints,
function(stream) {
camera.stream = stream;
...
},
function(err) {
...
}
);
};
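As a side note, on browsers that support it you can skip tracking sourceId yourself and let the browser pick a camera by facing direction with the standard facingMode constraint. A sketch reusing the camera and video objects from the code above (support on Android 4.4-era browsers is unlikely):

camera.switchTo = function (facing) { // facing is 'user' or 'environment'
    if (camera.stream) {
        // release the current camera before requesting the other one
        camera.stream.getTracks().forEach(function (track) { track.stop(); });
    }
    navigator.mediaDevices.getUserMedia({
        video: { facingMode: { exact: facing } },
        audio: false
    }).then(function (stream) {
        camera.stream = stream;
        video.srcObject = stream;
        video.play();
    });
};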
After a couple of hours of struggling, here I am. I have the following code, which apparently should just start my webcam and display the stream on the web page:
<!doctype html>
<html>
<head>
<title>HTML5 Webcam Test</title>
</head>
<body>
<video id="sourcevid" autoplay>Put your fallback message here.</video>
<div id="errorMessage"></div>
<script>
video = document.getElementById('sourcevid');
navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.getUserMedia;
window.URL = window.URL || window.webkitURL;
function gotStream(stream) {
if (window.URL) {
video.src = window.URL.createObjectURL(stream);
} else {
video.src = stream; // Opera.
}
video.onerror = function(e) {
stream.stop();
};
stream.onended = noStream;
}
function noStream(e) {
var msg = 'No camera available.';
if (e.code == 1) {
msg = 'User denied access to use camera.';
}
document.getElementById('errorMessage').textContent = msg;
}
navigator.webkitGetUserMedia({video: true}, gotStream, noStream);
</script>
</body>
</html>
No errors in the console, but no webcam stream either; just the "User denied access to use camera." message.
I tried another example (too long to show here), but again, as soon as I run the page, the stream falls into the onended handler:
function gotStream(stream) {
video.src = URL.createObjectURL(stream);
video.onerror = function () {
stream.stop();
};
stream.onended = noStream;
[...]
Where noStream is a simple function that prints something:
function noStream() {
document.getElementById('errorMessage').textContent = 'No camera available.';
}
So basically, when I run the second example, I'm shown "No camera available" on the web page.
I'm running Chrome Version 22.0.1229.94. I saw somewhere that I needed to enable some flags, but I couldn't find them in my chrome://flags; the flags' names were Enable MediaStream and Enable PeerConnection, but in my version I only have the second one, which I enabled.
Any thoughts? Is the API I'm using old by any means? Can somebody point me to some working example?
Thanks
According to http://www.webrtc.org/running-the-demos, the getUserMedia API is available in the stable channel as of Chrome 21 without the need for any flag.
I think the error happens because you are trying to use the stream without defining the stream URL properly. Note that you need to create the stream URL differently in Chrome and Opera.
I would structure your code like the example below:
function gotStream(stream) {
    if (window.URL) {
        video.src = window.URL.createObjectURL(stream);
    } else {
        video.src = stream; // Opera accepted the raw stream
    }
    video.play();
    video.onerror = function(e) {
        stream.stop();
    };
    stream.onended = noStream;
}
function noStream(e) {
var msg = 'No camera available.';
if (e.code == 1) {
msg = 'User denied access to use camera.';
}
document.getElementById('errorMessage').textContent = msg;
}
var options = {video: true, toString: function(){return 'video';}}; // the toString hack supports older builds that expected a string constraint
navigator.getUserMedia(options, gotStream, noStream);
EDIT:
You need to grab the source video element and assign the media stream to it; I have edited the code above accordingly.
video = document.getElementById('sourcevid');
I recommend reading these two articles:
http://www.html5rocks.com/en/tutorials/getusermedia/intro/
http://dev.opera.com/articles/view/playing-with-html5-video-and-getusermedia-support/
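As a historical note, these prefixed APIs have since been standardized; on current browsers the whole example boils down to something like this sketch:

navigator.mediaDevices.getUserMedia({ video: true })
    .then(function (stream) {
        video.srcObject = stream; // no object URL needed anymore
    })
    .catch(function (err) {
        // modern browsers report err.name (e.g. 'NotAllowedError') instead of e.code
        document.getElementById('errorMessage').textContent =
            err.name === 'NotAllowedError' ? 'User denied access to use camera.' : 'No camera available.';
    });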
I opened a webcam by using the following JavaScript code:
const stream = await navigator.mediaDevices.getUserMedia({ /* ... */ });
Is there any JavaScript code to stop or close the webcam?
Since this answer was originally posted, the browser API has changed.
.stop() is no longer available on the stream that gets passed to the callback.
The developer will have to access the tracks that make up the stream (audio or video) and stop each of them individually.
More info here: https://developers.google.com/web/updates/2015/07/mediastream-deprecations?hl=en#stop-ended-and-active
Example (from the link above):
stream.getTracks().forEach(function(track) {
track.stop();
});
Browser support may differ.
Previously, navigator.getUserMedia provided you with a stream in the success callback, and you could call .stop() on that stream to stop the recording (at least in Chrome; Firefox didn't seem to like it).
Use any of these functions:
// stop both mic and camera
function stopBothVideoAndAudio(stream) {
stream.getTracks().forEach(function(track) {
if (track.readyState == 'live') {
track.stop();
}
});
}
// stop only camera
function stopVideoOnly(stream) {
stream.getTracks().forEach(function(track) {
if (track.readyState == 'live' && track.kind === 'video') {
track.stop();
}
});
}
// stop only mic
function stopAudioOnly(stream) {
stream.getTracks().forEach(function(track) {
if (track.readyState == 'live' && track.kind === 'audio') {
track.stop();
}
});
}
Don't use stream.stop(); it's deprecated. See:
MediaStream Deprecations
Use stream.getTracks().forEach(track => track.stop())
Firefox, Chrome, and Opera have started exposing getUserMedia via navigator.mediaDevices as standard now (might change :)
navigator.mediaDevices.getUserMedia({audio:true,video:true})
.then(stream => {
window.localStream = stream;
})
.catch( (err) =>{
console.log(err);
});
// later you can do below
// stop both video and audio
localStream.getTracks().forEach( (track) => {
track.stop();
});
// stop only audio
localStream.getAudioTracks()[0].stop();
// stop only video
localStream.getVideoTracks()[0].stop();
Suppose we have the stream playing in a video tag whose id is video - <video id="video"></video> - then we should have the following code:
var videoEl = document.getElementById('video');
// now get the stream
var stream = videoEl.srcObject;
// now get all tracks
var tracks = stream.getTracks();
// now stop each track with a forEach loop
tracks.forEach(function(track) {
// stopping every track
track.stop();
});
// assign null to srcObject of video
videoEl.srcObject = null;
Starting Webcam Video with different browsers
For Opera 12
window.navigator.getUserMedia(param, function(stream) {
video.src =window.URL.createObjectURL(stream);
}, videoError );
For Firefox Nightly 18.0
window.navigator.mozGetUserMedia(param, function(stream) {
video.mozSrcObject = stream;
}, videoError );
For Chrome 22
window.navigator.webkitGetUserMedia(param, function(stream) {
video.src =window.webkitURL.createObjectURL(stream);
}, videoError );
Stopping Webcam Video with different browsers
For Opera 12
video.pause();
video.src=null;
For Firefox Nightly 18.0
video.pause();
video.mozSrcObject=null;
For Chrome 22
video.pause();
video.src="";
With this, the webcam light goes off every time...
Try the method below:
var mediaStream = null;
navigator.getUserMedia(
{
audio: true,
video: true
},
function (stream) {
mediaStream = stream;
mediaStream.stop = function () {
this.getAudioTracks().forEach(function (track) {
track.stop();
});
this.getVideoTracks().forEach(function (track) { //in case... :)
track.stop();
});
};
/*
* Rest of your code.....
* */
});
/*
* somewhere inside your code you call
* */
mediaStream.stop();
You can end the stream directly using the stream object returned to the success handler of getUserMedia, e.g.
localMediaStream.stop()
video.src="" or null would just remove the source from video tag. It wont release the hardware.
Since you need the tracks to close the streaming, and you need the stream object to get to the tracks, the code I have used (with the help of Muaz Khan's answer above) is as follows:
if (navigator.getUserMedia) {
navigator.getUserMedia(constraints, function (stream) {
videoEl.src = stream;
videoEl.play();
document.getElementById('close').addEventListener('click', function () {
stopStream(stream);
});
}, errBack);
    function stopStream(stream) {
        console.log('stop called');
        stream.getVideoTracks().forEach(function (track) {
            track.stop();
        });
    }
}
Of course this will close all the active video tracks. If you have multiple, you should select accordingly, as in the sketch below.
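For example, to stop only the track coming from one specific camera, you could filter by deviceId; a sketch where targetDeviceId is assumed to come from an earlier enumerateDevices() call:

function stopCamera(stream, targetDeviceId) {
    stream.getVideoTracks()
        .filter(function (track) {
            // getSettings().deviceId identifies the device backing this track
            return track.getSettings().deviceId === targetDeviceId;
        })
        .forEach(function (track) {
            track.stop();
        });
}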
If .stop() is deprecated, then I don't think we should re-add it like #MuazKhan does. There's a reason why things get deprecated and should not be used anymore. Just create a helper function instead... Here is a more ES6 version:
function stopStream (stream) {
for (let track of stream.getTracks()) {
track.stop()
}
}
You need to stop all tracks (from webcam, microphone):
localStream.getTracks().forEach(track => track.stop());
Start and Stop Web Camera (Update 2020, React ES6)
Start Web Camera
startWebCamera = () => {
    // start the web camera
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        // use the webcam
navigator.mediaDevices.getUserMedia({ video: true }).then(stream => {
this.localStream = stream;
this.video.srcObject = stream;
this.video.play();
});
}
}
Stop Web Camera or Video playback in general
stopVideo =()=>
{
this.video.pause();
this.video.src = "";
this.video.srcObject = null;
// As per new API stop all streams
if (this.localStream)
this.localStream.getTracks().forEach(track => track.stop());
}
The stop function also works when the video element is playing a regular video file:
this.video.src = this.state.videoToTest;
this.video.play();
Using .stop() on the stream works on Chrome when connected via HTTP. It does not work when using SSL (HTTPS).
Please check this: https://jsfiddle.net/wazb1jks/3/
navigator.getUserMedia(mediaConstraints, function(stream) {
window.streamReference = stream;
}, onMediaError);
Stop Recording
function stopStream() {
if (!window.streamReference) return;
window.streamReference.getAudioTracks().forEach(function(track) {
track.stop();
});
window.streamReference.getVideoTracks().forEach(function(track) {
track.stop();
});
window.streamReference = null;
}
The following code worked for me:
public vidOff() {
let stream = this.video.nativeElement.srcObject;
let tracks = stream.getTracks();
tracks.forEach(function (track) {
track.stop();
});
    this.video.nativeElement.srcObject = null; // note: a <video> element itself has no stop() method
}
Keep a reference to the stream from the success handler:
var streamRef;
var handleVideo = function (stream) {
streamRef = stream;
}
// this will stop both the video and audio tracks
streamRef.getTracks().forEach(function (track) {
    track.stop();
});