I am trying to call a function from within navigator.mediaDevices.getUserMedia and it isn't working.
This is what I have:
navigator.mediaDevices.getUserMedia({audio: true}, startUserMedia, function(e) {
__log('No live audio input: ' + e);
});
This is my function:
function startUserMedia(stream) {
var input = audio_context.createMediaStreamSource(stream);
__log('Media stream created.' );
__log("input sample rate " +input.context.sampleRate);
__log('Input connected to audio context destination.');
recorder = new Recorder(input, {
numChannels: 1
});
__log('Recorder initialised.');
}
I'm trying to update this call, since before it was:
navigator.getUserMedia({audio: true}, startUserMedia, function(e) {
__log('No live audio input: ' + e);
});
That older version works, but I can't get the new mediaDevices version to work. It says:
Uncaught TypeError: recorder is undefined
It never makes the call to the startUserMedia function. If I add an alert("hello") inside the function, it doesn't execute.
This works though:
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
startUserMedia();
}).catch(function(err) {});
but then it doesn't recognize my recorder:
recorder = new Recorder(input, { numChannels: 1 });
Can anyone lend a hand?
Found the answer thanks to Derek.
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = (navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
audio_context = new AudioContext();
__log('Audio context set up.');
if (navigator.mediaDevices) { // if navigator.mediaDevices exists, use it
navigator.mediaDevices.getUserMedia({audio: true}).then(startUserMedia, function(e) {
__log('No live audio input: ' + e);
});
} else {
navigator.getUserMedia({audio: true}, startUserMedia, function(e) {
__log('No live audio input: ' + e);
});
}
This way I can check whether or not the browser supports navigator.mediaDevices. Either way, it fires the prompt asking the user for the browser's media permission.
The startUserMedia function gets called in both cases.
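If you prefer a single call site, the same check can be wrapped in a small helper that always returns a Promise. This is only a sketch, not part of the original answer; getAudioStream is an illustrative name:
function getAudioStream() {
    // Prefer the Promise-based API when it exists.
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        return navigator.mediaDevices.getUserMedia({ audio: true });
    }
    // Otherwise wrap the legacy callback API in a Promise.
    var legacyGetUserMedia = navigator.getUserMedia ||
        navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia ||
        navigator.msGetUserMedia;
    if (!legacyGetUserMedia) {
        return Promise.reject(new Error('getUserMedia is not supported in this browser'));
    }
    return new Promise(function (resolve, reject) {
        legacyGetUserMedia.call(navigator, { audio: true }, resolve, reject);
    });
}
// Both code paths end up in the same success handler.
getAudioStream().then(startUserMedia, function (e) {
    __log('No live audio input: ' + e);
});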
Related
I'm trying to use the getUserMedia method to access my webcam and track my face with clmtrackr (https://github.com/auduno/clmtrackr).
It was working a few weeks ago, but since Chrome updated to v50 I've been running into issues: it uses the replacement video instead of accessing my webcam.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
// check for camerasupport
if (navigator.getUserMedia) {
var videoSelector = {video : true};
if (window.navigator.appVersion.match(/Chrome\/(.*?) /)) {
var chromeVersion = parseInt(window.navigator.appVersion.match(/Chrome\/(\d+)\./)[1], 10);
if (chromeVersion < 20) {
videoSelector = "video";
}
};
navigator.getUserMedia(videoSelector, function( stream ) {
if (video.mozCaptureStream) {
video.mozSrcObject = stream;
} else {
video.src = (window.URL && window.URL.createObjectURL(stream)) || stream;
}
video.play();
}, function() {
//it uses this alt video
insertAltVideo(video);
alert("There was some problem trying to fetch video from your webcam, using a fallback video instead.");
});
} else {
insertAltVideo(video);
alert("Your browser does not seem to support getUserMedia, using a fallback video instead.");
}
P.S. It works as I want on Firefox.
Thanks in advance.
navigator.getUserMedia no longer works in Chrome (it is undefined); use the newer MediaDevices interface:
navigator.mediaDevices.getUserMedia(constraints)
.then(function(stream) {
var videoTracks = stream.getVideoTracks();
console.log('Got stream with constraints:', constraints);
console.log('Using video device: ' + videoTracks[0].label);
stream.onended = function() {
console.log('Stream ended');
};
window.stream = stream; // make variable available to console
video.srcObject = stream;
})
.catch(function(error) {
// ...
});
See more:
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
https://developers.google.com/web/updates/2015/07/mediastream-deprecations?hl=en
https://developers.google.com/web/updates/2015/10/media-devices?hl=en
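Applied to the code in the question, the success path might look like this (a sketch only; video and insertAltVideo come from the question's code, and srcObject is feature-detected for older browsers):
navigator.mediaDevices.getUserMedia({ video: true })
    .then(function (stream) {
        if ('srcObject' in video) {
            video.srcObject = stream; // current browsers
        } else {
            video.src = window.URL.createObjectURL(stream); // older fallback
        }
        video.play();
    })
    .catch(function (err) {
        // same fallback behaviour as the original errorCallback
        insertAltVideo(video);
        alert("There was some problem trying to fetch video from your webcam, using a fallback video instead.");
    });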
Is there a way in JavaScript to check whether the browser (any browser) is waiting for the user to allow permission for the microphone or camera?
Note: based on this, I want to determine whether the media permission was already saved, i.e. if permission is saved, the popup will not appear.
EDIT
MEDIA = ( navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia ||
navigator.getUserMedia );
get_user_media: function() {
jQuery("instruction_image_id").show();
(MEDIA.bind(navigator))(
{audio: true, video: false},
media_success(),
media_error()
);
}
media_success: function() {
jQuery("instruction_image_id").hide();
}
EDIT
Note: I need a cross-browser solution. The solution in the thread this was marked as a duplicate of works well only with Chrome.
The waiting time can be measured between the start of the permission request and the moment one of the callbacks fires; see the start and end of the permission below:
// Start the permission.
navigator.getUserMedia (
// constraints
{
video: true,
audio: true
},
// successCallback
function(localMediaStream) {
// End the permission
var video = document.querySelector('video');
video.src = window.URL.createObjectURL(localMediaStream);
video.onloadedmetadata = function(e) {
// Do something with the video here.
};
},
// errorCallback
function(err) {
if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') { // PERMISSION_DENIED is not a defined constant; check the error name instead
// Explain why you need permission and how to update the permission setting
}
}
);
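One way to approximate whether a prompt was shown is to time the gap between the getUserMedia call and its callback: if permission was already saved, the callback usually fires almost immediately. This is only a heuristic sketch, not a browser API, and the 500 ms threshold is an arbitrary assumption:
var promptStartedAt = Date.now();
navigator.getUserMedia(
    { audio: true, video: true },
    function (localMediaStream) {
        var elapsedMs = Date.now() - promptStartedAt;
        // A very short gap suggests the permission was already saved and no
        // prompt was shown; a longer gap suggests the user answered a prompt.
        if (elapsedMs < 500) {
            console.log('Permission was probably already granted (no prompt).');
        } else {
            console.log('User likely answered a prompt after ' + elapsedMs + ' ms.');
        }
    },
    function (err) {
        console.log('getUserMedia failed after ' + (Date.now() - promptStartedAt) + ' ms: ' + err);
    }
);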
I read several other questions on related issues, but none answered mine. I have an odd problem where I am able to use WebRTC to audio chat from Chrome to Firefox but not from Firefox to Chrome.
Basically, when a user wishes to audio chat, he/she clicks a button #audioChatBtn, which uses getUserMedia() to setup a stream. The thing is, clicking #audioChatBtn from Firefox doesn't fire the onaddstream callback on Chrome, but clicking the button from Chrome fires onaddstream on Firefox. Thus, I can audio chat from Chrome to Firefox but not the other way around. I have been trying to figure this out for several hours, but I'm hoping maybe someone here has an answer.
Relevant source:
var configuration = {
'iceServers': [
{ url: 'stun:stun.l.google.com:19302' },
{ url: 'stun:stun1.l.google.com:19302' },
{ url: 'stun:stun2.l.google.com:19302' },
{ url: 'stun:stun3.l.google.com:19302' },
{ url: 'stun:stun4.l.google.com:19302' }
]
};
var pc = RTCPeerConnection(configuration);
var myStream = null;
var currentAudioIndex = 0; // Number of created channels
var myAudioEnabled = false;
// send any ice candidates to the other peer
pc.onicecandidate = function (evt) {
if (evt.candidate)
$(document).trigger("persistState", { mode: 'rtc', 'candidate': evt.candidate });
};
// let the 'negotiationneeded' event trigger offer generation
pc.onnegotiationneeded = function () {
pc.createOffer(localDescCreated, logError);
}
// once remote stream arrives, play it in the audio element
pc.onaddstream = function (evt) {
console.log('creating and binding audio');
var idx = (currentAudioIndex++);
var audioElement = $('#audio' + idx);
if (audioElement.length == 0) {
var audio = $('<audio id="audio' + idx + '" autoplay>');
$('body').append(audio);
audioElement = $('#audio' + idx);
}
var audioObject = audioElement[0];
attachMediaStream(audioObject, evt.stream);
};
function localDescCreated(desc) {
pc.setLocalDescription(desc, function () {
$(document).trigger("persistState", { mode: 'rtc', 'sdp': pc.localDescription });
}, logError);
}
function logError(e) {
bootbox.alert("Audio chat could not be started.");
}
function hasGetUserMedia() {
return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia || navigator.msGetUserMedia);
}
server.onPersist = function(msg) {
if (msg.mode == "rtc") {
if (msg.sdp)
pc.setRemoteDescription(new RTCSessionDescription(msg.sdp), function () {
// if we received an offer, we need to answer
if (pc.remoteDescription.type == 'offer')
pc.createAnswer(localDescCreated, logError);
}, logError);
else
pc.addIceCandidate(new RTCIceCandidate(msg.candidate));
}
}
// On click, start audio chat from this user.
$('#audioChatBtn').click(function() {
if (!hasGetUserMedia()) {
bootbox.alert('Audio conferencing is not supported by your browser. (Currently only supported by Chrome, Firefox, and Opera web browsers.)');
return;
}
if (myAudioEnabled) {
myStream.stop();
displayAlert('Streaming closed', 'Audio chat is off');
$('#audioChatBtn').removeClass('btn-success').addClass('btn-primary');
} else {
getUserMedia({ video: false, audio: true }, function (localMediaStream) {
myStream = localMediaStream;
pc.addStream(localMediaStream);
displayAlert('Streaming...', 'Audio chat is enabled');
$('#audioChatBtn').removeClass('btn-primary').addClass('btn-success');
}, logError);
}
myAudioEnabled = !myAudioEnabled;
});
What I've tried
Tried using 'optional': [{ 'DtlsSrtpKeyAgreement': 'true' }] in the configuration after reading this question
Tried creating a new RTCPeerConnection() each request
Tried using native browser functions instead of adapter.js.
Explored Web Audio API instead of getUserMedia()
Firefox does not currently support onnegotiationneeded, because we currently don't support re-negotiation of an existing connection. All addStream/addTrack calls and a single createDataChannel (if you want to use them) need to be done before createOffer() or createAnswer(). You can call createDataChannel() after you connect, if you created one before createOffer().
Adding a stream after they're connected won't work.
An (annoying) alternative is to create a new set of PeerConnections to replace the old ones (using a DataChannel in the old pair as a signaling channel for lower latency)
Resolving this is high on our priority list, but will take a few more releases.
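In practice that means restructuring the flow from the question so the stream is added before the offer is created, instead of relying on onnegotiationneeded. A rough sketch, reusing pc, myStream, localDescCreated and logError from the question's code:
getUserMedia({ video: false, audio: true }, function (localMediaStream) {
    myStream = localMediaStream;
    pc.addStream(localMediaStream);             // must happen before createOffer()
    pc.createOffer(localDescCreated, logError); // create the offer explicitly
}, logError);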
After a lot of debugging, I came to realize that the bug has nothing to do with my code but with Firefox's implementation of WebRTC. Firefox doesn't trigger the onnegotiationneeded callback, so I have to do it hackily using a timeout (and hope that the stream information has been relayed to the remote client before the function fires). Obviously, this is a Firefox bug; I will report it, hoping they fix it in the next build.
getUserMedia({ video: false, audio: true }, function (localMediaStream) {
myStream = localMediaStream;
pc.addStream(localMediaStream);
displayAlert('Streaming...', 'Audio chat is enabled');
$('#audioChatBtn').removeClass('btn-primary').addClass('btn-success');
// Need this for Firefox
if (webrtcDetectedBrowser == 'firefox')
setTimeout(pc.onnegotiationneeded, 5000);
}, logError);
I have an odd problem with code that works in Chrome, but not Firefox. For whatever reason, it says that the variable (success) is undefined, even though it is clearly defined before the function is ever called. Take a look:
createRoom.addEventListener('click',function(e){
e.preventDefault();
var success = function(myStream){
ownVideo.src = URL.createObjectURL(myStream);
// create a room
WebRTC.createRoom();
};
navigator.getUserMedia({audio: true, video: true}, gotStream, gotError);
});
After success is defined, I run getUserMedia, which, upon running successfully, will run gotStream, which will then run success. Why won't Firefox accept the definition of success? Help.
According to Mozilla:
// your code should be
navigator.getUserMedia({audio: true, video: true}, success, gotError);
Maybe a gotStream function happens to be defined in Chrome, which is why it runs there.
Here is the running sample from Mozilla's reference page:
navigator.getUserMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
if (navigator.getUserMedia) {
navigator.getUserMedia (
// constraints
{
video: true,
audio: true
},
// successCallback
function(localMediaStream) {
var video = document.querySelector('video');
video.src = window.URL.createObjectURL(localMediaStream);
// Do something with the video here, e.g. video.play()
},
// errorCallback
function(err) {
console.log("The following error occurred: " + err);
}
);
} else {
console.log("getUserMedia not supported");
}
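Applied to the click handler from the question, the only change is which callback gets passed to getUserMedia (a sketch; gotError is assumed to be the same error handler used in the question):
createRoom.addEventListener('click', function (e) {
    e.preventDefault();
    var success = function (myStream) {
        ownVideo.src = URL.createObjectURL(myStream);
        // create a room
        WebRTC.createRoom();
    };
    // Pass success itself, not gotStream.
    navigator.getUserMedia({ audio: true, video: true }, success, gotError);
});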
After a couple of hours of struggling, here I am. I have the following code, which should just start my webcam and show the stream on the web page:
<!doctype html>
<html>
<head>
<title>HTML5 Webcam Test</title>
</head>
<body>
<video id="sourcevid" autoplay>Put your fallback message here.</video>
<div id="errorMessage"></div>
<script>
video = document.getElementById('sourcevid');
navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.getUserMedia;
window.URL = window.URL || window.webkitURL;
function gotStream(stream) {
if (window.URL) {
video.src = window.URL.createObjectURL(stream);
} else {
video.src = stream; // Opera.
}
video.onerror = function(e) {
stream.stop();
};
stream.onended = noStream;
}
function noStream(e) {
var msg = 'No camera available.';
if (e.code == 1) {
msg = 'User denied access to use camera.';
}
document.getElementById('errorMessage').textContent = msg;
}
navigator.webkitGetUserMedia({video: true}, gotStream, noStream);
</script>
</body>
</html>
No errors in the console, but no webcam stream either; just the "User denied access to use camera." message.
I tried another example, too long to show here, but again, as soon as I load the page the stream falls into the .onended handler:
function gotStream(stream) {
video.src = URL.createObjectURL(stream);
video.onerror = function () {
stream.stop();
};
stream.onended = noStream;
[...]
Where noStream is a simple function that prints something:
function noStream() {
document.getElementById('errorMessage').textContent = 'No camera available.';
}
So basically when I'm running the second example I'm shown the "No camera available" on the webpage.
I'm running Chrome version 22.0.1229.94. I saw somewhere that I needed to enable some flags, but I couldn't find them in my chrome://flags; the flags were named Enable MediaStream and Enable PeerConnection, and in my version I only have the second one, which I enabled.
Any thoughts? Is the API I'm using outdated? Can somebody point me to a working example?
Thanks
According to http://www.webrtc.org/running-the-demos, the getUserMedia API is available in the stable channel as of Chrome 21 without the need for any flag.
I think the error happens because you are attaching the stream without defining the stream URL properly. Keep in mind that you need to access the stream URL differently in Chrome and Opera.
I would structure your code something like this:
function gotStream(stream) {
if (window.URL) {
video.src = window.URL.createObjectURL(stream);
video.play();
} else {
video.src = stream; // Opera.
video.play();
}
video.onerror = function(e) {
stream.stop();
};
stream.onended = noStream;
}
function noStream(e) {
var msg = 'No camera available.';
if (e.code == 1) {
msg = 'User denied access to use camera.';
}
document.getElementById('errorMessage').textContent = msg;
}
var options = {video: true, toString: function(){return 'video';}};
navigator.getUserMedia(options, gotStream, noStream);
EDIT:
You need to point the source video element at the media stream; I have edited the code above accordingly. Grab the element first:
video = document.getElementById('sourcevid');
I recommend reading these two articles:
http://www.html5rocks.com/en/tutorials/getusermedia/intro/
http://dev.opera.com/articles/view/playing-with-html5-video-and-getusermedia-support/
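For reference, on current browsers the same flow would typically go through navigator.mediaDevices.getUserMedia and srcObject rather than the prefixed API and createObjectURL, as covered in the answers earlier on this page. A minimal sketch under that assumption:
var video = document.getElementById('sourcevid');
navigator.mediaDevices.getUserMedia({ video: true })
    .then(function (stream) {
        video.srcObject = stream; // replaces window.URL.createObjectURL(stream)
        video.play();
    })
    .catch(function (err) {
        document.getElementById('errorMessage').textContent =
            'No camera available: ' + err.name;
    });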