Below is the code I am using to make the last item of a drop-down list selected, but for some reason the value is still that of the first one — can someone please help me? This application lets the user open the rear camera after granting camera permission to the browser. The problem is that although the drop-down list says "camera 2", it still opens camera 1. I think the problem is that the value is not changing.
<body>
<div class='select'>
<label for='videoSource'>Video source: </label><select id='videoSource'></select>
</div>
<video muted autoplay style="width:100%; height:auto"></video>
</body>
</html>
<script type="text/javascript">
// Grab the <video> element and the camera <select> used by the handlers below.
var videoElement = document.querySelector("video");
var videoSelect = document.querySelector("select#videoSource");
// Vendor-prefix shim for the legacy callback-style getUserMedia API.
// NOTE(review): navigator.getUserMedia is deprecated; modern code should use
// navigator.mediaDevices.getUserMedia instead.
navigator.getUserMedia = navigator.getUserMedia ||
  navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
// Populate the camera <select> from the enumerated media sources and
// pre-select the LAST video source (on phones this is typically the
// rear-facing camera).
function gotSources(sourceInfos) {
  var lastVideoOption = null;
  for (var i = 0; i !== sourceInfos.length; ++i) {
    var sourceInfo = sourceInfos[i];
    if (sourceInfo.kind === 'video') {
      var option = document.createElement("option");
      option.value = sourceInfo.id;
      option.text = sourceInfo.label || 'camera ' + (videoSelect.length + 1);
      videoSelect.appendChild(option);
      lastVideoOption = option;
    } else {
      console.log('Some other kind of source: ', sourceInfo);
    }
  }
  // BUG FIX: the original set `selected` only when the last entry of
  // sourceInfos happened to be a video source. When the device list ends
  // with an audio source, no option was ever selected, so the select's
  // value stayed on the first camera — exactly the reported symptom.
  // Select the last *video* option that was actually appended.
  if (lastVideoOption) {
    lastVideoOption.selected = true;
    videoSelect.value = lastVideoOption.value;
  }
}
// Feature-detect MediaStreamTrack before asking for the device list;
// otherwise tell the user which browser to try.
if (typeof MediaStreamTrack !== 'undefined') {
  MediaStreamTrack.getSources(gotSources);
} else {
  alert('This browser does not support MediaStreamTrack.\n\nTry Chrome Canary.');
}
// Attach the captured stream to the <video> element and start playback.
function successCallback(stream) {
  // Keep a global handle so the stream can be inspected/stopped later.
  window.stream = stream;
  var objectUrl = window.URL.createObjectURL(stream);
  videoElement.src = objectUrl;
  videoElement.play();
}
// Log getUserMedia failures so permission/device problems are visible.
function errorCallback(err) {
  console.log("navigator.getUserMedia error: ", err);
}
// Stop any existing stream, then (re)open the camera currently chosen
// in the source <select>.
function start() {
  if (window.stream) {
    videoElement.src = null;
    window.stream.stop();
  }
  // 'optional' constraints are best-effort: the browser may fall back
  // to another camera if the sourceId cannot be honored.
  var constraints = {
    video: {
      optional: [{ sourceId: videoSelect.value }]
    }
  };
  navigator.getUserMedia(constraints, successCallback, errorCallback);
}
// Re-open the camera whenever the user picks a different source, and
// kick off the first capture immediately.
videoSelect.onchange = start;
start();
</script>
Related
I want to use the Web Speech API for speech recognition and record the user's voice on Android devices at the same time (i.e. the user holds a button, and their voice is recorded and transcribed to text simultaneously).
This works perfectly on Windows, but on Android it just returns the error:
no-speech
It seems that creating the MediaRecorder blocks the microphone access needed by the Web Speech API on Android!
How can I fix this?
If I remove this line which is responsible for recording, speech recognition works again:
new MediaRecorder(stream); // adding this line ruins the speech recognition
Here is the code in action:
In the given code I didn't remove this, in order to show that the code won't work on Android devices:
Note: this code should be tested with an Android device, it is working fine in desktop.
CodePen: https://codepen.io/pixy-dixy/pen/GRddgYL?editors=1010
Demo here in SO:
// Recorded audio data for the current take.
let audioChunks = [];
// NOTE(review): `rec` is only referenced from commented-out code below.
let rec;
// Set by Recognize() to a closure that stops the active recognition session.
let stopRecognize;
// Element that displays interim/final transcripts.
const output = document.getElementById('output');
// Start a Web Speech API session and stream interim/final transcripts into
// the #output element. Installs the module-level `stopRecognize` closure so
// the button-release handler can end the session and suppress auto-restart.
async function Recognize() {
  console.log('Recognize')
  // While true, onend auto-restarts recognition; stopRecognize() clears it.
  let recognitionAllowed = true;
  stopRecognize = function() {
    if(recognitionAllowed) {
      recognition.stop();
      recognitionAllowed = false;
    }
  }
  // Vendor-prefix fallbacks. This works because `var` hoisting makes the
  // local names undefined at this point, so the webkit-prefixed globals are
  // used when the unprefixed ones don't exist.
  var SpeechRecognition = SpeechRecognition || webkitSpeechRecognition;
  var SpeechGrammarList = SpeechGrammarList || webkitSpeechGrammarList;
  var SpeechRecognitionEvent = SpeechRecognitionEvent || webkitSpeechRecognitionEvent;
  var recognition = new SpeechRecognition();
  var speechRecognitionList = new SpeechGrammarList();
  recognition.grammars = speechRecognitionList;
  recognition.lang = 'en-GB';
  recognition.continuous = false;
  // Deliver partial results while the user is still speaking.
  recognition.interimResults = true;
  recognition.maxAlternatives = 1;
  recognition.start();
  // Append final results, show interim ones as they arrive.
  recognition.onresult = function(event) {
    window.interim_transcript = '';
    window.speechResult = '';
    for(var i = event.resultIndex; i < event.results.length; ++i) {
      if(event.results[i].isFinal) {
        speechResult += event.results[i][0].transcript;
        console.log(speechResult);
        output.innerHTML = speechResult;
      } else {
        interim_transcript += event.results[i][0].transcript;
        console.log(interim_transcript);
        output.innerHTML = interim_transcript;
      }
    }
  }
  recognition.onerror = function(event) {
    // restartRecognition();
    console.log('recognition error: ' + event.error);
  }
  // Recognition sessions end on silence; restart while the button is held.
  recognition.onend = async function(event) {
    restartRecognition();
  }
  function restartRecognition() {
    // start() throws if called while already running; ignore that case.
    try { if(recognitionAllowed) recognition.start(); } catch(err) {}
  }
}
// Push-to-talk wiring: press starts recognition + recording, release stops.
// Both mouse and touch events are bound; the handlers filter by device type
// so a touch does not also fire via its synthesized mouse events.
const startRecognition = document.getElementById('start-recognition');
startRecognition.addEventListener('mousedown', handleRecognitionStart);
startRecognition.addEventListener('mouseup', handleRecognitionEnd);
startRecognition.addEventListener('touchstart', handleRecognitionStart);
startRecognition.addEventListener('touchend', handleRecognitionEnd);
// Begin recognition on the press event that matches the device type:
// touchstart on touch devices, mousedown otherwise.
function handleRecognitionStart(e) {
  console.log('handleRecognitionStart', isTouchDevice)
  const eventType = e.type;
  const touchPress = isTouchDevice && eventType == 'touchstart';
  const mousePress = !isTouchDevice && eventType == 'mousedown';
  if (touchPress) {
    recognitionStart();
  } else if (mousePress) {
    console.log('handleRecognitionStart')
    recognitionStart();
  }
}
// True when the environment looks like a touch device: touch events exist,
// touch points are reported, or the primary pointer is coarse.
const isTouchDevice = touchCheck();
function touchCheck() {
  if ('ontouchstart' in window) {
    return true;
  }
  const points = navigator.maxTouchPoints || navigator.msMaxTouchPoints;
  if (points > 0) {
    return true;
  }
  return window.matchMedia && matchMedia('(any-pointer: coarse)').matches;
}
// End recognition on the release event that matches the device type:
// touchend on touch devices, mouseup otherwise.
function handleRecognitionEnd(e) {
  const eventType = e.type;
  console.log(':::', eventType == 'touchend');
  const touchRelease = isTouchDevice && eventType == 'touchend';
  const mouseRelease = !isTouchDevice && eventType == 'mouseup';
  if (touchRelease || mouseRelease) {
    recognitionEnd();
  }
}
// Release handler: tear down recognition and stop the recorder.
function recognitionEnd() {
  resetRecognition();
}
// Press handler: clear the previous take, start speech recognition,
// and begin a fresh audio recording.
function recognitionStart() {
  console.log('recognitionStart');
  audioChunks = [];
  Recognize();
  voiceRecorder.start();
}
// Stop the active recognition session (if any) and the voice recorder.
function resetRecognition() {
  console.log('reset');
  if (typeof stopRecognize === "function") {
    stopRecognize();
  }
  voiceRecorder.stop();
}
// Replay the last finished recording through the hidden #player element.
const playAudio = document.getElementById('play');
playAudio.addEventListener('click', () => {
  console.log('play');
  voiceRecorder.play();
})
// Wraps getUserMedia + MediaRecorder for push-to-talk audio capture.
// #recorder is a muted <audio> element used to monitor the live stream;
// #player receives the finished recording as a blob URL.
class VoiceRecorder {
  constructor() {
    // Capability log only — failures surface later via handleError().
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
      console.log("getUserMedia supported")
    } else {
      console.log("getUserMedia is not supported on your browser!")
    }
    // Listed here for clarity; both are assigned in handleSuccess().
    this.mediaRecorder
    this.stream
    this.playerRef = document.querySelector("#player")
    this.recorderRef = document.querySelector("#recorder")
    // Audio chunks delivered by the MediaRecorder for the current take.
    this.chunks = []
    this.isRecording = false
    // Audio-only capture.
    this.constraints = {
      audio: true,
      video: false
    }
  }
  // getUserMedia success path: monitor the stream and start recording.
  handleSuccess(stream) {
    this.stream = stream
    this.stream.oninactive = () => {
      console.log("Stream ended!")
    };
    // Route the live stream through the hidden, muted recorder element.
    this.recorderRef.srcObject = this.stream
    // NOTE(review): per the question above, constructing this MediaRecorder
    // appears to conflict with Web Speech recognition on Android — confirm.
    this.mediaRecorder = new MediaRecorder(this.stream)
    console.log(this.mediaRecorder)
    this.mediaRecorder.ondataavailable = this.onMediaRecorderDataAvailable.bind(this)
    this.mediaRecorder.onstop = this.onMediaRecorderStop.bind(this)
    this.recorderRef.play()
    this.mediaRecorder.start()
  }
  // getUserMedia failure path: log only.
  handleError(error) {
    console.log("navigator.getUserMedia error: ", error)
  }
  // Accumulate recorded data as the MediaRecorder produces it.
  onMediaRecorderDataAvailable(e) { this.chunks.push(e.data) }
  // Recording finished: hand the blob to the player and release the mic.
  onMediaRecorderStop(e) {
    const blob = new Blob(this.chunks, { 'type': 'audio/ogg; codecs=opus' })
    const audioURL = window.URL.createObjectURL(blob)
    this.playerRef.src = audioURL;
    this.chunks = [];
    // Stop the tracks so the browser drops the microphone-in-use indicator.
    this.stream.getAudioTracks().forEach(track => track.stop());
    this.stream = null;
  }
  // Replay the last finished recording.
  play() { this.playerRef.play(); }
  // Begin a new capture (no-op if one is already running).
  start() {
    console.log('start')
    if(this.isRecording) return;
    console.log('33')
    this.isRecording = true;
    this.playerRef.src = '';
    navigator.mediaDevices
      .getUserMedia(this.constraints)
      .then(this.handleSuccess.bind(this))
      .catch(this.handleError.bind(this))
  }
  // Stop the capture; onMediaRecorderStop finalizes the blob.
  stop() {
    if(!this.isRecording) return;
    this.isRecording = false;
    this.recorderRef.pause();
    this.mediaRecorder.stop();
  }
}
// Single shared recorder instance used by the press/release handlers above.
// BUG FIX: the original assignment created an implicit global (which throws
// a ReferenceError in strict mode); declare it explicitly.
var voiceRecorder = new VoiceRecorder();
<button id="start-recognition">Hold This Button and Speak In Android This should output the text and record your voice at the s</button>
<button id="play">Play Recorded Audio</button>
<h1 id="output">Voice over here</h1>
<audio id="recorder" muted hidden></audio>
<audio id="player" hidden></audio>
I have a problem with code that I use to request audio/video. It works on Chrome and Mozilla Firefox; however, it doesn't work in IE11.
I've already read this one:
https://github.com/addyosmani/getUserMedia.js
and applied most of the changes to make it work on IE11; however, I don't get any results. What is wrong in my code that prevents me from getting the video in that browser?
// Build the capture UI based on the device probe result `obj` (the
// {audio, video} object returned by listUserMedia; values are the STRINGS
// "true" or "null", not booleans). Returns a status message, or false when
// no input devices exist.
// NOTE(review): `constraint`, `mediactrl` and `mediaType` are assigned
// without declaration — they become implicit globals shared with
// requestMedia()/startRecording(), and would throw under strict mode.
function initiateMedia(obj){
  constraint="";
  if(obj.video=="true" && obj.audio=="true"){
    // Camera + microphone: capped video size, webm recording.
    constraint={
      audio:true,
      video:{
        width:{max:380},
        height:{max:260}
      }
    }
    //Set Video Element
    $("#mediacontainer").html('<video id ="io-videocam" muted="muted"></video>');
    mediactrl = document.querySelector('video');
    mediaType="video/webm";
    return "Audio and Video input found! You can now start recording!";
  }else if(obj.video=="null" && obj.audio=="true"){ //audio input only
    constraint={audio:true, video: false}
    //Set Audio Element
    $("#mediacontainer").html('<audio id ="io-audio" controls muted="muted"></audio>');
    mediactrl = document.querySelector('audio');
    mediaType="audio/mpeg";
    return ("Audio input found! You can now start recording!");
  }else if(obj.video=="null" && obj.audio=="null"){ //no audio or video input
    console.log("No Audio or Video input found");
    return false;
  }
  // NOTE(review): the video-only combination ("true"/"null") falls through
  // and returns undefined — confirm whether that case can occur.
}
// Probe for audio/video input devices and cache the result in
// sessionStorage as the strings "true"/"null".
// NOTE(review): enumerateDevices() is asynchronous, but the sessionStorage
// read at the bottom runs synchronously, BEFORE the promise resolves — so
// the return value reflects a *previous* call (or null on the first call).
// Callers should really consume the promise instead; TODO confirm usage.
function listUserMedia(){
  if (!navigator.mediaDevices || !navigator.mediaDevices.enumerateDevices) {
    console.log("enumerateDevices() not supported.");
    return;
  }
  var media = navigator.mediaDevices.enumerateDevices();
  media.then(function(devices) {
    var useVideo=null;
    var useAudio=null;
    devices.forEach(function(device) {
      //console.log(device.kind + ": " + device.label + " id = " + device.deviceId);
      if(device.kind=="audioinput"){
        useAudio="true";
      }
      if(device.kind=="videoinput"){
        useVideo="true";
      }
    });
    // Persist the probe so later (synchronous) reads can see it.
    if(typeof(Storage) !== "undefined") {
      sessionStorage.setItem('useAudio', useAudio);
      sessionStorage.setItem('useVideo', useVideo);
    } else {
      console.log("Unable to set SessionStorage");
    }
  });
  media.catch(function(err) {
    console.log(err.name + ": " + err.message);
  });
  // Synchronous read — see the NOTE above about racing the promise.
  if(typeof(Storage) !== "undefined") {
    return {"audio":sessionStorage.getItem('useAudio'),"video":sessionStorage.getItem('useVideo')};
  } else {
    console.log("Unable to get SessionStorage");
  }
  return null;
}
// Vendor-prefix shim for the legacy callback-style getUserMedia API.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
// Request the stream described by the global `constraint` (set by
// initiateMedia) and wire it into the media element `mediactrl`.
// `string` is a status message shown to the user on success.
function requestMedia(string){
  // BUG FIX: the original guard only checked the legacy
  // navigator.getUserMedia, but the code below calls
  // navigator.mediaDevices.getUserMedia. On IE11 `mediaDevices` is
  // undefined, so the call threw a TypeError instead of showing the
  // unsupported-browser message.
  var modernGUM = navigator.mediaDevices && navigator.mediaDevices.getUserMedia;
  if(typeof MediaRecorder === 'undefined' || !modernGUM){
    alert('MediaRecorder/getUserMedia not supported on your browser, kindly use Firefox 36+');
  }else{
    var media = navigator.mediaDevices.getUserMedia(constraint);
    media.then(function(mediaStream){
      stream=mediaStream;
      var url = window.URL || window.webkitURL;
      if(mediaType!=null){
        // The original duplicated identical wiring for the video and audio
        // branches; both attach the stream and play on loadedmetadata.
        mediactrl.src = url ? url.createObjectURL(stream) : stream;
        mediactrl.onloadedmetadata = function(e) {
          mediactrl.play();
        };
        // Flip the UI into "recording available" state.
        $("#requestmedia").addClass("btndisabled faded-effect");
        $("#videocontrol").removeClass("btndisabled").prop("disabled",false);
        $("#mediaerror").html(string).css({"display":"block","color":"#003666"});
      }
    });
    media.catch(function(err){
      errorCallback(err);
    });
  }
}
// Record the given stream with MediaRecorder, collecting data into the
// global `chunks` array; on stop, package the take and upload it.
function startRecording(stream){
  mediaRecorder = new MediaRecorder(stream);
  // Request a dataavailable event every 10 ms so chunks accumulate steadily.
  mediaRecorder.start(10);
  mediaRecorder.ondataavailable = function(e){
    chunks.push(e.data);
  };
  mediaRecorder.onerror = function(e){
    console.log("Error: "+e)
  };
  mediaRecorder.onstop = function(){
    // Package everything recorded so far and ship it to the server.
    var recordedBlob = new Blob(chunks, {type: mediaType});
    chunks = [];
    var mediaURL = (window.webkitURL || window.URL).createObjectURL(recordedBlob);
    sendFileToServer(recordedBlob);
  };
}
// Surface getUserMedia failures in the console for debugging.
function errorCallback(error){
  var prefix = 'navigator.getUserMedia error: ';
  console.log(prefix, error);
}
// Stop every track on the active stream (releasing the camera/microphone)
// and clear the global `stream` reference.
function releaseDevice(){
  if(stream){
    // BUG FIX: the original only ran when the deprecated MediaStream.stop
    // method existed, so browsers that removed it never released the
    // device; it also indexed getAudioTracks()[0]/getVideoTracks()[0]
    // unconditionally, throwing when a kind had no track. Stopping every
    // track via getTracks() covers both media types safely.
    var tracks = stream.getTracks ? stream.getTracks() : [];
    for(var i = 0; i < tracks.length; i++){
      tracks[i].stop();
    }
  }
  stream=null;
}
I am a beginner with getUserMedia; I just got some code from Google and was able to work with it. But I have to show options in my web app from which a user can select a webcam: either the primary one (built into the laptop) or a secondary one (connected via USB).
I tried this, and it works for the primary (laptop) webcam, but when I add a USB webcam, it automatically selects the USB webcam.
// Snapshot canvas, its 2D context, the live-video and image-grid elements,
// plus the basic constraints and error callback for the legacy API.
var canvas = document.getElementById("canvas"),
    context = canvas.getContext("2d"),
    video = document.getElementById("video"),
    imagegrid = document.getElementById("imagegrid"),
    // Video only, any camera.
    videoObj = { "video": true },
    errBack = function(error) {
      console.log("Video capture error: ", error.code);
    };
// NOTE(review): `video` is re-declared here — it was already assigned above;
// both queries target the same #video element.
var video = document.querySelector("#video");
// Vendor-prefix shim for the legacy callback-style getUserMedia API.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia || navigator.oGetUserMedia;
// Open the default camera; handleVideo receives the MediaStream.
if (navigator.getUserMedia) {
  navigator.getUserMedia({video: true}, handleVideo, videoError);
}
// Attach the camera stream to the <video> element.
function handleVideo(stream) {
  // FIX: URL.createObjectURL(MediaStream) is deprecated and has been
  // removed from modern browsers (it throws); prefer srcObject and keep
  // the blob-URL path only as a legacy fallback.
  if ('srcObject' in video) {
    video.srcObject = stream;
  } else {
    video.src = window.URL.createObjectURL(stream);
  }
}
// Error callback for getUserMedia; intentionally left as a stub by the
// original author.
function videoError(e) {
// do something
}
// Trigger photo take
// Take a snapshot when the live video is clicked.
// NOTE(review): draw() is not defined in this excerpt — presumably it copies
// the current video frame onto the canvas; confirm against the full source.
document.getElementById("video").addEventListener("click", function() {
  draw(video, canvas, imagegrid);
});
Is it possible to show options for both webcams?
Thanks
The function navigator.getUserMedia() will only give you the default camera (with the exception of Firefox which gives you an option of which camera to share with the web application)
To avoid this problem you should use navigator.mediaDevices.enumerateDevices() and then navigator.mediaDevices.getUserMedia(constraints).
Example:
// Enumerate cameras/microphones/speakers, then build the device <select>s.
navigator.mediaDevices.enumerateDevices()
  .then(gotDevices)
  .catch(errorCallback);
...
function gotDevices(deviceInfos) {
...
for (var i = 0; i !== deviceInfos.length; ++i) {
var deviceInfo = deviceInfos[i];
var option = document.createElement('option');
option.value = deviceInfo.deviceId;
if (deviceInfo.kind === 'audioinput') {
option.text = deviceInfo.label ||
'Microphone ' + (audioInputSelect.length + 1);
audioInputSelect.appendChild(option);
} else if (deviceInfo.kind === 'audiooutput') {
option.text = deviceInfo.label || 'Speaker ' +
(audioOutputSelect.length + 1);
audioOutputSelect.appendChild(option);
} else if (deviceInfo.kind === 'videoinput') {
option.text = deviceInfo.label || 'Camera ' +
(videoSelect.length + 1);
videoSelect.appendChild(option);
}
...
}
navigator.mediaDevices.getUserMedia(constraints)
.then(function(stream) {
var videoTracks = stream.getVideoTracks();
console.log('Got stream with constraints:', constraints);
console.log('Using video device: ' + videoTracks[0].label);
stream.onended = function() {
console.log('Stream ended');
};
window.stream = stream; // make variable available to console
video.srcObject = stream;
})
.catch(function(error) {
// ...
}
The above functions use promises and require a more complex approach than yours. So you need to do some reading in order to adaptate this method. Have a look at the link below for some examples:
https://developers.google.com/web/updates/2015/10/media-devices
I dug deep into Stack Overflow but did not get a proper answer from any post.
First here is my code:
// Self-invoking module: on page load, play a "fail" or "success" audio cue on
// pages whose <body id> is 'fail' or 'success'. `a` maps MIME types to the
// failure media files, `n` to the success ones. On Android a hidden <video>
// element is used as an autoplay workaround; elsewhere a plain Audio object.
(function(a,n){
  window.onload = function() {
    var b = document.body,
        userAgent = navigator.userAgent || navigator.vendor || window.opera,
        playSound = function(file) {
          var mediaAudio = new Audio(file);
          mediaAudio.play();
        };
    if(b.id == 'fail' || b.id == 'success')
    {
      if ((/android/gi.test(userAgent) && !window.MSStream))
      {
        // Android path: build an off-screen, initially muted <video>,
        // then unmute and play it shortly after insertion.
        var vm = document.createElement("video"), type;
        // BUG FIX: the original set `vm.autoPlay`, which is not a real DOM
        // property (the correct name is all-lowercase `autoplay`), so the
        // assignment silently did nothing. `autoplay` defaults to false,
        // so writing the correct property keeps behavior identical.
        vm.autoplay = false;
        vm.controls = true;
        vm.preload = 'auto';
        vm.loop = false;
        vm.muted = true;
        vm.style.position = 'absolute';
        vm.style.top = '-9999%';
        vm.style.left = '-9999%';
        vm.style.zIndex = '-1';
        vm.id = 'video';
        if(b.id == 'fail')
          type = a;
        else
          type = n;
        // BUG FIX: `key` was an undeclared loop variable (an implicit
        // global, and a ReferenceError under strict mode); declare it.
        for(var key in type)
        {
          if(/video/gi.test(key) && vm.canPlayType(key) == 'probably') {
            vm.type = key;
            vm.src = type[key];
            b.appendChild(vm);
            setTimeout(function(){
              vm.muted = false;
              vm.play();
            },100);
            return;
          }
        }
      }
      else
      {
        // Non-Android path: play the first format the browser says it can
        // 'probably' handle.
        var au = new Audio(),type;
        if(b.id == 'fail')
          type = a;
        else
          type = n;
        for(var key in type)
        {
          if(/audio/gi.test(key) && au.canPlayType(key) == "probably") {
            playSound(type[key]);
            return;
          }
        }
      }
    }
  }
}({
  'audio/mpeg':'./sfx/not_ok.mp3',
  'audio/wav':'./sfx/not_ok.wav',
  'audio/ogg':'./sfx/not_ok.ogg',
  'video/mp4; codecs=avc1.42E01E,mp4a.40.2':'./sfx/not_ok.mp4',
},
{
  'audio/mpeg':'./sfx/ok.mp3',
  'audio/wav':'./sfx/ok.wav',
  'audio/ogg':'./sfx/ok.ogg',
  'video/mp4; codecs=avc1.42E01E,mp4a.40.2':'./sfx/ok.mp4',
}));
I'm trying to play background sound on all devices on one special page. That page play fail or success sound.
Everything works great in desktop browsers, but when I try to play on mobile, I get no results. In the code above, I added a hack where, on the Android platform, I generate a hidden video element and try to autoplay it, but without success.
Is there a way to trigger play for video or audio automatically?
Alternatively, is there a way to emulate a click event on the body so the sound plays automatically, or some other solution?
Thanks!
I have two webcams and am trying to specify which one to show in my video tag. My HTML is simply <video autoplay></video>. Here is my javascript:
// Vendor-prefix shim for the legacy callback-style getUserMedia API.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
// Ask for a specific camera by source id. 'optional' constraints are
// best-effort: if the sourceId cannot be honored, the browser silently
// falls back to the default camera — which is why the wrong webcam can
// still appear even with a valid id.
var constraints = {
  video: {
    optional: [{
      sourceId: "64-character-alphanumeric-source-id-here"
    }]
  }
};
var video = document.querySelector("video");
// Attach the captured stream to the <video> element and start playback.
function successCallback(stream) {
  window.stream = stream; // keep a handle for console debugging
  // FIX: URL.createObjectURL(MediaStream) is deprecated and throws in
  // modern browsers; use srcObject when available and keep the original
  // blob-URL / direct-assignment paths as legacy fallbacks.
  if ('srcObject' in video) {
    video.srcObject = stream;
  } else if (window.URL) {
    video.src = window.URL.createObjectURL(stream);
  } else {
    video.src = stream;
  }
  video.play();
}
// Log why camera access failed (permission denied, no device, etc.).
function errorCallback(err) {
  console.log("navigator.getUserMedia error: ", err);
}
// Kick off the capture with the source-constrained request defined above.
navigator.getUserMedia(constraints, successCallback, errorCallback);
However, even when I change the sourceId to my second webcam, I can't get it to display that webcam. js.fiddle code
this code is working for me in mobile chrome: It tries to detect the back facing video stream.
// Enumerate media sources and grab the id of the back ("environment")
// facing camera, then build constraints that prefer it.
// NOTE(review): MediaStreamTrack.getSources is a legacy API; modern code
// should use navigator.mediaDevices.enumerateDevices instead.
MediaStreamTrack.getSources(function(sourceInfos) {
  var videoSourceId;
  // Scan all sources; the last matching back-facing camera wins.
  for (var i = 0; i != sourceInfos.length; ++i) {
    var sourceInfo = sourceInfos[i];
    if(sourceInfo.kind == "video" && sourceInfo.facing == "environment") {
      videoSourceId = sourceInfo.id;
    }
  }
  // 'optional' means: use this camera if possible, else fall back silently.
  var constraints = {
    audio: false,
    video: {
      optional: [{sourceId: videoSourceId}]
    }
  };
  ....
});
note that there is no fallback in this code