DOMException: Could not start video source in JavaScript - javascript

I am working on a webcam recorder app in JavaScript and WebRTC but when I click on the "Start Recording" button, I got this error:
Cannot access media devices: DOMException: Could not start video source
(anonymous) # scripts.js:43
Promise.catch (async)
(anonymous) # scripts.js:42
And here's my code:
HTML:
<button id="btn-start-recording">Start Recording</button>
<hr>
<video id="my-preview" controls autoplay></video>
<script src="./scripts.js"></script>
<script src="https://cdn.webrtc-experiment.com/RecordRTC.js"></script>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
JavaScript:
// when the user clicks on the button start video recording
// Wire up the "Start Recording" button: on click, grab the user's camera
// and microphone, preview the stream, and start a RecordRTC recording.
// NOTE(review): `video`, `recorder` and `setSrcObject` are not declared in
// this snippet — presumably `video` is the #my-preview element, `recorder`
// is a shared variable, and `setSrcObject` is RecordRTC's cross-browser
// helper. TODO confirm against the rest of scripts.js.
document.getElementById("btn-start-recording").addEventListener(
"click",
function () {
// disable the start recording button
this.disabled = true;
// request access to the media devices
navigator.mediaDevices
.getUserMedia({
audio: true,
video: true,
})
.then(function (stream) {
// display a live preview on the video element of the page
setSrcObject(stream, video);
// start to display the preview on the video element
// and mute the video to disable the echo issue!
video.play();
video.muted = true;
// initialize the recorder
recorder = new RecordRTCPromisesHandler(stream, {
mimeType: "video/webm",
bitsPerSecond: 128000,
});
// start recording the video
recorder
.startRecording()
.then(function () {
console.info("Recording video ...");
})
.catch(function (error) {
console.error("Cannot start video recording: ", error);
});
// release stream on stopRecording
recorder.stream = stream;
// enable the stop recording button
document.getElementById("btn-stop-recording").disabled = false;
})
.catch(function (error) {
// "Could not start video source" lands here — typically the camera is
// already held by another application, or the page is not served from
// a secure origin (https/localhost).
console.error("Cannot access media devices: ", error); // this is line 43
});
},
false
);
I gave access to the browser microphone and camera on prompt and enabled it in Windows 10 settings.
I also tried in a live server from an extension in Visual Studio Code as well as I tried to run the file locally but this also did not work.
I am working on Windows 10 - Microsoft Edge Chromium 90 and Google Chrome 90.
When I tried in Firefox, I got DOMException: Failed to allocate videosource

getUserMedia in the browser requires the page to be served over HTTPS (aka TLS, usually port 443, with the browser showing a valid lock icon in the address bar).
If you're using a web server serving the HTML page over http (plain text, port 80, page marked as insecure, and/or no lock in the address bar), the request to getUserMedia will fail.
Source: me https://webrtchacks.com/chrome-secure-origin-https/
Edit
Another potential explanation is that another process is using the video camera at the same time. Have you verified that your webcam is not being used by another application? Consider completely killing all applications or browsers that have used your camera recently to try to free any process lock.

Related

WebRTC audio problems when initiating connection from desktop to mobile

I am making a web app using WebRTC that allows two users to communicate with each other using both video and audio. The app uses Node.js as the signaling server. The app works fine when communicating between two desktops, but when I try a desktop-to-mobile communication, if the user initiating the offer is the one on the desktop, the one on mobile can't hear any sound. If it happens the other way around, both have audio. When I check the devtools, the audio stream is sent from the desktop and is received by the mobile (it is active and not muted), but there is no sound. I use the audio element to play the audio stream and the video element to play the video stream. I have tested this on both Chrome and Mozilla Firefox and I encounter the same problem.
If anyone can help it would be greatly appreciated.
Below are code samples of the ontrack event
// Handle remote tracks arriving on the peer connection: collect audio
// tracks into `remoteAudioStream` (played via `audioPlayer`) and video
// tracks into `remoteVideoStream` (shown in `localVideo`).
// NOTE(review): `remoteAudioStream`, `remoteVideoStream`, `audioPlayer`
// and `localVideo` are defined elsewhere — presumably pre-created
// MediaStream objects and media elements. TODO confirm.
// NOTE(review): the element named `localVideo` receives the *remote*
// video stream here; the name looks misleading — verify it is the
// intended target element.
rtcConnection.ontrack = function(event) {
console.log('Remote stream received.');
if(event.streams[0].getAudioTracks().length > 0) {
event.streams[0].getAudioTracks().forEach((track) => {
remoteAudioStream .addTrack(track);
});
// assigning srcObject after adding the tracks so playback starts with
// all audio tracks attached
audioPlayer.srcObject = remoteAudioStream;
}
if (event.streams[0].getVideoTracks().length > 0){
event.streams[0].getVideoTracks().forEach((track) => {
remoteVideoStream .addTrack(track);
});
localVideo.srcObject = remoteVideoStream;
}
};
and the capture media stream:
/**
 * Ask the user for camera + microphone access, add the captured tracks to
 * the peer connection, then create an SDP offer and send it to the peer.
 *
 * Relies on names defined elsewhere in this script: `rtcConnection`,
 * `sendMessage`, `logError`, and the shared `localStream` variable.
 * NOTE: this function shadows the browser API name `getUserMedia` but is
 * unrelated to `navigator.mediaDevices.getUserMedia`, which it calls.
 */
function getUserMedia() {
const constraints = { audio: true, video: true };
navigator.mediaDevices.getUserMedia(constraints) // Ask user to allow access to his media devices
.then(function(data) { //if yes, get stream config data and join room
localStream = data;
console.log('Getting user media succeeded.');
console.log('RTC Connection created. Getting user media. Adding stream tracks to RTC connection');
sendMessage({ type: 'peermessage', messagetype:'info', messagetext: 'Peer started video streaming.'});
//stream to be sent to the other user
localStream.getTracks().forEach(track => rtcConnection.addTrack(track, localStream));
console.log('Creating offer');
// return the chain so any failure below propagates to the final catch
return rtcConnection.createOffer()
.then(function(offer) { // createOffer success
console.log('Offer created. Setting it as local description');
return rtcConnection.setLocalDescription(offer);
})
.then(function() { // setLocalDescription success
console.log('Offer set as local description. Sending it to agent');
sendMessage(rtcConnection.localDescription)
});
})
// FIX: the original chain passed no rejection handler to the outer
// .then(), so a denied permission or unavailable camera was silently
// swallowed as an unhandled promise rejection. Every failure (capture,
// createOffer, setLocalDescription) now reaches logError.
.catch(logError);
}

video.play() occurred unhandled rejection (notallowederror) on IOS

using peer.js for stream video on React APP
// Attach a MediaStream to a <video> element, start playback once the
// metadata has loaded, and append the element to the video grid if one
// exists.
addVideoStream(video: HTMLVideoElement, stream: MediaStream) {
video.srcObject = stream
// NOTE(review): the optional chaining here is pointless — `video` was
// already dereferenced on the previous line, so it cannot be nullish.
video?.addEventListener('loadedmetadata', () => {
// NOTE(review): on iOS Safari play() also needs `playsInline` (and
// usually autoplay/muted) set on the element — see the accepted fix
// later in this thread; without it play() rejects (NotAllowedError).
video.play()
})
if (this.videoGrid) this.videoGrid.append(video)
}
got this error at 'video.play()'
the request is not allowed by the user agent or the platform in the current context
I have already allowed permission for audio and video on iOS.
this code works well other platforms except IOS.
I have no idea.
If I deploy then I just get black screen on IOS.
how can I fix this?
thanks in advance
the problem was how video tag works in IOS with WebRTC.
used HTTPS environment(production) then add these attributes
if (isMobile && isSafari) {
this.myVideo.playsInline = true
this.myVideo.autoplay = true
}
then it works.

chrome.tabCapture.capture() auto mute my tab sound and recorded video has no sound

When I use chrome.tabCapture.capture({audio : true, video : true}) for recording tab It mutes my tab sound and recorded video has no sound .The same code works in another pc but I don't know what the problem is.
chrome: Version 87.0.4280.88 (Official Build) (64-bit)
OS: Microsoft Windows 10 Pro Version10.0.17763 Build 17763
When I use extensions from chrome web store I have the same problem
When you start an audio capture it disconnects the stream from the default output (speakers). By creating a secondary MediaStreamSource and connecting it to the default output (AudioContext.destination) you can allow the stream to continue outputting to speakers while being input to your recorder.
var constraints = {
audio: true,
video: true,
};
var context = new AudioContext();
chrome.tabCapture.capture(constraints, function (stream) {
if (stream) {
context.createMediaStreamSource(stream).connect(context.destination);
.......
}
}
Refer to this thread

How to stream webcam into mobile browser by using ReactJs?

I have created a simple react app that streams the webcam video stream on the browser. Here's the link to the github project : Basic WebCam Streamer
The code is pretty simple and straightforward :
// React component that turns on the local webcam and streams it into the
// page's <video> element when the "Start streaming" button is clicked.
class AppStreamCam extends React.Component {
constructor(props) {
super(props);
// bind so the click handler keeps the component instance as `this`
this.streamCamVideo= this.streamCamVideo.bind(this)
}
// Request camera + microphone and pipe the resulting MediaStream into the
// first <video> element on the page.
// NOTE(review): navigator.mediaDevices is undefined on insecure origins
// (e.g. plain http visited from a phone), which produces the TypeError
// described in the question — serving the app over https fixes it.
streamCamVideo() {
var constraints = { audio: true, video: { width: 1280, height: 720 } };
navigator.mediaDevices
.getUserMedia(constraints)
.then(function(mediaStream) {
var video = document.querySelector("video");
video.srcObject = mediaStream;
// wait for metadata so dimensions are known before playback starts
video.onloadedmetadata = function(e) {
video.play();
};
})
.catch(function(err) {
console.log(err.name + ": " + err.message);
}); // always check for errors at the end.
}
render() {
return (
<div>
<div id="container">
<video autoPlay={true} id="videoElement" controls></video>
</div>
<br/>
<button onClick={this.streamCamVideo}>Start streaming</button>
</div>
);
}
}
And this is the result :
Once, I click on the button, the webcam turns on and starts streaming into the browser.
Here's my problem:
When I open chrome on my phone and enter the localServer address, and click on the button, the app crashes since obviously the app code is meant to be run from the pc browser so that it may turn the pc webcam.
So when I click on the button from my phone, I understandably get this error:
TypeError: Cannot read property 'getUserMedia' of undefined
My goal is to click on the button from my mobile browser and start streaming the pc webcam on my mobile browser just like on the pc.
However, I do not know from where to start exactly. Any help?
I have solved this issue.
1. Open package.json and paste this inside scripts:
"start": "set HTTPS=true&&react-scripts start"
This should serve the app over https
2. If this gives you this error:
React app error: Failed to construct 'WebSocket': An insecure
WebSocket connection may not be initiated from a page loaded over
HTTPS
Open
node_modules/react-dev-utils/webpackHotDevClient.js
And paste this code inside the definition of the connection:
protocol: window.location.protocol === 'https:' ? 'wss' : 'ws',
This is apparently a bug in react-scripts that hasn't been solved yet. If the page is served over HTTPS, the hot-reload client should use WebSockets over SSL/TLS (WSS) instead of plain WebSockets (WS). You can learn more about it here:
NOTE: This will not stream your pc webcam into your phone but rather the phone's camera.

Is HTML5's getUserMedia for audio recording working now?

I had searched a lot of DEMO and examples about getUserMedia , but most are just camera capturing, not microphone.
So I downloaded some examples and tried on my own computer , camera capturing is work ,
But when I changed
navigator.webkitGetUserMedia({video : true},gotStream);
to
navigator.webkitGetUserMedia({audio : true},gotStream);
The browser ask me to allow microphone access first, and then it failed at
document.getElementById("audio").src = window.webkitURL.createObjectURL(stream);
The message is :
GET blob:http%3A//localhost/a5077b7e-097a-4281-b444-8c1d3e327eb4 404 (Not Found)
This is my code: getUserMedia_simple_audio_test
Did I do something wrong? Or only getUserMedia can work for camera now ?
It is currently not available in Google Chrome. See Issue 112367.
You can see in the demo, it will always throw an error saying
GET blob:http%3A//whatever.it.is/b0058260-9579-419b-b409-18024ef7c6da 404 (Not Found)
And also you can't listen to the microphone either in
{
video: true,
audio: true
}
It is currently supported in Chrome Canary. You need to type about:flags into the address bar then enable Web Audio Input.
The following code connects the audio input to the speakers. WATCH OUT FOR THE FEEDBACK!
<script>
// Demo: route live microphone input straight to the speakers (feedback
// risk!). Uses the historical webkit-prefixed APIs this answer targets;
// modern code would use AudioContext and navigator.mediaDevices.getUserMedia.
// this is to store a reference to the input so we can kill it later
var liveSource;
// creates an audiocontext and hooks up the audio input
function connectAudioInToSpeakers(){
var context = new webkitAudioContext();
navigator.webkitGetUserMedia({audio: true}, function(stream) {
console.log("Connected live audio input");
liveSource = context.createMediaStreamSource(stream);
liveSource.connect(context.destination);
});
}
// disconnects the audio input
// NOTE(review): throws if invoked before the getUserMedia callback has
// assigned liveSource — acceptable for a throwaway demo.
function makeItStop(){
console.log("killing audio!");
liveSource.disconnect();
}
// run this when the page loads
connectAudioInToSpeakers();
</script>
<input type="button" value="please make it stop!" onclick="makeItStop()"/>
(sorry, I forgot to login, so posting with my proper username...)
It is currently supported in Chrome Canary. You need to type about:flags into the address bar then enable Web Audio Input.
The following code connects the audio input to the speakers. WATCH OUT FOR THE FEEDBACK!
http://jsfiddle.net/2mLtM/
<script>
// Repost of the same demo (the author re-submitted under their account):
// routes live microphone input straight to the speakers — feedback risk!
// Uses the historical webkit-prefixed APIs current at the time of writing.
// this is to store a reference to the input so we can kill it later
var liveSource;
// creates an audiocontext and hooks up the audio input
function connectAudioInToSpeakers(){
var context = new webkitAudioContext();
navigator.webkitGetUserMedia({audio: true}, function(stream) {
console.log("Connected live audio input");
liveSource = context.createMediaStreamSource(stream);
liveSource.connect(context.destination);
});
}
// disconnects the audio input
// NOTE(review): throws if invoked before the getUserMedia callback has
// assigned liveSource — acceptable for a throwaway demo.
function makeItStop(){
console.log("killing audio!");
liveSource.disconnect();
}
// run this when the page loads
connectAudioInToSpeakers();
</script>
<input type="button" value="please make it stop!" onclick="makeItStop()"/>
It's working, you just need to add toString parameter after audio : true
Check this article - link

Categories