RTCMultiConnection: use existing getUserMedia stream - javascript

I built a webcam streaming app that previews a user's webcam in a video tag using "navigator.mediaDevices.getUserMedia".
After the user hits a button and "RTCMultiConnection" opens a room, I want to use that previewed webcam video as the webcam stream.
I just can't get it working.
The part I'm going to show you starts after the socket.io connection is established - the "connection.open" method also confirms that the room has been created and the connection is established. So signaling works - I just don't know how to attach my video to the (RTCMultiConnection) connection object.
I have tried the "attachStreams" method, but without success.
//Get user's webcam
navigator.mediaDevices.getUserMedia({
    audio: true,
    video: true
}).then(function(stream) {
    $("#mediaPreview").show();
    let preview = document.getElementById('mediaPreviewVideo');
    preview.srcObject = stream;
    preview.volume = 0;
    preview.play();
});
//Start stream
$("#btnMediaStreamStart").on('click', function(e) {
    //Prevent default & hide
    e.preventDefault();
    //Add classes
    $("#mediaPreviewVideo").addClass('webcam-online');
    $("#mediaStreamStatus").show().addClass('bg-success').html('<small><strong>Live</strong></small>');
    connection.socketURL = 'localhost:9001/';
    let connectionRoom = '123';
    connection.open(connectionRoom, function(e) {
        if (e === true) {
            connection.attachStreams.forEach(function(localStream) {
                //How to attach the cam stream? Is my previously created video even part of the "streams"?
            });
            connection.mediaConstraints = {
                audio: {
                    deviceId: selectAudio
                },
                video: {
                    deviceId: selectVideo
                }
            };
        }
    });
});

Try using
connection.addStream(stream);
where stream is your already captured stream - e.g. the one you got from getUserMedia().
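For completeness, a minimal sketch of reusing the preview stream, assuming RTCMultiConnection v3's dontCaptureUserMedia flag and attachStreams array (verify both against your version's docs):
navigator.mediaDevices.getUserMedia({ audio: true, video: true }).then(function(stream) {
    preview.srcObject = stream;
    // tell the library not to capture a stream of its own
    connection.dontCaptureUserMedia = true;
    // reuse the already captured preview stream
    connection.attachStreams.push(stream);
    connection.open(connectionRoom);
});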

Related

I developed recording using the JavaScript Web Audio API, but the sound quality is poor

I created a sing-along application using the JavaScript Web Audio API. It worked perfectly on iOS Safari and Chrome, but the sound quality was poor on Android Chrome. To solve this, I tried changing the audio deviceId, but it still didn't work. Does someone have information that might help?
One doubt: after recording, I pass the file to the server and play it on another page. I am wondering if this is causing the problem.
This is my code
function captureUserMedia(mediaConstraints, successCallback) {
    navigator.mediaDevices.getUserMedia(mediaConstraints)
        .then(successCallback)
        .catch(function(err) {
            console.error(err);
        });
}
function record() {
    if (getParameterByName("startSec").length !== 0) {
        masterSound.currentTime = getParameterByName("startSec");
    }
    masterSound.play();
    if (document.querySelectorAll(".record")[0].getAttribute("status") == "off") {
        document.querySelectorAll(".record")[0].setAttribute("status", "on");
        document.querySelectorAll(".record")[0].classList.add("stoped");
        var mediaConstraints;
        const devices = navigator.mediaDevices.enumerateDevices();
        devices.then((value) => {
            // mediaConstraints = {
            //     audio: {
            //         deviceId: {
            //             exact: value[0].deviceId
            //         }
            //     },
            //     video: false
            // };
            mediaConstraints = {
                audio: true,
                video: false,
            };
            captureUserMedia(mediaConstraints, onMediaSuccess);
        });
    } else {
        document.querySelectorAll(".record")[0].setAttribute("status", "off");
        document.querySelectorAll(".record")[0].classList.remove("stoped");
        // MediaStream.stop() is deprecated; stop each track instead
        mediaRecorder.stream.getTracks().forEach(function(track) {
            track.stop();
        });
        masterSound.pause();
    }
}
function onMediaSuccess(stream) {
    var audio = document.createElement('audio');
    audio.controls = true;
    audio.muted = true;
    audio.srcObject = stream;
    audio.play();
    var audiosContainer = document.querySelectorAll(".audio_wrapper")[0];
    audiosContainer.appendChild(audio);
    audiosContainer.appendChild(document.createElement('hr'));
    mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'audio/wav';
    mediaRecorder.stream = stream;
    mediaRecorder.recorderType = MediaRecorderWrapper;
    mediaRecorder.audioChannels = 1;
    mediaRecorder.start();
    mediaRecorder.ondataavailable = function (blob) {
        audioFile = blob;
        var blobURL = URL.createObjectURL(blob);
        document.querySelectorAll(".append_audio")[0].setAttribute("src", blobURL);
        function blobToFile(theBlob, fileName) {
            theBlob.lastModifiedDate = new Date();
            theBlob.name = fileName;
            return theBlob;
        }
        submit();
        function submit() {
            var audioTest = new Audio(URL.createObjectURL(blob));
            audioTest.play();
        }
    };
}
When trying to build high-quality audio with getDisplayMedia, in the past I've passed in MediaStreamConstraints that remove some of the default processing on the input track:
stream = await navigator.mediaDevices.getDisplayMedia({
    video: true,
    audio: {
        channels: 2,
        autoGainControl: false,
        echoCancellation: false,
        noiseSuppression: false
    }
});
I'm still learning WebRTC myself, so I'm not sure if these same properties can be passed when using getUserMedia and MediaConstraints, but I thought I'd share in case helpful. It sounds like this might also be about available devices. Good luck!
Had a similar issue where we were getting complaints about very low sound/gain - barely audible - with our HTML/JS recording client when running in Chrome on some Android devices.
Ended up buying an older Samsung phone (Galaxy A8) to easily replicate the issue.
The culprit was echoCancellation being set to false. With it disabled, we had a very low volume on the recorded audio. The solution was to set echoCancellation to true.
We ended up removing the constraint altogether and relying on each browser's defaults (echoCancellation is enabled by default in Chrome, Safari, and Firefox).
Worth mentioning that autoGainControl and noiseSuppression inherit the value of echoCancellation; specifically, if you only set audio: {echoCancellation: true}, the other two constraints will also be set to true.
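Putting the two answers above together, a minimal sketch for getUserMedia (these processing constraints are standard MediaTrackConstraints, so they apply to getUserMedia audio tracks as well):
// Keep processing enabled, matching the browser defaults described above;
// per the answer above, autoGainControl and noiseSuppression follow
// echoCancellation when not set explicitly.
const stream = await navigator.mediaDevices.getUserMedia({
    audio: { echoCancellation: true },
    video: false
});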

Chrome Extension tabCapture API Audio Stream to Play in HTML Page

I am creating a chrome extension which captures audio from a tab using the chrome tabCapture API. I would like to play this audio stream in another html page in hopes of eventually creating a visualizer for it.
I capture the audio in a background script like so
chrome.browserAction.onClicked.addListener(function(activeTab) {
    var constraints = {
        audio: true,
        video: false,
    };
    var visualizerPage = chrome.extension.getURL("/views/visualizer.html");
    chrome.tabCapture.capture(constraints, function(stream) {
        console.log("\ngot stream");
        console.log(stream);
        chrome.tabs.create({
            url: visualizerPage
        }, function(tab) {
            chrome.tabs.sendMessage(tab.id, {
                "message": "stream",
                "stream": stream
            });
        });
    });
});
The audio stream is captured from whatever page the extension was clicked on. Another tab is opened, and the audio stream is sent to it as a message.
The javascript for the visualizer.html page is
function loadStream(stream) {
// what do I have to put here to play the stream?
}
chrome.runtime.onMessage.addListener(function(request, sender, sendResponse) {
    if (request.message === "stream") {
        var stream = request.stream;
        if (!stream) {
            console.log("stream is null");
            return;
        }
        console.log(stream);
        loadStream(stream);
    } else if (request.message === "statusChanged") {
        console.log("statusChanged");
    }
});
What I have so far is to load the audio stream into the Web Audio API using an AudioContext:
var context = new AudioContext();
var source = context.createMediaStreamSource(stream);
but the script just hangs when trying to create the source.
The problem is that I am not really sure what type the stream is (the tabCapture API says it's a LocalMediaStream).
How can I get the page to play the audio stream?
Try this in loadStream function:
var audio = new Audio();
audio.src = URL.createObjectURL(stream);
audio.play();
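Note that this answer predates some API changes: current browsers no longer accept a MediaStream in URL.createObjectURL, so a sketch of the modern equivalent would be:
var audio = new Audio();
// assign the stream directly instead of going through a blob URL
audio.srcObject = stream;
audio.play();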

Calling getUserMedia with new constraints causes black screen (MediaStream.ended=true)

On my Nexus 4 (Android 4.4.4) I am trying to switch between the 'user' facing camera and the 'environment' facing camera.
Accessing either one directly works.
Switching between them by making another call to navigator.getUserMedia() with new constraints fails. The failure results in a black video screen and MediaStream.ended=true.
Why is MediaStream.ended=true on my second call to getUserMedia?
In my view I dynamically create buttons for the number of video sources - two in this case. Clicking a button will call camera.getUserMedia() and pass in a media source:
camera.getUserMedia = function(source) {
    var constraints = {
        video: true,
        audio: false
    };
    if (source) {
        constraints.video = {
            optional: [{
                sourceId: source.id
            }]
        };
    }
    navigator.getMedia(
        constraints,
        function(stream) {
            var vendorURL = window.URL || window.webkitURL;
            video.src = vendorURL.createObjectURL(stream);
            video.play();
            streaming = true;
        },
        function(err) {
            ...
        }
    );
};
I solved this problem by storing the stream on the camera object and calling stop on it before binding a new stream to the video element. I'm not entirely sure what exactly is happening though (maybe somebody can add the explanation in the comments).
camera.getUserMedia = function(source) {
    if (camera.stream) {
        camera.stream.stop();
    }
    ...
    navigator.getMedia(
        constraints,
        function(stream) {
            camera.stream = stream;
            ...
        },
        function(err) {
            ...
        }
    );
};
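For reference, a minimal sketch of the same idea against the current API, stopping the old tracks to release the device before switching cameras (switchCamera and the facingMode values are illustrative):
async function switchCamera(facingMode) { // 'user' or 'environment'
    if (camera.stream) {
        // release the current device before requesting the other one
        camera.stream.getTracks().forEach(track => track.stop());
    }
    camera.stream = await navigator.mediaDevices.getUserMedia({
        video: { facingMode: facingMode },
        audio: false
    });
    video.srcObject = camera.stream;
    await video.play();
}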

WebRTC works in Chrome but not Firefox

I read several other questions on related issues, but none answered my question. I have an odd issue where I am able to use WebRTC to audio chat from Chrome to Firefox but not from Firefox to Chrome.
Basically, when a user wishes to audio chat, he/she clicks a button #audioChatBtn, which uses getUserMedia() to set up a stream. The thing is, clicking #audioChatBtn in Firefox doesn't fire the onaddstream callback in Chrome, but clicking the button in Chrome fires onaddstream in Firefox. Thus, I can audio chat from Chrome to Firefox but not the other way around. I have been trying to figure this out for several hours, but I'm hoping maybe someone here has an answer.
Relevant source:
var configuration = {
    'iceServers': [
        { url: 'stun:stun.l.google.com:19302' },
        { url: 'stun:stun1.l.google.com:19302' },
        { url: 'stun:stun2.l.google.com:19302' },
        { url: 'stun:stun3.l.google.com:19302' },
        { url: 'stun:stun4.l.google.com:19302' }
    ]
};
var pc = new RTCPeerConnection(configuration);
var myStream = null;
var currentAudioIndex = 0; // Number of created channels
var myAudioEnabled = false;

// send any ice candidates to the other peer
pc.onicecandidate = function (evt) {
    if (evt.candidate)
        $(document).trigger("persistState", { mode: 'rtc', 'candidate': evt.candidate });
};

// let the 'negotiationneeded' event trigger offer generation
pc.onnegotiationneeded = function () {
    pc.createOffer(localDescCreated, logError);
};

// once remote stream arrives, play it in the audio element
pc.onaddstream = function (evt) {
    console.log('creating and binding audio');
    var idx = (currentAudioIndex++);
    var audioElement = $('#audio' + idx);
    if (audioElement.length == 0) {
        var audio = $('<audio id="audio' + idx + '" autoplay>');
        $('body').append(audio);
        audioElement = $('#audio' + idx);
    }
    var audioObject = audioElement[0];
    attachMediaStream(audioObject, evt.stream);
};

function localDescCreated(desc) {
    pc.setLocalDescription(desc, function () {
        $(document).trigger("persistState", { mode: 'rtc', 'sdp': pc.localDescription });
    }, logError);
}

function logError(e) {
    bootbox.alert("Audio chat could not be started.");
}

function hasGetUserMedia() {
    return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia || navigator.msGetUserMedia);
}

server.onPersist = function(msg) {
    if (msg.mode == "rtc") {
        if (msg.sdp)
            pc.setRemoteDescription(new RTCSessionDescription(msg.sdp), function () {
                // if we received an offer, we need to answer
                if (pc.remoteDescription.type == 'offer')
                    pc.createAnswer(localDescCreated, logError);
            }, logError);
        else
            pc.addIceCandidate(new RTCIceCandidate(msg.candidate));
    }
};

// On click, start audio chat from this user.
$('#audioChatBtn').click(function() {
    if (!hasGetUserMedia()) {
        bootbox.alert('Audio conferencing is not supported by your browser. (Currently only supported by Chrome, Firefox, and Opera web browsers.)');
        return;
    }
    if (myAudioEnabled) {
        myStream.stop();
        displayAlert('Streaming closed', 'Audio chat is off');
        $('#audioChatBtn').removeClass('btn-success').addClass('btn-primary');
    } else {
        getUserMedia({ video: false, audio: true }, function (localMediaStream) {
            myStream = localMediaStream;
            pc.addStream(localMediaStream);
            displayAlert('Streaming...', 'Audio chat is enabled');
            $('#audioChatBtn').removeClass('btn-primary').addClass('btn-success');
        }, logError);
    }
    myAudioEnabled = !myAudioEnabled;
});
What I've tried
Tried using 'optional': [{ 'DtlsSrtpKeyAgreement': 'true' }] in the configuration after reading this question
Tried creating a new RTCPeerConnection() each request
Tried using native browser functions instead of adapter.js.
Explored Web Audio API instead of getUserMedia()
Firefox does not currently support onnegotiationneeded, because we currently don't support re-negotiation of an existing connection. All addStream/addTrack calls and a single createDataChannel (if you want to use them) need to be done before createOffer() or createAnswer(). You can call createDataChannel() after you connect, if you created one before createOffer.
Adding a stream after they're connected won't work.
An (annoying) alternative is to create a new set of PeerConnections to replace the old ones (using a DataChannel in the old pair as a signaling channel for lower latency)
Resolving this is high on our priority list, but will take a few more releases.
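A minimal sketch of the ordering constraint described above, using the modern promise-based API (signaling is omitted):
const pc = new RTCPeerConnection(configuration);
const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
// add all media BEFORE negotiating
stream.getTracks().forEach(track => pc.addTrack(track, stream));
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
// ...send pc.localDescription to the remote peer via your signaling channel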
After a lot of debugging, I came to realize that the bug has nothing to do with my code but with Firefox's implementation of WebRTC. Firefox doesn't trigger the onnegotiationneeded callback, so I have to do it hackily using a timeout (and hope that the stream information has been relayed to the remote client before the function fires). Obviously, this is a Firefox bug and I will report it, hoping that they fix it in an upcoming release.
getUserMedia({ video: false, audio: true }, function (localMediaStream) {
    myStream = localMediaStream;
    pc.addStream(localMediaStream);
    displayAlert('Streaming...', 'Audio chat is enabled');
    $('#audioChatBtn').removeClass('btn-primary').addClass('btn-success');
    // Need this for Firefox
    if (webrtcDetectedBrowser == 'firefox')
        setTimeout(pc.onnegotiationneeded, 5000);
}, logError);

Stop/Close webcam stream which is opened by navigator.mediaDevices.getUserMedia

I opened a webcam by using the following JavaScript code:
const stream = await navigator.mediaDevices.getUserMedia({ /* ... */ });
Is there any JavaScript code to stop or close the webcam?
Since this answer was originally posted, the browser API has changed.
.stop() is no longer available on the stream that gets passed to the callback.
The developer will have to access the tracks that make up the stream (audio or video) and stop each of them individually.
More info here: https://developers.google.com/web/updates/2015/07/mediastream-deprecations?hl=en#stop-ended-and-active
Example (from the link above):
stream.getTracks().forEach(function(track) {
    track.stop();
});
Browser support may differ.
Previously, navigator.getUserMedia provided you with a stream in the success callback; you could call .stop() on that stream to stop the recording (at least in Chrome; FF didn't seem to like it).
Use any of these functions:
// stop both mic and camera
function stopBothVideoAndAudio(stream) {
    stream.getTracks().forEach(function(track) {
        if (track.readyState == 'live') {
            track.stop();
        }
    });
}

// stop only camera
function stopVideoOnly(stream) {
    stream.getTracks().forEach(function(track) {
        if (track.readyState == 'live' && track.kind === 'video') {
            track.stop();
        }
    });
}

// stop only mic
function stopAudioOnly(stream) {
    stream.getTracks().forEach(function(track) {
        if (track.readyState == 'live' && track.kind === 'audio') {
            track.stop();
        }
    });
}
Don't use stream.stop(), it's deprecated
MediaStream Deprecations
Use stream.getTracks().forEach(track => track.stop())
FF, Chrome and Opera have started exposing getUserMedia via navigator.mediaDevices as standard now (might change :)
navigator.mediaDevices.getUserMedia({ audio: true, video: true })
    .then(stream => {
        window.localStream = stream;
    })
    .catch((err) => {
        console.log(err);
    });

// later you can do the following

// stop both video and audio
localStream.getTracks().forEach((track) => {
    track.stop();
});

// stop only audio
localStream.getAudioTracks()[0].stop();

// stop only video
localStream.getVideoTracks()[0].stop();
Suppose we have a stream playing in a video tag with id video - <video id="video"></video> - then we can use the following code:
var videoEl = document.getElementById('video');
// now get the stream
var stream = videoEl.srcObject;
// now get all tracks
var tracks = stream.getTracks();
// now close each track with a forEach loop
tracks.forEach(function(track) {
    // stopping every track
    track.stop();
});
// assign null to srcObject of video
videoEl.srcObject = null;
Starting Webcam Video with different browsers
For Opera 12
window.navigator.getUserMedia(param, function(stream) {
    video.src = window.URL.createObjectURL(stream);
}, videoError);
For Firefox Nightly 18.0
window.navigator.mozGetUserMedia(param, function(stream) {
    video.mozSrcObject = stream;
}, videoError);
For Chrome 22
window.navigator.webkitGetUserMedia(param, function(stream) {
    video.src = window.webkitURL.createObjectURL(stream);
}, videoError);
Stopping Webcam Video with different browsers
For Opera 12
video.pause();
video.src = null;
For Firefox Nightly 18.0
video.pause();
video.mozSrcObject = null;
For Chrome 22
video.pause();
video.src="";
With this, the webcam light goes off every time...
Try the method below:
var mediaStream = null;
navigator.getUserMedia(
    {
        audio: true,
        video: true
    },
    function (stream) {
        mediaStream = stream;
        mediaStream.stop = function () {
            this.getAudioTracks().forEach(function (track) {
                track.stop();
            });
            this.getVideoTracks().forEach(function (track) { // in case... :)
                track.stop();
            });
        };
        /*
         * Rest of your code.....
         * */
    });

/*
 * somewhere inside your code you call
 * */
mediaStream.stop();
You can end the stream directly using the stream object returned in the success handler to getUserMedia, e.g.
localMediaStream.stop()
video.src = "" or null would just remove the source from the video tag. It won't release the hardware.
Since you need the tracks to close the stream, and you need the stream object to get to the tracks, the code I have used with the help of Muaz Khan's answer above is as follows:
if (navigator.getUserMedia) {
    navigator.getUserMedia(constraints, function (stream) {
        videoEl.srcObject = stream; // assign the stream itself, not a string
        videoEl.play();
        document.getElementById('close').addEventListener('click', function () {
            stopStream(stream);
        });
    }, errBack);
}

function stopStream(stream) {
    console.log('stop called');
    stream.getVideoTracks().forEach(function (track) {
        track.stop();
    });
}
Of course this will close all the active video tracks. If you have multiple, you should select accordingly.
If .stop() is deprecated then I don't think we should re-add it like #MuazKhan does. There's a reason why things get deprecated and should not be used anymore. Just create a helper function instead... Here is a more ES6 version:
function stopStream(stream) {
    for (let track of stream.getTracks()) {
        track.stop()
    }
}
You need to stop all tracks (from webcam, microphone):
localStream.getTracks().forEach(track => track.stop());
Start and Stop Web Camera (Update 2020, React ES6)
Start Web Camera
startWebCamera = () => {
    // Start web cam
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        // use webcam
        navigator.mediaDevices.getUserMedia({ video: true }).then(stream => {
            this.localStream = stream;
            this.video.srcObject = stream;
            this.video.play();
        });
    }
}
Stop Web Camera or Video playback in general
stopVideo = () => {
    this.video.pause();
    this.video.src = "";
    this.video.srcObject = null;
    // As per the new API, stop all streams
    if (this.localStream)
        this.localStream.getTracks().forEach(track => track.stop());
}
The stop function above works even with regular video playback:
this.video.src = this.state.videoToTest;
this.video.play();
Using .stop() on the stream works in Chrome when connected via HTTP. It does not work when using SSL (HTTPS).
Please check this: https://jsfiddle.net/wazb1jks/3/
navigator.getUserMedia(mediaConstraints, function(stream) {
    window.streamReference = stream;
}, onMediaError);
Stop Recording
function stopStream() {
    if (!window.streamReference) return;

    window.streamReference.getAudioTracks().forEach(function(track) {
        track.stop();
    });

    window.streamReference.getVideoTracks().forEach(function(track) {
        track.stop();
    });

    window.streamReference = null;
}
The following code worked for me:
public vidOff() {
    let stream = this.video.nativeElement.srcObject;
    let tracks = stream.getTracks();

    tracks.forEach(function (track) {
        track.stop();
    });

    this.video.nativeElement.srcObject = null;
    // video elements have no stop() method; pause() is the closest equivalent
    this.video.nativeElement.pause();
}
Keep a reference to the stream from the success handler:
var streamRef;

var handleVideo = function (stream) {
    streamRef = stream;
}

// this will stop both video and audio tracks
streamRef.getTracks().forEach(function (val) {
    val.stop();
});
