WebRTC audio problems when initiating connection from desktop to mobile - javascript

I am making a web app using WebRTC that allows two users to communicate with each other using both video and audio. The app uses Node.js as the signaling server. It works fine when communicating between two desktops, but when I try desktop-to-mobile communication, if the user initiating the offer is the one on the desktop, the one on mobile can't hear any sound. If it happens the other way around, both have audio. When I check the devtools, the audio stream is sent from the desktop and received by the mobile (it is active and not muted), but there is no sound. I use an audio element to play the audio stream and a video element to play the video stream. I have tested this on both Chrome and Firefox and I encounter the same problem.
If anyone can help it would be greatly appreciated.
Below are code samples of the ontrack event:
rtcConnection.ontrack = function(event) {
  console.log('Remote stream received.');
  if (event.streams[0].getAudioTracks().length > 0) {
    event.streams[0].getAudioTracks().forEach((track) => {
      remoteAudioStream.addTrack(track);
    });
    audioPlayer.srcObject = remoteAudioStream;
  }
  if (event.streams[0].getVideoTracks().length > 0) {
    event.streams[0].getVideoTracks().forEach((track) => {
      remoteVideoStream.addTrack(track);
    });
    localVideo.srcObject = remoteVideoStream;
  }
};
and the code that captures the media stream:
function getUserMedia() {
  let getAudio = true;
  let getVideo = true;
  let constraints = { audio: getAudio, video: getVideo };
  navigator.mediaDevices.getUserMedia(constraints) // ask the user to allow access to their media devices
    .then(
      function(data) { // if yes, get stream config data and join room
        localStream = data;
        console.log('Getting user media succeeded.');
        console.log('RTC Connection created. Getting user media. Adding stream tracks to RTC connection');
        sendMessage({ type: 'peermessage', messagetype: 'info', messagetext: 'Peer started video streaming.' });
        // stream to be sent to the other user
        localStream.getTracks().forEach(track => rtcConnection.addTrack(track, localStream));
        console.log('Creating offer');
        rtcConnection.createOffer()
          .then(function(offer) { // createOffer success
            console.log('Offer created. Setting it as local description');
            return rtcConnection.setLocalDescription(offer);
          }, logError) // createOffer error
          .then(function() { // setLocalDescription success
            console.log('Offer set as local description. Sending it to agent');
            sendMessage(rtcConnection.localDescription);
          }, logError); // setLocalDescription error
      }
    );
}
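One thing worth ruling out here (a hedged diagnostic sketch, not part of the original code): mobile browsers apply strict autoplay policies, and an audio element's play() call can be rejected silently when it isn't tied to a user gesture, even while the track shows as live and unmuted in devtools. Handling the promise returned by play() makes that failure visible:

audioPlayer.srcObject = remoteAudioStream;
const playPromise = audioPlayer.play();
if (playPromise !== undefined) {
  playPromise.catch((err) => {
    // A NotAllowedError here usually means autoplay was blocked; retrying
    // play() from a click/tap handler counts as a user gesture.
    console.warn('Audio playback failed to start:', err.name);
  });
}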

Related

How to add Video Track to Simple-Peer stream?

I am trying to add a video track to the user's stream object so that the peers can see it. In componentDidMount() I initially get the permission to use the microphone, but I have a button that I would like to use to add a video track.
I have a mute/unmute button that toggles the audio, and that works just fine, but when I try to add a video track the same way, I can't get it to reach the peers.
This is the code I use to get access to the microphone only:
getAudio(callback, err) {
  const options = { video: false, audio: true };
  if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
    return navigator.mediaDevices.getUserMedia(options)
      .then(stream => callback(stream))
      .catch(e => err(e));
  }
  return navigator.getUserMedia(options, callback, err);
}
I call this in componentDidMount() like so:
this.getAudio(this.onAudio, err => {
  this.setState({
    mediaErr: 'Could not access webcam'
  });
  console.log('getMedia error', err);
});
The onAudio() creates the peers, since it runs on mount. I have a button I use to mute/unmute the audio like this:
toggleMicrophone() {
  const audioTrack = this.stream.getAudioTracks()[0];
  audioTrack.enabled = !audioTrack.enabled;
  this.setState({
    microphoneEnabled: audioTrack.enabled
  });
}
This works fine, so I tried to add the video track in pretty much the same way. I have a button that calls getVideo(), which is identical to getAudio() except that in the options, audio and video are both set to true. getVideo() calls onVideo(), passing it the stream it gets from getUserMedia().
The onVideo() function:
onVideo(stream) {
  this.stream.addTrack(stream.getVideoTracks()[0]);
}
Since the mute button worked just by disabling the audio track, I thought I could just add the video track here and the peers would see the video stream, but it doesn't work that way.
The video track appears for the user that pressed the button, but not for the peers.
What am I missing?
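For what it's worth, calling addTrack() on the local MediaStream only changes the local object; nothing tells the remote peers about the new track. simple-peer has its own addTrack(track, stream) method that triggers renegotiation. A minimal sketch, assuming this.peers (a hypothetical name) holds the simple-peer instances created in onAudio():

onVideo(stream) {
  const videoTrack = stream.getVideoTracks()[0];
  // keep the local stream in sync for the self-view
  this.stream.addTrack(videoTrack);
  // notify each simple-peer instance so it can renegotiate with the remote side
  this.peers.forEach(peer => peer.addTrack(videoTrack, this.stream));
}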

How does window.addEventListener('online') work?

I am just confused about how window.addEventListener('online') and window.addEventListener('offline') work.
I have created an LG TV webOS application where, if a video is playing and the internet connection is lost during playback, it should show an alert message.
So I used these window events, but they only fire when my wifi or network is disconnected, not when I am connected to wifi but there is no internet on it.
What I want is for the alert to be displayed when I am connected to wifi but there is no internet available. Is there any way to do this?
window.addEventListener('online', updateOnlineStatus);
window.addEventListener('offline', updateOnlineStatus);

function updateOnlineStatus(event) {
  console.log("----------------- Control comes into updateOnlineStatus --------------");
  console.log("event", event);
  var errorModal = document.getElementById("errorModal");
  var condition = navigator.onLine ? "online" : "offline";
  if (condition == "online") {
    console.log("----------- Internet is connected ----------------");
    errorModal.style.display = "none";
    video.play();
  } else {
    console.log("----------- Internet is NOT connected ----------------");
    video.pause();
    errorModal.style.display = "block";
    SpatialNavigation.makeFocusable();
    SpatialNavigation.focus("#ok_btn");
  }
}
If you are developing a webOS TV application, you should first check the native APIs of that platform:
Connection Manager
Event handler
WebOS services
You can get the connection status by using webOS's webOSDev.js library.
webOSDev.connection.getStatus({
  onSuccess: function (res) {
    if (res.isInternetConnectionAvailable === false) {
      // the internet connection is not available
    } else {
      // the internet connection is available
    }
  },
  onFailure: function (res) {
    // the API request failed
  },
  subscribe: true
});
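For the "connected to wifi but no internet" case specifically, navigator.onLine only reflects the state of the network interface, so an active reachability probe is the usual fallback when the webOS API is not available. A minimal sketch; the probe URL is a placeholder and should be an endpoint you control:

function checkInternet() {
  // cache-busting HEAD request; any network failure implies no working internet route
  return fetch('https://example.com/ping?ts=' + Date.now(), { method: 'HEAD', cache: 'no-store' })
    .then(res => res.ok)
    .catch(() => false);
}

checkInternet().then(ok => {
  if (!ok) {
    // treat as offline even though navigator.onLine may report "online"
  }
});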

webrtc audio device disconnection and reconnection

I have a video call application based on WebRTC. It works as expected. However, when a call is going on, if I disconnect and reconnect the audio device (mic + speaker), only the speaker part keeps working. The mic part seems to stop working: the other side can't hear anymore.
Is there any way to inform WebRTC to take audio input again once audio device is connected back?
Is there any way to inform WebRTC to take audio input again once audio device is connected back?
Your question appears simple—the symmetry with speakers is alluring—but once we're dealing with users who have multiple cameras and microphones, it's not that simple: If your user disconnects their bluetooth headset they were using, should you wait for them to reconnect it, or immediately switch to their laptop microphone? If the latter, do you switch back if they reconnect it later? These are application decisions.
The APIs to handle these things are: primarily the ended and devicechange events, and the replaceTrack() method. You may also need the deviceId constraint, and the enumerateDevices() method to handle multiple devices.
However, to keep things simple, let's take the assumptions in your question at face value to explore the APIs:
When the user unplugs their sole microphone (not their camera) mid-call, our job is to resume conversation with it when they reinsert it, without dropping video:
First, we listen to the ended event to learn when our local audio track drops.
When that happens, we listen for a devicechange event to detect re-insertion (of anything).
When that happens, we could check what changed using enumerateDevices(), or simply try getUserMedia again (microphone only this time).
If that succeeds, use await sender.replaceTrack(newAudioTrack) to send our new audio.
This might look like this:
let sender;

(async () => {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({video: true, audio: true});
    pc.addTrack(stream.getVideoTracks()[0], stream);
    sender = pc.addTrack(stream.getAudioTracks()[0], stream);
    sender.track.onended = () => navigator.mediaDevices.ondevicechange = tryAgain;
  } catch (e) {
    console.log(e);
  }
})();

async function tryAgain() {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({audio: true});
    await sender.replaceTrack(stream.getAudioTracks()[0]);
    navigator.mediaDevices.ondevicechange = null;
    sender.track.onended = () => navigator.mediaDevices.ondevicechange = tryAgain;
  } catch (e) {
    if (e.name == "NotFoundError") return;
    console.log(e);
  }
}
// Your usual WebRTC negotiation code goes here
The above is for illustration only. I'm sure there are lots of corner cases to consider.
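To flesh out the multi-device case mentioned earlier, here is a hedged sketch of combining enumerateDevices() with the deviceId constraint to prefer a specific microphone. Matching by label is just an illustration, and labels are only populated once the page has microphone permission:

async function getMicByLabel(preferredLabel) {
  const devices = await navigator.mediaDevices.enumerateDevices();
  const mics = devices.filter(d => d.kind === 'audioinput');
  const preferred = mics.find(d => d.label === preferredLabel);
  // fall back to any microphone if the preferred one is not present
  const audio = preferred ? { deviceId: { exact: preferred.deviceId } } : true;
  return navigator.mediaDevices.getUserMedia({ audio });
}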

MeteorJS - How to record sound from phone mic?

I was able to get sound byte data from the browser, but when I build it for the phone and try it out it doesn't work.
I have checked around, and people had luck with cordova-plugin-media-capture; however, it seems to record audio and archive it on the device.
I need to somehow get the audio data and manipulate it myself on the phone.
What I am currently doing to get audio on a non-mobile device:
navigator.mediaDevices.getUserMedia({audio: true}).then(onMediaSuccess).catch(onMediaError);

function onMediaSuccess(stream) {
  mediaRecorder = new MediaStreamRecorder(stream);
  mediaRecorder.mimeType = 'audio/wav';
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.start();

  function handleDataAvailable(blob) {
    toBuffer(blob, function(err, buffer) {
      if (err)
        throw err;
      ws.publish(`com.app.audioStream__`, {payload: buffer});
    });
  }
}
However, getUserMedia seems to have no effect.
I have tried using navigator.device.capture.captureAudio, but it is slightly difficult to debug it on my phone and see what the console outputs.
When I do use the method, though, it brings up a Cordova-managed UI that wants me to record a sound and stop it, then writes the result to a file. I am looking to actively read the mic.
Edit:
I have even tried with cordova-diagnostic-plugin
// check and request microphone access
cordova.plugins.diagnostic.getMicrophoneAuthorizationStatus(function(status) {
  if (status !== "GRANTED") {
    cordova.plugins.diagnostic.requestMicrophoneAuthorization(function(status) {
      //...
      return;
    });
  }
}, function() {
  throw new Meteor.Error('failed to get permission for microphone');
});
which results in a blank screen.
So far there has been no easy way for me to prompt mic permissions.
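One avenue that may fit the "actively reading the mic" requirement is the cordova-plugin-audioinput plugin, which emits raw PCM chunks through an event instead of recording to a file. The shape below follows its README, but the option and event names are assumptions to verify against the plugin version you install:

// fires repeatedly with a chunk of raw samples while capture is running
window.addEventListener('audioinput', function(evt) {
  // evt.data is an array of PCM samples; process or forward it here
  ws.publish(`com.app.audioStream__`, { payload: evt.data });
});

audioinput.start({
  sampleRate: 44100,
  bufferSize: 16384,
  streamToWebAudio: false // deliver chunks via the 'audioinput' event
});

// later, to end capture: audioinput.stop();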

Trouble with WebRTC in Nightly (22) and Chrome (25)

I'm experimenting with WebRTC between two browsers using RTCPeerConnection and my own long-polling implementation. I've created a demo application, which works successfully in Mozilla Nightly (22); however, in Chrome (25), I can't get any remote video and only an "empty black video" appears. Is there something wrong in my JS code?
The function sendMessage(message) sends a message to the server via long-polling, and on the other side it is received by onMessage().
var peerConnection;
var peerConnection_config = {"iceServers": [{"url": "stun:23.21.150.121"}]};

// when a message from the server is received
function onMessage(evt) {
  if (!peerConnection)
    call(false);
  var signal = JSON.parse(evt);
  if (signal.sdp) {
    peerConnection.setRemoteDescription(new RTCSessionDescription(signal.sdp));
  } else {
    peerConnection.addIceCandidate(new RTCIceCandidate(signal.candidate));
  }
}

function call(isCaller) {
  peerConnection = new RTCPeerConnection(peerConnection_config);

  // send any ice candidates to the other peer
  peerConnection.onicecandidate = function(evt) {
    sendMessage(JSON.stringify({"candidate": evt.candidate}));
  };

  // once the remote stream arrives, show it in the remote video element
  peerConnection.onaddstream = function(evt) {
    // attach media stream to remote video - WebRTC wrapper
    attachMediaStream($("#remote-video").get(0), evt.stream);
  };

  // get the local stream, show it in the local video element and send it
  getUserMedia({"audio": true, "video": true}, function(stream) {
    // attach media stream to local video - WebRTC wrapper
    attachMediaStream($("#local-video").get(0), stream);
    $("#local-video").get(0).muted = true;
    peerConnection.addStream(stream);
    if (isCaller)
      peerConnection.createOffer(gotDescription);
    else
      peerConnection.createAnswer(gotDescription);

    function gotDescription(desc) {
      sendMessage(JSON.stringify({"sdp": desc}));
      peerConnection.setLocalDescription(desc);
    }
  }, function() {
  });
}
My best guess is that there is a problem with your STUN server configuration. To determine whether this is the issue, try using Google's public STUN server stun:stun.l.google.com:19302 (which won't work in Firefox, but should definitely work in Chrome), or test on a local network with no STUN server configured.
Also, verify that your ICE candidates are being delivered properly. Firefox doesn't actually generate 'icecandidate' events (it includes the candidates in the offer/answer), so an issue with delivering candidate messages could also explain the discrepancy.
Also make sure the autoplay attribute is set on your video tags.
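On the candidate-delivery point, note that evt.candidate is null once gathering finishes, and the code above forwards that null to the other side, which then wraps it in new RTCIceCandidate(...); Chrome builds of that era would throw on this. A small guard on both ends (a sketch of the idea, not tested against those exact versions):

// sending side: skip the end-of-gathering null candidate
peerConnection.onicecandidate = function(evt) {
  if (evt.candidate) {
    sendMessage(JSON.stringify({"candidate": evt.candidate}));
  }
};

// receiving side: only add real candidates
if (signal.candidate) {
  peerConnection.addIceCandidate(new RTCIceCandidate(signal.candidate));
}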
