WebRTC: Firefox unable to receive video as callee - javascript

I'm new to WebRTC and am trying to create a simple test page that allows for 1-to-1 video conferences.
So far what I have below works for the following:
Chrome -> Chrome
Firefox -> Chrome
It does not work in the following situations:
Firefox -> Firefox
Chrome -> Firefox
Basically, Firefox is unable to receive the remote video stream when it is the callee. The onaddstream handler is never called, but I do not know why.
The only difference I have found between Firefox and Chrome is that Firefox does not trickle ICE candidates: they are included in the initial offer/answer, so the onicecandidate handler is never called with a non-null candidate. Chrome, on the other hand, does trickle candidates, which are sent to the signaling server as separate messages. I'm not sure whether this is the problem or not.
I am testing with Firefox Nightly 34 and Chrome 36. The signaling server is hosted in my LAN and both browsers are running on the same computer.
The following code is based on http://www.html5rocks.com/en/tutorials/webrtc/basics/.
var localVideo;
var remoteVideo;
var localStream;
var serverConnection;
var peerConnection;
var peerConnectionConfig = {'iceServers': [{'url': 'stun:stun.services.mozilla.com'}, {'url': 'stun:stun.l.google.com:19302'}]};
var isCaller;
navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
window.RTCPeerConnection = window.RTCPeerConnection || window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCIceCandidate = window.RTCIceCandidate || window.mozRTCIceCandidate || window.webkitRTCIceCandidate;
window.RTCSessionDescription = window.RTCSessionDescription || window.mozRTCSessionDescription || window.webkitRTCSessionDescription;
function pageReady() {
localVideo = document.getElementById('localVideo');
remoteVideo = document.getElementById('remoteVideo');
serverConnection = new WebSocket('ws://myserver:3434');
serverConnection.onmessage = gotMessageFromServer;
}
function start(_isCaller) {
isCaller = _isCaller
peerConnection = new RTCPeerConnection(peerConnectionConfig);
peerConnection.onicecandidate = gotIceCandidate;
peerConnection.onaddstream = gotRemoteStream;
var constraints = {
video: true,
audio: true,
};
if(navigator.getUserMedia) {
navigator.getUserMedia(constraints, getUserMediaSuccess, getUserMediaError);
} else {
alert('Your browser does not support getUserMedia API');
}
}
function getUserMediaSuccess(stream) {
localStream = stream;
localVideo.src = window.URL.createObjectURL(stream);
peerConnection.addStream(stream);
if(isCaller) {
peerConnection.createOffer(gotDescription, createOfferError);
} else {
peerConnection.createAnswer(gotDescription, createAnswerError);
}
}
function gotMessageFromServer(message) {
if(!peerConnection) start(false);
var signal = JSON.parse(message.data);
if(signal.sdp) {
peerConnection.setRemoteDescription(new RTCSessionDescription(signal.sdp));
} else if(signal.ice) {
peerConnection.addIceCandidate(new RTCIceCandidate(signal.ice));
}
}
function gotIceCandidate(event) {
if(event.candidate != null) {
serverConnection.send(JSON.stringify({'ice': event.candidate}));
}
}
function gotDescription(description) {
peerConnection.setLocalDescription(description);
serverConnection.send(JSON.stringify({'sdp': description}));
}
function gotRemoteStream(event) {
console.log("got remote stream");
remoteVideo.src = window.URL.createObjectURL(event.stream);
}
// Error functions....
function getUserMediaError(error) {
console.log(error);
}
function createOfferError(error) {
console.log(error);
}
function createAnswerError(error) {
console.log(error);
}
If it's useful, here is my signaling server. It's a very basic Node.js script that uses the ws WebSocket library to simply broadcast received messages to all connected clients (which, in this case, is just the one other client).
var WebSocketServer = require('ws').Server;
var wss = new WebSocketServer({port: 3434});
wss.broadcast = function(data) {
for(var i in this.clients) {
this.clients[i].send(data);
}
};
wss.on('connection', function(ws) {
ws.on('message', function(message) {
console.log('received: %s', message);
wss.broadcast(message);
});
});
Any help is greatly appreciated!

It turns out I had a race condition between the call to setRemoteDescription() and createAnswer(). Firefox asks for permission to use the webcam on every reload of the page, whereas Chrome only asks on the first page load. This meant that when I went to call createAnswer() in Firefox, the video stream did not exist yet because the user had not yet allowed access to the webcam. The solution was to move the call to getUserMedia() to when the page is ready. That way, when an incoming call occurs, the user has already allowed access to the webcam and the video stream can be shared immediately. It's not perfect, but it lets my little test page work, which is all I'm going for right now.
Here's the updated code if it helps anyone in the future:
var localVideo;
var remoteVideo;
var localStream;
var serverConnection;
var peerConnection;
var peerConnectionConfig = {'iceServers': [{'url': 'stun:stun.services.mozilla.com'}, {'url': 'stun:stun.l.google.com:19302'}]};
navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
window.RTCPeerConnection = window.RTCPeerConnection || window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCIceCandidate = window.RTCIceCandidate || window.mozRTCIceCandidate || window.webkitRTCIceCandidate;
window.RTCSessionDescription = window.RTCSessionDescription || window.mozRTCSessionDescription || window.webkitRTCSessionDescription;
function pageReady() {
localVideo = document.getElementById('localVideo');
remoteVideo = document.getElementById('remoteVideo');
serverConnection = new WebSocket('ws://sagan:3434');
serverConnection.onmessage = gotMessageFromServer;
var constraints = {
video: true,
audio: true,
};
if(navigator.getUserMedia) {
navigator.getUserMedia(constraints, getUserMediaSuccess, getUserMediaError);
} else {
alert('Your browser does not support getUserMedia API');
}
}
function getUserMediaSuccess(stream) {
localStream = stream;
localVideo.src = window.URL.createObjectURL(stream);
}
function start(isCaller) {
peerConnection = new RTCPeerConnection(peerConnectionConfig);
peerConnection.onicecandidate = gotIceCandidate;
peerConnection.onaddstream = gotRemoteStream;
peerConnection.addStream(localStream);
if(isCaller) {
peerConnection.createOffer(gotDescription, createOfferError);
}
}
function gotMessageFromServer(message) {
if(!peerConnection) start(false);
var signal = JSON.parse(message.data);
if(signal.sdp) {
peerConnection.setRemoteDescription(new RTCSessionDescription(signal.sdp), function() {
peerConnection.createAnswer(gotDescription, createAnswerError);
});
} else if(signal.ice) {
peerConnection.addIceCandidate(new RTCIceCandidate(signal.ice));
}
}
function gotIceCandidate(event) {
if(event.candidate != null) {
serverConnection.send(JSON.stringify({'ice': event.candidate}));
}
}
function gotDescription(description) {
console.log('got description');
peerConnection.setLocalDescription(description, function () {
serverConnection.send(JSON.stringify({'sdp': description}));
}, function() {console.log('set description error')});
}
function gotRemoteStream(event) {
console.log("got remote stream");
remoteVideo.src = window.URL.createObjectURL(event.stream);
}
// Error functions....
function getUserMediaError(error) {
console.log(error);
}
function createOfferError(error) {
console.log(error);
}
function createAnswerError(error) {
console.log(error);
}

Related

How to restart track in webrtc video calling

I implemented video calling using WebRTC with the https://github.com/webtutsplus/videoChat-WebFrontend library. But when the sender switches the camera, the sender's video stops at the receiver's end.
I followed the Unable to change camera / stream for WebRTC call link to switch the camera in a video call between 2 users, but the receiver no longer gets the sender's video after the camera is switched.
Could anyone please give me a solution for this issue?
Thanks.
This is the code to switch the camera.
$(".btn_rear_camera").click(function() {
if (cameratype == "user") {
capture('environment');
} else {
capture('user');
}
});
function capture(facingMode) {
cameratype = facingMode;
localStream.getTracks().forEach(function(track) {
track.stop();
});
var constraints = {
video: {
deviceId: devicesIds[1]
},
audio: true
};
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
replaceTracks(stream);
}).catch(function(error) {
});
}
function replaceTracks(newStream) {
var elementId = "localVideo";
detachMediaStream(elementId);
newStream.getTracks().forEach(function(track) {
localStream.addTrack(track);
});
attachMediaStream(elementId, newStream);
// optionally, if you have active peer connections:
_replaceTracksForPeer(peerConnection);
function _replaceTracksForPeer(peer) {
peer.getSenders().map(function(sender) {
sender.replaceTrack(newStream.getTracks().find(function(track) {
return track.kind === sender.track.kind;
}));
});
}
}
function detachMediaStream(id) {
var elem = document.getElementById(id);
if (elem) {
elem.pause();
if (typeof elem.srcObject === 'object') {
elem.srcObject = null;
} else {
elem.src = '';
}
}
};
function attachMediaStream(id, stream) {
var elem = document.getElementById(id);
if (elem) {
if (typeof elem.srcObject === 'object') {
elem.srcObject = stream;
} else {
elem.src = window.URL.createObjectURL(stream);
}
elem.onloadedmetadata = function(e) {
elem.play();
};
} else {
throw new Error('Unable to attach media stream');
}
};
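For comparison, here is a minimal sketch of a facingMode-based switch that swaps the outgoing video track with RTCRtpSender.replaceTrack(); it assumes the global peerConnection, localStream and the attachMediaStream() helper from the code above, and it is only a sketch, not part of the videoChat-WebFrontend library.
// Minimal sketch: switch between front and back cameras by requesting a new
// stream with facingMode and swapping the outgoing video track in place.
var currentFacing = 'user';
function switchCamera() {
    var newFacing = currentFacing === 'user' ? 'environment' : 'user';
    navigator.mediaDevices.getUserMedia({
        video: { facingMode: newFacing },
        audio: false
    }).then(function(newStream) {
        var newTrack = newStream.getVideoTracks()[0];
        var sender = peerConnection.getSenders().find(function(s) {
            return s.track && s.track.kind === 'video';
        });
        // Replace the outgoing track without renegotiating the connection.
        return sender.replaceTrack(newTrack).then(function() {
            // Stop and drop the old local video track only after the swap succeeds.
            localStream.getVideoTracks().forEach(function(track) {
                track.stop();
                localStream.removeTrack(track);
            });
            localStream.addTrack(newTrack);
            attachMediaStream('localVideo', localStream);
            currentFacing = newFacing;
        });
    }).catch(function(error) {
        console.log('Camera switch failed: ', error);
    });
}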

Javascript WebRTC Failed to set remote answer sdp: Called in wrong state: kHaveRemoteOffer and Called in wrong state: kStable

I can't get my WebRTC code to work properly. I believe I did everything right, but it's still not working. One strange thing is that ontrack gets called very early; maybe it's supposed to be like that.
The website uses JavaScript. I didn't post the server code, but the part where the WebSockets connect is just an exchanger: whatever you send to the server, it sends the same information on to the other partner (stranger) you are connected to.
The server code looks like this small sample:
private void writeStranger(UserProfile you, String msg) {
UserProfile stranger = you.stranger;
if(stranger != null)
sendMessage(stranger.getWebSocket(), msg);
}
public void sendMessage(WebSocket websocket, String msg) {
try {
websocket.send(msg);
} catch ( WebsocketNotConnectedException e ) {
disconnnectClient(websocket);
}
}
//...
case "ice_candidate":
JSONObject candidatePackage = (JSONObject) packet.get(1);
JSONObject candidate = (JSONObject) candidatePackage.get("candidate");
obj = new JSONObject();
list = new JSONArray();
list.put("iceCandidate");
obj.put("candidate", candidate);
list.put(obj);
System.out.println("Sent = " + list.toString());
writeStranger(you, list.toString()); //send ice candidate to stranger
break;
case "send_answer":
JSONObject sendAnswerPackage = (JSONObject) packet.get(1);
JSONObject answer = (JSONObject) sendAnswerPackage.get("answer");
obj = new JSONObject();
list = new JSONArray();
list.put("getAnswer");
obj.put("answer", answer);
list.put(obj);
System.out.println("Sent = " + list.toString());
writeStranger(you, list.toString()); //send answer to stranger
break;
case "send_offer":
JSONObject offerPackage = (JSONObject) packet.get(1);
JSONObject offer = (JSONObject) offerPackage.get("offer");
obj = new JSONObject();
list = new JSONArray();
list.put("getOffer");
obj.put("offer", offer);
list.put(obj);
System.out.println("Sent = " + list.toString());
writeStranger(you, list.toString()); //send ice candidate to stranger
break;
Here are my outputs.
RAW Text: https://pastebin.com/raw/FL8g29gG
JSON colored: https://pastebin.com/FL8g29gG
My JavaScript code is below:
var ws;
var peerConnection, localStream;
var rtc_server = {
iceServers: [
{urls: "stun:stun.l.google.com:19302"},
{urls: "stun:stun.services.mozilla.com"},
{urls: "stun:stun.stunprotocol.org:3478"},
{url: "stun:stun.l.google.com:19302"},
{url: "stun:stun.services.mozilla.com"},
{url: "stun:stun.stunprotocol.org:3478"},
]
}
//offer SDP (Session Description Protocol) tells other peers what you would like
var rtc_media_constraints = {
mandatory: {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
}
};
var rtc_peer_options = {
optional: [
{DtlsSrtpKeyAgreement: true}, //To make Chrome and Firefox to interoperate.
]
}
var PeerConnection = RTCPeerConnection || window.PeerConnection || window.webkitPeerConnection || window.webkitRTCPeerConnection || window.mozRTCPeerConnection;
var IceCandidate = RTCIceCandidate || window.mozRTCIceCandidate || window.RTCIceCandidate;
var SessionDescription = RTCSessionDescription || window.mozRTCSessionDescription || window.RTCSessionDescription;
var getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
function hasSupportForVideoChat() {
return window.RTCPeerConnection && window.RTCIceCandidate && window.RTCSessionDescription && navigator.mediaDevices && navigator.mediaDevices.getUserMedia && (RTCPeerConnection.prototype.addStream || RTCPeerConnection.prototype.addTrack) ? true : false;
}
function loadMyCameraStream() {
if (getUserMedia) {
getUserMedia.call(navigator, { video: {facingMode: "user", aspectRatio: 4 / 3/*height: 272, width: 322*/}, audio: { echoCancellation : true } },
function(localMediaStream) {
//Add my video
$("div#videoBox video#you")[0].muted = true;
$("div#videoBox video#you")[0].autoplay = true;
$("div#videoBox video#you").attr('playsinline', '');
$("div#videoBox video#you").attr('webkit-playsinline', '');
$("div#videoBox video#you")[0].srcObject = localMediaStream;
localStream = localMediaStream;
},
function(e) {
addStatusMsg("Your Video has error : " + e);
}
);
} else {
addStatusMsg("Your browser does not support WebRTC (Camera/Voice chat).");
return;
}
}
function loadStrangerCameraStream() {
if(!hasSupportForVideoChat())
return;
peerConnection = new PeerConnection(rtc_server, rtc_peer_options);
if (peerConnection.addTrack !== undefined)
localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream));
else
peerConnection.addStream(localStream);
peerConnection.onicecandidate = function(e) {
if (!e || !e.candidate)
return;
ws.send(JSON.stringify(['ice_candidate', {"candidate": e.candidate}]));
};
if (peerConnection.addTrack !== undefined) {
//newer technology
peerConnection.ontrack = function(e) {
//e.streams.forEach(stream => doAddStream(stream));
addStatusMsg("ontrack called");
//Add stranger video
$("div#videoBox video#stranger").attr('playsinline', '');
$("div#videoBox video#stranger").attr('webkit-playsinline', '');
$('div#videoBox video#stranger')[0].srcObject = e.streams[0];
$("div#videoBox video#stranger")[0].autoplay = true;
};
} else {
//older technology
peerConnection.onaddstream = function(e) {
addStatusMsg("onaddstream called");
//Add stranger video
$("div#videoBox video#stranger").attr('playsinline', '');
$("div#videoBox video#stranger").attr('webkit-playsinline', '');
$('div#videoBox video#stranger')[0].srcObject = e.stream;
$("div#videoBox video#stranger")[0].autoplay = true;
};
}
peerConnection.createOffer(
function(offer) {
peerConnection.setLocalDescription(offer, function () {
//both offer and peerConnection.localDescription are the same.
addStatusMsg('createOffer, localDescription: ' + JSON.stringify(peerConnection.localDescription));
//addStatusMsg('createOffer, offer: ' + JSON.stringify(offer));
ws.send(JSON.stringify(['send_offer', {"offer": peerConnection.localDescription}]));
},
function(e) {
addStatusMsg('createOffer, set description error' + e);
});
},
function(e) {
addStatusMsg("createOffer error: " + e);
},
rtc_media_constraints
);
}
function closeStrangerCameraStream() {
$('div#videoBox video#stranger')[0].srcObject = null
if(peerConnection)
peerConnection.close();
}
function iceCandidate(candidate) {
//ICE = Interactive Connectivity Establishment
if(peerConnection)
peerConnection.addIceCandidate(new IceCandidate(candidate));
else
addStatusMsg("peerConnection not created error");
addStatusMsg("Peer Ice Candidate = " + JSON.stringify(candidate));
}
function getAnswer(answer) {
if(!hasSupportForVideoChat())
return;
if(peerConnection) {
peerConnection.setRemoteDescription(new SessionDescription(answer), function() {
console.log("get answer ok");
addStatusMsg("peerConnection, SessionDescription answer is ok");
},
function(e) {
addStatusMsg("peerConnection, SessionDescription fail error: " + e);
});
}
}
function getOffer(offer) {
if(!hasSupportForVideoChat())
return;
addStatusMsg("peerConnection, setRemoteDescription offer: " + JSON.stringify(offer));
if(peerConnection) {
peerConnection.setRemoteDescription(new SessionDescription(offer), function() {
peerConnection.createAnswer(
function(answer) {
peerConnection.setLocalDescription(answer);
addStatusMsg("create answer sent: " + JSON.stringify(answer));
ws.send(JSON.stringify(['send_answer', {"answer": answer}]));
},
function(e) {
addStatusMsg("peerConnection, setRemoteDescription create answer fail: " + e);
}
);
});
}
}
My website where I use it: https://www.camspark.com/
I fixed it myself. I figured out I had 2 problems with this code.
The first problem was that createOffer() must only be called by one peer, not both; you have to pick (e.g. at random) which peer does the createOffer().
The second problem was the ICE candidates: you have to keep a queue/array on both sides that holds all incoming ice_candidate messages, and only call peerConnection.addIceCandidate(new IceCandidate(candidate)); once the offer/answer from the other side has been received and applied with setRemoteDescription().
Both getAnswer() and getOffer() use exactly the same code, but one is received by one client and the other by the other client. Both need to flush the iceCandidates array when they are triggered; if anyone wants, the two functions could be combined into one since the code is the same.
The final working code looks like this:
var ws;
var peerConnection, localStream;
//STUN = (Session Traversal Utilities for NAT)
var rtc_server = {
iceServers: [
{urls: "stun:stun.l.google.com:19302"},
{urls: "stun:stun.services.mozilla.com"},
{urls: "stun:stun.stunprotocol.org:3478"},
{url: "stun:stun.l.google.com:19302"},
{url: "stun:stun.services.mozilla.com"},
{url: "stun:stun.stunprotocol.org:3478"},
]
}
//offer SDP = [Session Description Protocol] tells other peers what you would like
var rtc_media_constraints = {
mandatory: {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
}
};
var rtc_peer_options = {
optional: [
{DtlsSrtpKeyAgreement: true}, //To make Chrome and Firefox to interoperate.
]
}
var finishSDPVideoOfferOrAnswer = false;
var isOfferer = false;
var iceCandidates = [];
var PeerConnection = RTCPeerConnection || window.PeerConnection || window.webkitPeerConnection || window.webkitRTCPeerConnection || window.mozRTCPeerConnection;
var IceCandidate = RTCIceCandidate || window.mozRTCIceCandidate || window.RTCIceCandidate;
var SessionDescription = RTCSessionDescription || window.mozRTCSessionDescription || window.RTCSessionDescription;
var getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
function hasSupportForVideoChat() {
return window.RTCPeerConnection && window.RTCIceCandidate && window.RTCSessionDescription && navigator.mediaDevices && navigator.mediaDevices.getUserMedia && (RTCPeerConnection.prototype.addStream || RTCPeerConnection.prototype.addTrack) ? true : false;
}
function loadMyCameraStream() {
if (getUserMedia) {
getUserMedia.call(navigator, { video: {facingMode: "user", aspectRatio: 4 / 3/*height: 272, width: 322*/}, audio: { echoCancellation : true } },
function(localMediaStream) {
//Add my video
$("div#videoBox video#you")[0].muted = true;
$("div#videoBox video#you")[0].autoplay = true;
$("div#videoBox video#you").attr('playsinline', '');
$("div#videoBox video#you").attr('webkit-playsinline', '');
$("div#videoBox video#you")[0].srcObject = localMediaStream;
localStream = localMediaStream;
},
function(e) {
addStatusMsg("Your Video has error : " + e);
}
);
} else {
addStatusMsg("Your browser does not support WebRTC (Camera/Voice chat).");
return;
}
}
function loadStrangerCameraStream(isOfferer_) {
if(!hasSupportForVideoChat())
return;
//Only add pending ICE Candidates when getOffer() is finished.
finishSDPVideoOfferOrAnswer = false;
iceCandidates = []; //clear ICE Candidates array.
isOfferer = isOfferer_;
peerConnection = new PeerConnection(rtc_server, rtc_peer_options);
if (peerConnection.addTrack !== undefined)
localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream));
else
peerConnection.addStream(localStream);
peerConnection.onicecandidate = function(e) {
if (!e || !e.candidate)
return;
ws.send(JSON.stringify(['ice_candidate', {"candidate": e.candidate}]));
};
if (peerConnection.addTrack !== undefined) {
//newer technology
peerConnection.ontrack = function(e) {
//e.streams.forEach(stream => doAddStream(stream));
addStatusMsg("ontrack called");
//Add stranger video
$("div#videoBox video#stranger").attr('playsinline', '');
$("div#videoBox video#stranger").attr('webkit-playsinline', '');
$('div#videoBox video#stranger')[0].srcObject = e.streams[0];
$("div#videoBox video#stranger")[0].autoplay = true;
};
} else {
//older technology
peerConnection.onaddstream = function(e) {
addStatusMsg("onaddstream called");
//Add stranger video
$("div#videoBox video#stranger").attr('playsinline', '');
$("div#videoBox video#stranger").attr('webkit-playsinline', '');
$('div#videoBox video#stranger')[0].srcObject = e.stream;
$("div#videoBox video#stranger")[0].autoplay = true;
};
}
if(isOfferer) {
peerConnection.createOffer(
function(offer) {
peerConnection.setLocalDescription(offer, function () {
//both offer and peerConnection.localDescription are the same.
addStatusMsg('createOffer, localDescription: ' + JSON.stringify(peerConnection.localDescription));
//addStatusMsg('createOffer, offer: ' + JSON.stringify(offer));
ws.send(JSON.stringify(['send_offer', {"offer": peerConnection.localDescription}]));
},
function(e) {
addStatusMsg('createOffer, set description error' + e);
});
},
function(e) {
addStatusMsg("createOffer error: " + e);
},
rtc_media_constraints
);
}
}
function closeStrangerCameraStream() {
$('div#videoBox video#stranger')[0].srcObject = null
if(peerConnection)
peerConnection.close();
}
function iceCandidate(candidate) {
//ICE = Interactive Connectivity Establishment
if(!finishSDPVideoOfferOrAnswer) {
iceCandidates.push(candidate);
addStatusMsg("Queued iceCandidate");
return;
}
if(!peerConnection) {
addStatusMsg("iceCandidate peerConnection not created error.");
return;
}
peerConnection.addIceCandidate(new IceCandidate(candidate));
addStatusMsg("Added on time, Peer Ice Candidate = " + JSON.stringify(candidate));
}
function getAnswer(answer) {
if(!hasSupportForVideoChat())
return;
if(!peerConnection) {
addStatusMsg("getAnswer peerConnection not created error.");
return;
}
peerConnection.setRemoteDescription(new SessionDescription(answer), function() {
addStatusMsg("getAnswer SessionDescription answer is ok");
finishSDPVideoOfferOrAnswer = true;
while (iceCandidates.length) {
var candidate = iceCandidates.shift();
try {
peerConnection.addIceCandidate(new IceCandidate(candidate));
addStatusMsg("Adding queued ICE Candidates");
} catch(e) {
addStatusMsg("Error adding queued ICE Candidates error:" + e);
}
}
iceCandidates = [];
},
function(e) {
addStatusMsg("getAnswer SessionDescription fail error: " + e);
});
}
function getOffer(offer) {
if(!hasSupportForVideoChat())
return;
if(!peerConnection) {
addStatusMsg("getOffer peerConnection not created error.");
return;
}
addStatusMsg("getOffer setRemoteDescription offer: " + JSON.stringify(offer));
peerConnection.setRemoteDescription(new SessionDescription(offer), function() {
finishSDPVideoOfferOrAnswer = true;
while (iceCandidates.length) {
var candidate = iceCandidates.shift();
try {
peerConnection.addIceCandidate(new IceCandidate(candidate));
addStatusMsg("Adding queued ICE Candidates");
} catch(e) {
addStatusMsg("Error adding queued ICE Candidates error:" + e);
}
}
iceCandidates = [];
if(!isOfferer) {
peerConnection.createAnswer(
function(answer) {
peerConnection.setLocalDescription(answer);
addStatusMsg("getOffer create answer sent: " + JSON.stringify(answer));
ws.send(JSON.stringify(['send_answer', {"answer": answer}]));
},
function(e) {
addStatusMsg("getOffer setRemoteDescription create answer fail: " + e);
}
);
}
});
}
Here is the patch I made on the server-side (Java) WebSocket server.
//JSON sent to each client:
//["loadStrangerCameraStream", {videoChatOfferer: true}]
//["loadStrangerCameraStream", {videoChatOfferer: false}]
JSONObject obj = new JSONObject();
JSONArray list = new JSONArray();
list.put("loadStrangerCameraStream");
obj.put("videoChatOfferer", true); //first guy is the offerer for WebRTC.
list.put(obj);
server.sendMessage(websocket, list.toString()); //connected to chat partner
//Build a fresh message for the second client; reusing obj/list would mutate the one already sent.
obj = new JSONObject();
list = new JSONArray();
list.put("loadStrangerCameraStream");
obj.put("videoChatOfferer", false); //second guy isn't the offerer.
list.put(obj);
server.sendMessage(stranger.getWebSocket(), list.toString()); //connected to chat partner
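For completeness, here is a rough sketch of how the browser side could consume that message; the actual ws.onmessage dispatcher isn't shown above, so the message names are simply taken from the server code (this is an assumption, not the original handler).
// Sketch of the client-side dispatcher for the JSON array messages above.
ws.onmessage = function(e) {
    var packet = JSON.parse(e.data);
    var type = packet[0];
    var data = packet[1];
    switch (type) {
        case 'loadStrangerCameraStream':
            // Only the peer the server flags as offerer creates the offer.
            loadStrangerCameraStream(data.videoChatOfferer);
            break;
        case 'getOffer':
            getOffer(data.offer);
            break;
        case 'getAnswer':
            getAnswer(data.answer);
            break;
        case 'iceCandidate':
            iceCandidate(data.candidate);
            break;
    }
};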

js+html5 getUserMedia conflicts with microphone (android) (mobile thinks phone call is active)

Edited - update at the bottom of the post.
I'm building a web-based app for Android using PhoneGap, and I came across this problem:
after initializing getUserMedia, the volume up/down buttons control the in-call volume rather than the speaker volume, even if I haven't started a new recording.
In addition, I noticed that the phone actually thinks it is in a phone call while the app is running. For example:
I start my app, then I open WhatsApp and try to record a voice message, and the message gets cancelled.
I know that the next segment is the problem (I commented it out and the problem went away):
/***recording audio block***/
function audioRecordingInit() {
navigator.getUserMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
if (navigator.getUserMedia) {
console.log('getUserMedia supported.');
var constraints = {audio: true};
var chunks = [];
var onSuccess = function (stream) {
$.globals.mediaRecorder = new MediaRecorder(stream);
$.globals.mediaRecorder.onstop = function(e) {
console.log("data available after MediaRecorder.stop() called.");
var blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
chunks = [];
if(!$("#recordBtn").hasClass("private"))
$.globals.lastRecorded.audio.src = window.URL.createObjectURL(blob);
else
$.globals.lastRecordedPrivate.audio.src = window.URL.createObjectURL(blob);
console.log("audio created");
};
$.globals.mediaRecorder.ondataavailable = function(e) {
chunks.push(e.data);
}
};
var onError = function (err) {
console.log('The following error occured: ' + err);
};
navigator.getUserMedia(constraints, onSuccess, onError);// I think this is the problem
}
else{
console.log('getUserMedia not supported on your browser!');
}
/***end of recording audio block***/
}
This function is called after deviceready.
I'm also adding the startRecording and stopRecording functions below:
function startRecording(event) {
document.getElementById("recordingTime").innerText = "00:00";
$.globals.mediaRecorder.start();
console.log($.globals.mediaRecorder.state);
console.log("recorder started");
$.globals.timerInterval = setInterval(function () {
$.globals.sec += 1;
if ($.globals.sec == 60) {
$.globals.min++;
$.globals.sec = 0;
}
if ($.globals.min < 10) {
if ($.globals.sec < 10)
$.globals.timeText = "0" + $.globals.min + ":0" + $.globals.sec;
else
$.globals.timeText = "0" + $.globals.min + ":" + $.globals.sec;
}
else if ($.globals.sec < 10)
$.globals.timeText = $.globals.min + ":0" + $.globals.sec;
else
$.globals.timeText = $.globals.min + ":" + $.globals.sec;
document.getElementById("recordingTime").innerText = $.globals.timeText;
}, 1000);
}
function stopRecording() {
if($(".circleNav").hasClass("recording"))
$(".circleNav").toggleClass("recording");
$.globals.currentState="Recorded";
console.log($.globals.mediaRecorder.state);
if($.globals.mediaRecorder.state=="inactive"){
$.globals.mediaRecorder.start();
}
$.globals.mediaRecorder.stop();
console.log("recorder stopped");
clearInterval($.globals.timerInterval);
}
startRecording is called on touchstart on the record button.
stopRecording is called on touchend on the record button.
Thank you for your help!
Update:
The conflict was with the microphone: the stream was always live, not only while recording.
It works fine now, but I still need to disable the record button while a phone call is active; otherwise it will conflict and could crash the app or disconnect the microphone from the call.

OK, so after a lot of reading about MediaRecorder, MediaStream and MediaStreamTrack, I found the problem: the audio stream stayed active and kept using the microphone, which blocked phone calls and voice messages in WhatsApp.
I will add my solution below:
function audioRecordingInit() {
navigator.getUserMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
if (navigator.getUserMedia) {
console.log('getUserMedia supported.');
navigator.getUserMedia({audio:true}, onSuccessMedia, onErrorMedia);
return true;
}
else{
console.log('getUserMedia not supported on your browser!');
return false;
}
}
Notice that I made this function return a boolean value, and I separated the success and error functions.
function onSuccessMedia(stream) {
var chunks = [];
$.globals.mediaStream=stream;
console.log(stream);
$.globals.mediaRecorder = new MediaRecorder(stream);
$.globals.mediaRecorder.onstop = function(e) {
console.log("data available after MediaRecorder.stop() called.");
var blob = new Blob(chunks, { 'type' : 'audio/mp3; codecs=opus' });
chunks = [];
if(!$("#recordBtn").hasClass("private"))
$.globals.lastRecorded.audio.src = window.URL.createObjectURL(blob);
else
$.globals.lastRecordedPrivate.audio.src = window.URL.createObjectURL(blob);
console.log("audio created");
};
$.globals.mediaRecorder.ondataavailable = function(e) {
chunks.push(e.data);
};
$.globals.mediaRecorder.start();
console.log("recording start");
}
function onErrorMedia(err) {
console.log('The following error occured: ' + err);
}
Here I moved the mediaRecorder.start() call into the onSuccess function instead of inside startRecording,
and finally I changed the startRecording function to:
function startRecording(event) {
if(audioRecordingInit()) {
//here is all of the visual changes
}
}
And that's it; everything is working correctly.
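One loose end from the update above: to stop the phone from behaving as if a call is active once recording is done, the stream's tracks have to be stopped as well. A minimal sketch, assuming $.globals.mediaStream and $.globals.mediaRecorder are set as in the code above:
// Release the microphone completely once recording is finished.
function releaseMicrophone() {
    // Stop the recorder first so the final ondataavailable/onstop events still fire.
    if ($.globals.mediaRecorder && $.globals.mediaRecorder.state !== 'inactive') {
        $.globals.mediaRecorder.stop();
    }
    // Then stop every track so the OS actually releases the microphone.
    if ($.globals.mediaStream) {
        $.globals.mediaStream.getTracks().forEach(function(track) {
            track.stop();
        });
        $.globals.mediaStream = null;
    }
}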

how to use attachMediaStream?

I tried to test an exercise with WebRTC and socket.io for a video call and chat.
I don't get any console errors in Firefox, but the two local/remote streams never get attached to each other.
My browser code is this:
'use strict';
var p = navigator.mediaDevices.getUserMedia ({video: true, audio: true});
// Clean-up function:
// collect garbage before unloading browser's window
window.onbeforeunload = function(e){
hangup();
}
// Data channel information
var sendChannel, receiveChannel;
var sendButton = document.getElementById("sendButton");
var sendTextarea = document.getElementById("dataChannelSend");
var receiveTextarea = document.getElementById("dataChannelReceive");
// HTML5 <video> elements
var localVideo = document.querySelector('#localVideo');
var remoteVideo = document.querySelector('#remoteVideo');
// Handler associated with Send button
sendButton.onclick = sendData;
// Flags...
var isChannelReady = false;
var isInitiator = false;
var isStarted = false;
// WebRTC data structures
// Streams
var SStream;
var BStream;
var localStream;
var remoteStream;
// PeerConnection
var pc;
var pc_constraints = {
'optional': [
{'DtlsSrtpKeyAgreement': true}
]};
var sdpConstraints = {};
// Let's get started: prompt user for input (room name)
var room = prompt('Enter room name:');
// Connect to signaling server
var socket = io.connect("http://localhost:8181");
// Send 'create or join' message to signaling server
if (room !== '') {
console.log('Create or join room ', room);
socket.emit('create or join',room);
}
// Server-mediated message exchanging...
// 1. Server-->Client...
// this peer is the initiator
socket.on('created', function (room){
console.log('Created room ' + room);
isInitiator = true;
// Call getUserMedia()
p.then (function (MediaStream) {
SStream=MediaStream;
localVideo.src = window.URL.createObjectURL (MediaStream);
localVideo.onloadedmetadata = function (e) {
console.log("add local stream");
sendMessage('got user media');
};
});
p.catch (function (err) {console.log (err.name);});
checkAndStart();
});
// Handle 'join' message coming back from server:
// another peer is joining the channel
socket.on('join',function(room){
console.log('this peer is the initiator of room '+room+' !');
isChannelReady=true;
})
// Handle 'joined' message coming back from server:
// this is the second peer joining the channel
socket.on('joined', function (room){
console.log('This peer has joined room ' + room);
isChannelReady = true;
// Call getUserMedia()
p.then (function (MediaStream) {
isChannelReady = true;
BStream=MediaStream;
remoteVideo.src = window.URL.createObjectURL (MediaStream);
remoteVideo.onloadedmetadata = function (e) {
console.log("add remote stream");
sendMessage('got user media');
};
});
p.catch (function (err) {console.log (err.name);});
});
// Server-sent log message...
socket.on('log', function (array){
console.log.apply(console, array);
});
// Receive message from the other peer via the signaling server
socket.on('message', function (message){
console.log('Received message: ', message);
if (message === 'got user media') {
console.log('inside the if');
checkAndStart();
}
else if (message.type === 'offer') {
if (!isInitiator && !isStarted) {
checkAndStart();
}
pc.setRemoteDescription(new RTCSessionDescription(message));
doAnswer();
}
else if (message.type === 'answer' && isStarted) {
pc.setRemoteDescription(new RTCSessionDescription(message));
}
else if (message.type === 'candidate' && isStarted) {
var candidate = new RTCIceCandidate({sdpMLineIndex:message.label,candidate:message.candidate});
pc.addIceCandidate(candidate);
}
else if (message === 'bye' && isStarted) {
handleRemoteHangup();
}
});
// 2. Client-->Server
// Send message to the other peer via the signaling server
function sendMessage(message){
console.log('Sending message: ', message);
socket.emit('message', message);
}
// Channel negotiation trigger function
function checkAndStart() {
if (!isStarted && isChannelReady) {
createPeerConnection();
isStarted = true;
if (isInitiator) {
doCall();
}
}
}
// PeerConnection management...
function createPeerConnection() {
try {
/* could also add TURN servers, Google, etc. here */
pc = new RTCPeerConnection({ iceServers: [{ urls: 'stun:93.33.165.245' }] }, {
'optional': [
{'DtlsSrtpKeyAgreement': true}
]});
if(isInitiator){
SStream.getTracks().forEach(track => pc.addTrack(track,SStream));
pc.ontrack= handleRemoteStreamAdded;}
else{
BStream.getTracks().forEach(track => pc.addTrack(track,BStream));}
pc.onicecandidate = handleIceCandidate;
console.log('create RTCPeerConnection');
}
catch (e) {
console.log('Failed to create PeerConnection, exception: ' + e.message);
alert('Cannot create RTCPeerConnection object.');
return;
}
pc.ontrack= handleRemoteStreamAdded;
pc.onremovestream = handleRemoteStreamRemoved;
if (isInitiator) {
try {
// Create a reliable data channel
sendChannel = pc.createDataChannel("sendDataChannel",
{reliable: true});
console.log('Created send data channel');
} catch (e) {
alert('Failed to create data channel. ');
console.log('createDataChannel() failed with exception: ' + e.message);
}
sendChannel.onopen = handleSendChannelStateChange;
sendChannel.onmessage = handleMessage;
sendChannel.onclose = handleSendChannelStateChange;
} else { // Joiner
pc.ondatachannel = gotReceiveChannel;
}
}
// Data channel management
function sendData() {
var data = sendTextarea.value;
if(isInitiator) sendChannel.send(data);
else receiveChannel.send(data);
console.log('Sent data: ' + data);
}
// Handlers...
function gotReceiveChannel(event) {
console.log('Receive Channel Callback');
receiveChannel = event.channel;
receiveChannel.onmessage = handleMessage;
receiveChannel.onopen = handleReceiveChannelStateChange;
receiveChannel.onclose = handleReceiveChannelStateChange;
}
function handleMessage(event) {
console.log('Received message: ' + event.data);
receiveTextarea.value += event.data + '\n';
}
function handleSendChannelStateChange() {
var readyState = sendChannel.readyState;
console.log('Send channel state is: ' + readyState);
// If channel ready, enable user's input
if (readyState == "open") {
dataChannelSend.disabled = false;
dataChannelSend.focus();
dataChannelSend.placeholder = "";
sendButton.disabled = false;
} else {
dataChannelSend.disabled = true;
sendButton.disabled = true;
}
}
function handleReceiveChannelStateChange() {
var readyState = receiveChannel.readyState;
console.log('Receive channel state is: ' + readyState);
// If channel ready, enable user's input
if (readyState == "open") {
dataChannelSend.disabled = false;
dataChannelSend.focus();
dataChannelSend.placeholder = "";
sendButton.disabled = false;
} else {
dataChannelSend.disabled = true;
sendButton.disabled = true;
}
}
// ICE candidates management
function handleIceCandidate(event) {
console.log('handleIceCandidate event: ', event);
if (event.candidate) {
sendMessage({
type: 'candidate',
label: event.candidate.sdpMLineIndex,
id: event.candidate.sdpMid,
candidate: event.candidate.candidate});
} else {
console.log('End of candidates.');
}
}
// Create Offer
function doCall() {
console.log('Creating Offer...');
pc.createOffer(setLocalAndSendMessage, onSignalingError, sdpConstraints);
}
// Signaling error handler
function onSignalingError(error) {
console.log('Failed to create signaling message : ' + error.name);
}
// Create Answer
function doAnswer() {
console.log('Sending answer to peer.');
pc.createAnswer(setLocalAndSendMessage, onSignalingError, sdpConstraints);
}
// Success handler for both createOffer()
// and createAnswer()
function setLocalAndSendMessage(sessionDescription) {
pc.setLocalDescription(sessionDescription);
sendMessage(sessionDescription);
}
// Remote stream handlers...
function handleRemoteStreamAdded(event) {
attachMediaStream(BStream, event.stream);
BStream = event.stream;
}
function handleRemoteStreamRemoved(event) {
console.log('Remote stream removed. Event: ', event);
}
// Clean-up functions...
function hangup() {
console.log('Hanging up.');
stop();
sendMessage('bye');
}
function handleRemoteHangup() {
console.log('Session terminated.');
stop();
isInitiator = false;
}
function stop() {
isStarted = false;
if (sendChannel) sendChannel.close();
if (receiveChannel) receiveChannel.close();
if (pc) pc.close();
pc = null;
sendButton.disabled=true;
}
Can someone tell me where I'm wrong and how to fix it?
attachMediaStream is not part of WebRTC. It's a shim that adapter.js used to expose for setting video.src or video.srcObject (which Chrome still doesn't support, but Canary does).
In any case, you're passing in the wrong arguments, which should be an element and a stream, not two streams. I.e. make it:
attachMediaStream(remoteVideo, event.streams[0]);
or better, use the spec-way that adapter.js now supports on all browsers:
remoteVideo.srcObject = event.streams[0];
Important: The pc.ontrack event contains event.streams (plural), not event.stream! - It's an array since a track may (but rarely does) exist in more than one stream.
If you're not using the latest adapter.js, then note that pc.ontrack is only natively available in Firefox at the moment, so in Chrome you would need the older pc.onaddstream (and its event.stream).
PS: You're currently setting remoteVideo.src to the local video. Don't do that.
PPS: Remove pc_constraints. Really old stuff that will break Chrome.
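If you need to handle both cases yourself without adapter.js, a rough feature-detection sketch (assuming a pc and a remoteVideo element like in the question) could look like this:
// Prefer the standard ontrack event; fall back to the legacy onaddstream event.
if ('ontrack' in pc) {
    pc.ontrack = function(e) {
        remoteVideo.srcObject = e.streams[0]; // note: streams (plural) is an array
    };
} else {
    pc.onaddstream = function(e) {
        remoteVideo.src = window.URL.createObjectURL(e.stream);
    };
}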
Here's a demo of ontrack (use https fiddle in Chrome):
var pc1 = new RTCPeerConnection(), pc2 = new RTCPeerConnection();
navigator.mediaDevices.getUserMedia({ video: true, audio: true })
.then(stream => pc1.addStream(video1.srcObject = stream))
.catch(log);
var add = (pc, can) => can && pc.addIceCandidate(can).catch(log);
pc1.onicecandidate = e => add(pc2, e.candidate);
pc2.onicecandidate = e => add(pc1, e.candidate);
pc2.ontrack = e => video2.srcObject = e.streams[0];
pc1.oniceconnectionstatechange = e => log(pc1.iceConnectionState);
pc1.onnegotiationneeded = e =>
pc1.createOffer().then(d => pc1.setLocalDescription(d))
.then(() => pc2.setRemoteDescription(pc1.localDescription))
.then(() => pc2.createAnswer()).then(d => pc2.setLocalDescription(d))
.then(() => pc1.setRemoteDescription(pc2.localDescription))
.catch(log);
var log = msg => div.innerHTML += "<br>" + msg;
<video id="video1" height="120" width="160" autoplay muted></video>
<video id="video2" height="120" width="160" autoplay></video><br>
<div id="div"></div>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>

Specify webcam with sourceId in getUserMedia

I have two webcams and am trying to specify which one to show in my video tag. My HTML is simply <video autoplay></video>. Here is my javascript:
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
var constraints = {
video: {
optional: [{
sourceId: "64-character-alphanumeric-source-id-here"
}]
}
};
var video = document.querySelector("video");
function successCallback(stream) {
window.stream = stream; // stream available to console
if (window.URL) {
video.src = window.URL.createObjectURL(stream);
} else {
video.src = stream;
}
video.play();
}
function errorCallback(error) {
console.log("navigator.getUserMedia error: ", error);
}
navigator.getUserMedia(constraints, successCallback, errorCallback);
However, even when I change the sourceId to my second webcam, I can't get it to display that webcam. js.fiddle code
This code is working for me in mobile Chrome; it tries to detect the back-facing video stream.
MediaStreamTrack.getSources(function(sourceInfos) {
var videoSourceId;
for (var i = 0; i != sourceInfos.length; ++i) {
var sourceInfo = sourceInfos[i];
if(sourceInfo.kind == "video" && sourceInfo.facing == "environment") {
videoSourceId = sourceInfo.id;
}
}
var constraints = {
audio: false,
video: {
optional: [{sourceId: videoSourceId}]
}
};
....
});
Note that there is no fallback in this code.
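MediaStreamTrack.getSources() has since been removed from browsers. A rough modern equivalent (assuming a browser with navigator.mediaDevices; this is not part of the original answer) picks the camera with enumerateDevices() and a deviceId constraint:
// List the available video inputs, then request a specific one by deviceId.
navigator.mediaDevices.enumerateDevices().then(function(devices) {
    var cams = devices.filter(function(d) { return d.kind === 'videoinput'; });
    var constraints = {
        audio: false,
        video: cams.length > 1 ? { deviceId: { exact: cams[1].deviceId } } : true
    };
    return navigator.mediaDevices.getUserMedia(constraints);
}).then(function(stream) {
    document.querySelector('video').srcObject = stream;
}).catch(function(error) {
    console.log('getUserMedia error: ', error);
});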
