How to send and receive a desktop capture stream generated via getUserMedia() - javascript

I am making a screen-sharing app with WebRTC + Socket.io and I am stuck at one point.
I have connected two browsers using WebRTC + Socket.io and can send text between them.
I am taking support from the codelab, but it does not cover streams. (If the solution is based on this link, that would be highly helpful.)
How can I send the getUserMedia() stream:
dataChannel.send(stream);
and receive the same stream in channel.onmessage()?
I am getting event.data as the string '[object MediaStream]', not the stream itself.
channel.onmessage = function(event) {
// unable to get the actual stream here
// event.data is the string "[object MediaStream]"
};
function createPeerConnection(isInitiator, config) {
console.log('Creating Peer connection as initiator?', isInitiator, 'config:', config);
peerConn = new RTCPeerConnection(config);
// send any ice candidates to the other peer
peerConn.onicecandidate = function (event) {
console.log('onIceCandidate event:', event);
if (event.candidate) {
sendMessage({
type: 'candidate',
label: event.candidate.sdpMLineIndex,
id: event.candidate.sdpMid,
candidate: event.candidate.candidate
});
} else {
console.log('End of candidates.');
}
};
if (isInitiator) {
console.log('Creating Data Channel');
dataChannel = peerConn.createDataChannel("screen");
onDataChannelCreated(dataChannel);
console.log('Creating an offer');
peerConn.createOffer(onLocalSessionCreated, logError);
} else {
peerConn.ondatachannel = function (event) {
console.log('ondatachannel:', event.channel);
dataChannel = event.channel;
onDataChannelCreated(dataChannel);
};
}
}
It works fine for strings or JSON, e.g. dataChannel.send('Hello');
I have created a wiki page for this: wiki
Please help.

Please try something like this: (explanation at the end of the code)
var btnShareYourCamera = document.querySelector('#share-your-camera');
var localVideo = document.querySelector('#local-video');
var remoteVideo = document.querySelector('#remote-video');
var websocket = new WebSocket('wss://path-to-server:port/');
websocket.onmessage = function(event) {
var data = JSON.parse(event.data);
if (data.sdp) {
if (data.sdp.type === 'offer') {
getUserMedia(function(video_stream) {
localVideo.srcObject = video_stream;
answererPeer(new RTCSessionDescription(data.sdp), video_stream);
});
}
if (data.sdp.type === 'answer') {
offerer.setRemoteDescription(new RTCSessionDescription(data.sdp));
}
}
if (data.candidate) {
addIceCandidate((offerer || answerer), new RTCIceCandidate(data.candidate));
}
};
var iceTransportPolicy = 'all';
var iceTransportLimitation = 'udp';
function addIceCandidate(peer, candidate) {
if (iceTransportLimitation === 'tcp') {
if (candidate.candidate.toLowerCase().indexOf('tcp') === -1) {
return; // ignore UDP
}
}
peer.addIceCandidate(candidate);
}
var offerer, answerer;
var iceServers = {
iceServers: [{
'urls': [
'stun:stun.l.google.com:19302',
'stun:stun1.l.google.com:19302',
'stun:stun2.l.google.com:19302',
'stun:stun.l.google.com:19302?transport=udp',
]
}],
iceTransportPolicy: iceTransportPolicy,
rtcpMuxPolicy: 'require',
bundlePolicy: 'max-bundle'
};
// https://cdn.webrtc-experiment.com/IceServersHandler.js
if (typeof IceServersHandler !== 'undefined') {
iceServers.iceServers = IceServersHandler.getIceServers();
}
var mediaConstraints = {
offerToReceiveAudio: true,
offerToReceiveVideo: true
};
/* offerer */
function offererPeer(video_stream) {
offerer = new RTCPeerConnection(iceServers);
offerer.idx = 1;
video_stream.getTracks().forEach(function(track) {
offerer.addTrack(track, video_stream);
});
offerer.ontrack = function(event) {
remoteVideo.srcObject = event.streams[0];
};
offerer.onicecandidate = function(event) {
if (!event || !event.candidate) return;
websocket.send(JSON.stringify({
candidate: event.candidate
}));
};
offerer.createOffer(mediaConstraints).then(function(offer) {
offerer.setLocalDescription(offer).then(function() {
websocket.send(JSON.stringify({
sdp: offer
}));
});
});
}
/* answerer */
function answererPeer(offer, video_stream) {
answerer = new RTCPeerConnection(iceServers);
answerer.idx = 2;
video_stream.getTracks().forEach(function(track) {
answerer.addTrack(track, video_stream);
});
answerer.ontrack = function(event) {
remoteVideo.srcObject = event.streams[0];
};
answerer.onicecandidate = function(event) {
if (!event || !event.candidate) return;
websocket.send(JSON.stringify({
candidate: event.candidate
}));
};
answerer.setRemoteDescription(offer).then(function() {
answerer.createAnswer(mediaConstraints).then(function(answer) {
answerer.setLocalDescription(answer).then(function() {
websocket.send(JSON.stringify({
sdp: answer
}));
});
});
});
}
var video_constraints = {
mandatory: {},
optional: []
};
function getUserMedia(successCallback) {
function errorCallback(e) {
alert(JSON.stringify(e, null, '\t'));
}
var mediaConstraints = {
video: true,
audio: true
};
navigator.mediaDevices.getUserMedia(mediaConstraints).then(successCallback).catch(errorCallback);
}
btnShareYourCamera.onclick = function() {
getUserMedia(function(video_stream) {
localVideo.srcObject = video_stream;
offererPeer(video_stream);
});
};
You must attach your stream using peer.addTrack, as you can see in the above example.
You must receive the remote stream using peer.ontrack, as you can see in the above example.
i.e. use addTrack to attach your camera and ontrack to receive the remote camera.
You must never send your stream using dataChannel.send. They are totally different protocols: a MediaStream must be shared over RTP, not SCTP. RTP is used only when you call the peer.addTrack method to attach your camera stream.
This process happens before you open or join a room.
See single-page demo here: https://www.webrtc-experiment.com/getStats/
HTML for the above code snippet:
<button id="share-your-camera"></button>
<video id="local-video" controls autoplay playsinline></video>
<video id="remote-video" controls autoplay playsinline></video>

Related

WebRTC connectionState stuck at "new" - Safari only, works in Chrome and FF

I'm having trouble connecting to my local peer as remote with WebRTC video and audio. This issue only happens in Safari on desktop and iOS; on Chrome and Firefox the issue is non-existent.
I'm assuming it has something to do with the fact that Safari always asks whether you want to allow audio/video, but I'm not sure. That's the only difference I can make out between the browsers. Even after selecting 'allow', the issue persists.
Reproduction steps:
In Chrome, open the initial local connection with audio/video
In Safari, open the remote connection and choose to enable audio/video
Result:
Local connection never makes an offer and the connectionState of the remote (Safari) gets stuck as new. See the following RTCPeerConnection object:
Here is the exact same object via the exact same steps, but in Chrome or Firefox:
Edit:
After more testing, I've found the following:
Format: (First Connection) > (Second Connection)
Chrome > Chrome: Works
Chrome > Firefox: Works
Chrome > Safari: Doesn't work
Safari > Chrome: Works
Safari > Safari: Works
The issue doesn't seem to exist when using Safari for both sides of the connection...only when Safari is used as the secondary connection.
Here is my code:
import h from './helpers.js';
document.getElementById('close-chat').addEventListener('click', (e) => {
document.querySelector('#right').style.display = "none";
});
document.getElementById('open-chat').addEventListener('click', (e) => {
document.querySelector('#right').style.display = "flex";
});
window.addEventListener('load', () => {
sessionStorage.setItem('connected', 'false');
const room = h.getParam('room');
const user = h.getParam('user');
sessionStorage.setItem('username', user);
const username = sessionStorage.getItem('username');
if (!room) {
document.querySelector('#room-create').attributes.removeNamedItem('hidden');
}
else if (!username) {
document.querySelector('#username-set').attributes.removeNamedItem('hidden');
}
else {
let commElem = document.getElementsByClassName('room-comm');
for (let i = 0; i < commElem.length; i++) {
commElem[i].attributes.removeNamedItem('hidden');
}
var pc = [];
let socket = io('/stream');
var socketId = '';
var myStream = '';
var screen = '';
// Get user video by default
getAndSetUserStream();
socket.on('connect', () => {
console.log('Connected');
sessionStorage.setItem('remoteConnected', 'false');
h.connectedChat();
setTimeout(h.establishingChat, 3000);
setTimeout(h.oneMinChat, 60000);
setTimeout(h.twoMinChat, 120000);
setTimeout(h.threeMinChat, 180000);
setTimeout(h.fourMinChat, 240000);
setTimeout(h.fiveMinChat, 300000);
// Set socketId
socketId = socket.io.engine.id;
socket.emit('subscribe', {
room: room,
socketId: socketId
});
socket.on('new user', (data) => {
// OG user gets log when new user joins here.
console.log('New User');
console.log(data);
socket.emit('newUserStart', { to: data.socketId, sender: socketId });
pc.push(data.socketId);
init(true, data.socketId);
});
socket.on('newUserStart', (data) => {
console.log('New User Start');
console.log(data);
pc.push(data.sender);
init(false, data.sender);
});
socket.on('ice candidates', async (data) => {
console.log('Ice Candidates:');
console.log(data);
data.candidate ? await pc[data.sender].addIceCandidate(new RTCIceCandidate(data.candidate)) : '';
});
socket.on('sdp', async (data) => {
console.log('SDP:');
console.log(data);
if (data.description.type === 'offer') {
data.description ? await pc[data.sender].setRemoteDescription(new RTCSessionDescription(data.description)) : '';
h.getUserFullMedia().then(async (stream) => {
if (!document.getElementById('local').srcObject) {
h.setLocalStream(stream);
}
// Save my stream
myStream = stream;
stream.getTracks().forEach((track) => {
pc[data.sender].addTrack(track, stream);
});
let answer = await pc[data.sender].createAnswer();
await pc[data.sender].setLocalDescription(answer);
socket.emit('sdp', { description: pc[data.sender].localDescription, to: data.sender, sender: socketId });
}).catch((e) => {
console.error(e);
});
}
else if (data.description.type === 'answer') {
await pc[data.sender].setRemoteDescription(new RTCSessionDescription(data.description));
}
});
socket.on('chat', (data) => {
h.addChat(data, 'remote');
});
});
function getAndSetUserStream() {
console.log('Get and set user stream.');
h.getUserFullMedia({ audio: true, video: true }).then((stream) => {
// Save my stream
myStream = stream;
h.setLocalStream(stream);
}).catch((e) => {
console.error(`stream error: ${e}`);
});
}
function sendMsg(msg) {
let data = {
room: room,
msg: msg,
sender: username
};
// Emit chat message
socket.emit('chat', data);
// Add localchat
h.addChat(data, 'local');
}
function init(createOffer, partnerName) {
console.log('P1:');
console.log(partnerName);
pc[partnerName] = new RTCPeerConnection(h.getIceServer());
console.log('P2:');
console.log(pc[partnerName]);
if (screen && screen.getTracks().length) {
console.log('Screen:');
console.log(screen);
screen.getTracks().forEach((track) => {
pc[partnerName].addTrack(track, screen); // Should trigger negotiationneeded event
});
}
else if (myStream) {
console.log('myStream:');
console.log(myStream);
myStream.getTracks().forEach((track) => {
pc[partnerName].addTrack(track, myStream); // Should trigger negotiationneeded event
});
}
else {
h.getUserFullMedia().then((stream) => {
console.log('Stream:');
console.log(stream);
// Save my stream
myStream = stream;
stream.getTracks().forEach((track) => {
console.log('Tracks:');
console.log(track);
pc[partnerName].addTrack(track, stream); // Should trigger negotiationneeded event
});
h.setLocalStream(stream);
}).catch((e) => {
console.error(`stream error: ${e}`);
});
}
// Create offer
if (createOffer) {
console.log('Create Offer');
pc[partnerName].onnegotiationneeded = async () => {
let offer = await pc[partnerName].createOffer();
console.log('Offer:');
console.log(offer);
await pc[partnerName].setLocalDescription(offer);
console.log('Partner Details:');
console.log(pc[partnerName]);
socket.emit('sdp', { description: pc[partnerName].localDescription, to: partnerName, sender: socketId });
};
}
// Send ice candidate to partnerNames
pc[partnerName].onicecandidate = ({ candidate }) => {
console.log('Send ICE Candidates:');
console.log(candidate);
socket.emit('ice candidates', { candidate: candidate, to: partnerName, sender: socketId });
};
// Add
pc[partnerName].ontrack = (e) => {
console.log('Adding partner video...');
let str = e.streams[0];
if (document.getElementById(`${partnerName}-video`)) {
document.getElementById(`${partnerName}-video`).srcObject = str;
}
else {
// Video elem
let newVid = document.createElement('video');
newVid.id = `${partnerName}-video`;
newVid.srcObject = str;
newVid.autoplay = true;
newVid.className = 'remote-video';
newVid.playsInline = true;
newVid.controls = true;
// Put div in main-section elem
document.getElementById('left').appendChild(newVid);
const video = document.getElementsByClassName('remote-video');
}
};
pc[partnerName].onconnectionstatechange = (d) => {
console.log('Connection State:');
console.log(pc[partnerName].iceConnectionState);
switch (pc[partnerName].iceConnectionState) {
case 'new':
console.log('New connection...!');
break;
case 'checking':
console.log('Checking connection...!');
break;
case 'connected':
console.log('Connected with dispensary!');
sessionStorage.setItem('remoteConnected', 'true');
h.establishedChat();
break;
case 'disconnected':
console.log('Disconnected');
sessionStorage.setItem('connected', 'false');
sessionStorage.setItem('remoteConnected', 'false');
h.disconnectedChat();
h.closeVideo(partnerName);
break;
case 'failed':
console.log('Failed');
sessionStorage.setItem('connected', 'false');
sessionStorage.setItem('remoteConnected', 'false');
h.disconnectedChat();
h.closeVideo(partnerName);
break;
case 'closed':
console.log('Closed');
sessionStorage.setItem('connected', 'false');
sessionStorage.setItem('remoteConnected', 'false');
h.disconnectedChat();
h.closeVideo(partnerName);
break;
}
};
pc[partnerName].onsignalingstatechange = (d) => {
switch (pc[partnerName].signalingState) {
case 'closed':
console.log("Signalling state is 'closed'");
h.closeVideo(partnerName);
break;
}
};
}
// Chat textarea
document.getElementById('chat-input').addEventListener('keypress', (e) => {
if (e.which === 13 && (e.target.value.trim())) {
e.preventDefault();
sendMsg(e.target.value);
setTimeout(() => {
e.target.value = '';
}, 50);
}
});
}
});
It would be helpful to see the console logs from a failed (stuck in the "new" state) Safari run.
One possibility is that Safari isn't doing the full ice candidate gathering. As Phillip Hancke noted, seeing the SDP would help figure out if that's happening. As would seeing the console logs. In the past, Safari has had various quirks and bugs related to candidate gathering.
One way to force Safari to gather candidates is to explicitly set offerToReceiveAudio and offerToReceiveVideo:
await pc[partnerName].createOffer({ offerToReceiveAudio: true, offerToReceiveVideo: true })
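For example, wired into your existing onnegotiationneeded handler (a sketch, assuming the rest of init() stays unchanged; the options ask for receive-only audio/video sections, which should make the browser start ICE gathering even before remote tracks are known):
pc[partnerName].onnegotiationneeded = async () => {
  const offer = await pc[partnerName].createOffer({
    offerToReceiveAudio: true,
    offerToReceiveVideo: true
  });
  await pc[partnerName].setLocalDescription(offer);
  socket.emit('sdp', { description: pc[partnerName].localDescription, to: partnerName, sender: socketId });
};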

No sound when making outgoing call via JsSIP (Asterisk)

I'm trying to debug an existing system where calls are made via Asterisk.
When accepting incoming calls everything works fine, but when making an outgoing call there is apparently no sound (even though I handle the 'addstream' event and attach the stream to an audio element). The production code is about 500 lines, but the code below does pretty much the same thing and doesn't work either:
const socket = new JsSIP.WebSocketInterface('wss://callwss.agdevelopments.net');
socket.via_transport = 'wss';
const configuration = {
password: "SIP4003!",
realm: "callws,s.agdevelopments.net",
register: true,
session_timers: false,
uri: "sip:4003#callwss.agdevelopments.net",
sockets: [socket]
}
const ua = new JsSIP.UA(configuration)
// Setup events
ua.on('connected', function () {
console.log('Connected')
})
ua.on('disconnected', function () {
console.log('Disconnected')
})
// Make a call
const eventHandlers = {
'progress': function (e) {
console.log('call is in progress');
},
'failed': function (e) {
console.log('call failed with cause: ' + (e.data ? e.data.cause : 'no cause'), e);
},
'ended': function (e) {
console.log('call ended with cause: ' + (e.data ? e.data.cause : 'no cause'), e);
},
'confirmed': function (e) {
console.log('call confirmed');
},
'addstream': (e) => {
console.log('Add stream (event handlers)')
audio.srcObject = e.stream
audio.play()
}
};
const options = {
'eventHandlers': eventHandlers,
'mediaConstraints': {'audio': true, 'video': false}
};
const audio = new window.Audio()
ua.on('registered', function () {
const session = ua.call('0513887341', options)
if (session.connection) {
console.log('Connection is valid')
session.connection.addEventListener('addstream', e => {
console.log('Add stream')
audio.srcObject = e.stream
audio.play()
})
session.on('addstream', function(e){
// set remote audio stream (to listen to remote audio)
// remoteAudio is <audio> element on page
const remoteAudio = audio
remoteAudio.src = window.URL.createObjectURL(e.stream);
remoteAudio.play();
});
session.connection.addEventListener('peerconnection', e => {
console.log('Peer connection')
audio.srcObject = e.stream
audio.play()
})
} else {
console.log('Connection is null')
}
})
ua.on('newRTCSession', (data) => {
console.log('New RTC Session')
const session = data.session
session.on('addstream', function(e){
// set remote audio stream (to listen to remote audio)
// remoteAudio is <audio> element on page
const remoteAudio = audio
remoteAudio.src = window.URL.createObjectURL(e.stream);
remoteAudio.play();
});
})
ua.start()
Also attaching screenshots from Asterisk. The first one is an outgoing call with no sound, the second an incoming call with sound.
The issue was solved on the IT side. There were no problems in JsSIP or the code.
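Unrelated to the root cause, one note on the snippets above: assigning window.URL.createObjectURL(e.stream) to an audio element's src no longer works for MediaStream objects in current browsers. A sketch of the srcObject form instead (same handler and audio element as above):
session.on('addstream', function (e) {
  // Modern browsers removed createObjectURL(MediaStream); attach the stream directly.
  audio.srcObject = e.stream;
  audio.play();
});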

Original audio of tab gets muted while using chrome.tabCapture.capture() and MediaRecorder()

When I use chrome.tabCapture.capture() with the MediaRecorder API to record a stream, the original audio of the tab I am capturing gets muted, although the audio comes out fine in the recorded stream. I want the audio in the tab to keep playing normally.
class Recorder {
constructor(onChunksAvailable) {
this.chunks = [];
this.active = false;
this.callback = onChunksAvailable;
}
start(stream) {
if (this.active) {
throw new Error("recorder is already running");
}
this.recorder = new MediaRecorder(stream, {
mimeType: "audio/webm",
});
this.recorder.onstop = () => {
stream.getAudioTracks()[0].stop();
this.callback([...this.chunks]);
setTimeout(() => {
this.chunks = [];
});
this.active = false;
};
this.recorder.ondataavailable = (event) => this.chunks.push(event.data);
this.active = true;
this.recorder.start();
}
stop() {
if (!this.active) {
throw new Error("recorder is already stop");
} else {
this.recorder.stop();
}
}
}
let rec = new Recorder(async (chunks) => {
//using chunks then to get the stream
});
chrome.tabCapture.capture(
{
audio: true,
video: false,
},
function (stream) {
rec.start(stream);
});
Forgive me for the lack of documentation, as I last played with these APIs years ago, but MDN has some material.
In my case, adding these three lines to the start function fixed it:
this.context = new AudioContext();
this.stream = this.context.createMediaStreamSource(stream);
this.stream.connect(this.context.destination);
class Recorder {
constructor(onChunksAvailable) {
this.chunks = [];
this.active = false;
this.callback = onChunksAvailable;
this.context = new AudioContext();
}
start(stream) {
if (this.active) {
throw new Error("recorder is already running");
}
// Reconnect the stream to actual output
this.stream = this.context.createMediaStreamSource(stream);
this.stream.connect(this.context.destination);
this.recorder = new MediaRecorder(stream, {
mimeType: "audio/webm",
});
this.recorder.onstop = () => {
stream.getAudioTracks()[0].stop();
this.callback([...this.chunks]);
setTimeout(() => {
this.chunks = [];
});
this.active = false;
};
this.recorder.ondataavailable = (event) => this.chunks.push(event.data);
this.active = true;
this.recorder.start();
}
stop() {
if (!this.active) {
throw new Error("recorder is already stop");
} else {
this.recorder.stop();
}
}
}
let rec = new Recorder(async (chunks) => {
//using chunks then to get the stream
});
chrome.tabCapture.capture(
{
audio: true,
video: false,
},
function (stream) {
rec.start(stream);
})
Sorry for the lack of details, but I believe that when you start a tab audio capture, it disconnects the stream from the default output (the speakers). By creating a secondary MediaStreamSource and connecting it to the default output (AudioContext.destination), you allow the stream to keep playing through the speakers while it is also fed into your recorder.
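For completeness, a rough sketch of what the onChunksAvailable callback could do with the recorded chunks (the snippets above leave this as a comment), assuming you just want to play the recording back:
function onChunksAvailable(chunks) {
  // Combine the recorded chunks into a single webm blob and play it back.
  const blob = new Blob(chunks, { type: 'audio/webm' });
  const url = URL.createObjectURL(blob);
  new Audio(url).play();
}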
Sources
MDN: AudioContext
MDN: MediaStreamSource
Chrome extension I made 2 years ago

Not receiving video, onicecandidate is not executing

So I was following this tutorial to learn how to implement a WebRTC server-client setup. Once I got that working I wanted to split the client into two parts, one sender and one receiver. Now they can establish a connection with each other but the receiver never gets the stream from the sender.
I managed to determine that the code flow between the original code and the split versions remains the same, except that neither peer fires the onicecandidate event.
According to this I need to explicitly include OfferToReceiveAudio: true and OfferToReceiveVideo: true since I'm using Chrome, which I did but it didn't seem to make any difference.
Currently, they both receive SDP from each other, there is a local and a remote description on the peerConnection, and iceGatheringState is "new" but iceConnectionState is "checking" (unlike the second link, where he states it should also be "new").
How come they aren't exchanging ICE candidates when it's split in two like this?
Sender.js
const HTTPSPort = 3434;
const domain = '127.0.0.1';
const wssHost = 'wss://' + domain + ':' + HTTPSPort + '/websocket/';
// Feed settings
const video = true;
const audio = true;
const constraints = { "audio": audio, "video": video };
var videoContainer = null, feed = null,
pC = null, wsc = new WebSocket(wssHost),
pCConfig = [
{ 'url': 'stun:stun.services.mozilla.com' },
{ 'url': 'stun:stun.l.google.com:19302' }
];
function pageReady() {
// Check browser WebRTC availability
navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
videoContainer = document.getElementById('videoFeed');
// Get the feed and show it in the local video element
feed = stream;
videoContainer.srcObject = feed;
}).catch(function () {
alert("Sorry, your browser does not support WebRTC!");
});
}
wsc.onmessage = function (evt) {
if (!pC) {
// Initiate peerConnection
pC = new RTCPeerConnection(pCConfig);
// Send any ice candidates to the other peer
pC.onicecandidate = onIceCandidateHandler;
pC.addStream(feed);
}
// Read the message
var signal = JSON.parse(evt.data);
if (signal.sdp) {
log('Received SDP from remote peer.');
pC.setRemoteDescription(new RTCSessionDescription(signal.sdp));
answerCall();
} else if (signal.candidate) {
log('Received ICECandidate from remote peer.');
pC.addIceCandidate(new RTCIceCandidate(signal.candidate));
}
};
function answerCall() {
pC.createAnswer().then(function (answer) {
var ans = new RTCSessionDescription(answer);
pC.setLocalDescription(ans).then(function () {
wsc.send(JSON.stringify({ 'sdp': ans }));
}).catch(errorHandler);
}).catch(errorHandler);
}
function onIceCandidateHandler(evt) {
if (!evt || !evt.candidate) return;
wsc.send(JSON.stringify({ 'candidate': evt.candidate }));
};
Receiver.js
const HTTPSPort = 3434;
const domain = '127.0.0.1';
const wssHost = 'wss://' + domain + ':' + HTTPSPort + '/websocket/';
var remoteVideo = null,
pC = null, wsc = new WebSocket(wssHost),
pCConfig = [
{ 'url': 'stun:stun.services.mozilla.com' },
{ 'url': 'stun:stun.l.google.com:19302' }
],
mediaConstraints = {
mandatory: {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
}
};
function pageReady() {
remoteVideo = document.getElementById('remoteVideo');
icebutton = document.getElementById('checkICE');
icebutton.addEventListener('click', function (evt) {
console.log(pC);
})
};
wsc.onopen = function () {
// Initiates peerConnection
pC = new RTCPeerConnection(pCConfig);
// Send any ICE candidates to the other peer
pC.onicecandidate = onIceCandidateHandler;
// Once remote stream arrives, show it in the remote video element
pC.onaddstream = onAddStreamHandler;
// Offer a connection to the server
createAndSendOffer();
};
function createAndSendOffer() {
pC.createOffer(mediaConstraints).then(function (offer) {
var off = new RTCSessionDescription(offer);
pC.setLocalDescription(off).then(function () {
wsc.send(JSON.stringify({ 'sdp': off }));
}).catch(errorHandler);
}).catch(errorHandler);
}
wsc.onmessage = function (evt) {
// Read the message
var signal = JSON.parse(evt.data);
if (signal.sdp) {
console.log('Received SDP from remote peer.');
pC.setRemoteDescription(new RTCSessionDescription(signal.sdp));
} else if (signal.candidate) {
console.log('Received ICECandidate from remote peer.');
pC.addIceCandidate(new RTCIceCandidate(signal.candidate));
}
};
function onIceCandidateHandler(evt) {
if (!evt || !evt.candidate) return;
wsc.send(JSON.stringify({ 'candidate': evt.candidate }));
};
function onAddStreamHandler(evt) {
// Set remote video stream as source for remote video HTML element
remoteVideo.srcObject = evt.stream;
};
You forgot iceServers. Change
pCConfig = [
{ 'url': 'stun:stun.services.mozilla.com' },
{ 'url': 'stun:stun.l.google.com:19302' }
];
to
pCConfig = {
iceServers: [
{ urls: 'stun:stun.l.google.com:19302' }
]
};
Additionally:
url has been deprecated, use urls.
The Mozilla STUN server has been decommissioned, so save yourself some time and exclude it.
mandatory and OfferToReceiveAudio have been deprecated. Use offerToReceiveAudio.
mandatory and OfferToReceiveVideo have been deprecated. Use offerToReceiveVideo.
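For example, the receiver's mediaConstraints would become plain RTCOfferOptions (a sketch, keeping the variable name from Receiver.js; it is consumed unchanged by pC.createOffer(mediaConstraints) in createAndSendOffer):
mediaConstraints = {
  offerToReceiveAudio: true,
  offerToReceiveVideo: true
};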
Tips
Kudos for using promises (unlike the tutorial)! Note you can return them to flatten things:
function createAndSendOffer() {
return pC.createOffer(mediaConstraints).then(function (offer) {
return pC.setLocalDescription(offer);
})
.then(function () {
wsc.send(JSON.stringify({ sdp: pC.localDescription }));
})
.catch(errorHandler);
}

Angular p2p video chat - remote stream is black video

I am trying to set up a simple p2p video chat between a caller and a callee.
This is the code:
var OnBroadcast
, i
, isCaller = true
//just for testing pourpose
, URLparams = $location.search()
, iceServers = {
'iceServers':[{
'url':'stun:stun.l.google.com:19302'
}]
}
, connOpt = {
'optional':[{
'DtlsSrtpKeyAgreement': true
}]
}
, sdpConstraints = {
'mandatory': {
'OfferToReceiveAudio': true,
'OfferToReceiveVideo': true
}
}
, localVideo = $window.document.getElementById('localVideo')
, remoteVideo = $window.document.getElementById('remoteVideo')
, peerConnection = new RTCPeerConnection(iceServers, connOpt);
if (URLparams && URLparams.stranger) {
isCaller = false;
}
peerConnection.onaddstream = function (stream) {
if (!isCaller) {
$log.info('Caller Stream is', stream);
peerConnection.addStream(stream.stream);
remoteVideo.src = $window.URL.createObjectURL(stream.stream);
}
};
peerConnection.onicecandidate = function (ices) {
if (isCaller) {
ws.broadcast({
'scope': 'callerICES',
'message': ices
});
} else {
ws.broadcast({
'scope': 'calleeICES',
'message': ices
});
}
};
navigator.getUserMedia({
'audio': true,
'video': true
}, function (stream) {
localVideo.src = $window.URL.createObjectURL(stream);
if (isCaller) {
/* VIDEO CHAT P2P----------
* create CALLER Peer
* CALLER addStream to peer
* create CALLER Offer and CALLER setLocalDescription
* send CALLER Offer to CALLEE and set CALLEE remoteDescription
* create Answer from CALLEE and CALLEE setLocalDescription
* send Answer to CALLER and set CALLER setRemoteDescription
* CALLER icecandidate and send it to CALLEE and CALLEE addIceCandidate
* CALLEE icecandidate and send it to CALLER and CALLER addIceCandidate
* CALLEE addStream
*/
peerConnection.addStream(stream);
peerConnection.createOffer(function (offer) {
peerConnection.setLocalDescription(offer, function () {
ws.broadcast({
'scope': 'callerOFFER',
'message': offer
});
});
}, function (err) {
$log.error('Unable to create Offer from Caller', err);
});
}
}, function (err) {
$log.error('Unable to getUserMedia', err);
});
OnBroadcast = $rootScope.$on('comunicator:toAll', function (eventInfo, message) {
if (message.what.scope === 'callerOFFER') {
if (!isCaller) {
peerConnection.setRemoteDescription(new RTCSessionDescription(message.what.message), function () {
peerConnection.createAnswer(function (answer) {
$log.info('Setup localDesc Callee');
peerConnection.setLocalDescription(new RTCSessionDescription(answer), function () {
ws.broadcast({
'scope':'calleeANSWER',
'message': answer
});
}, function (err) {
$log.info('Unable to set localDesc for Callee', err);
},
sdpConstraints);
}, function (err) {
$log.error('Unable to create Answer from Callee', err);
});
});
}
}
if (message.what.scope === 'calleeANSWER') {
if (isCaller) {
peerConnection.setRemoteDescription(new RTCSessionDescription(message.what.message), function () {
$log.info('Setup remoteDesc Callee');
});
}
}
if (message.what.scope === 'callerICES') {
if (!isCaller) {
for (i = 0; i < message.what.length; i += 1) {
peerConnection.addIceCandidate(new RTCIceCandidate(message.what[i]));
}
$log.info('Setup CALLEE ices', message.what);
}
}
if (message.what.scope === 'calleeICES') {
if (isCaller) {
for (i = 0; i < message.what.length; i += 1) {
peerConnection.addIceCandidate(new RTCIceCandidate(message.what[i]));
}
$log.info('Setup CALLER ices', message.what);
}
}
});
Everything seems to work, but when I attach the remote video to <video id="remoteVideo"></video> I only see a black video. I am testing this on the same URL and the same WiFi:
caller: localhost:8000
callee: localhost:8000?stranger=true
Can someone explain to me what the problem is, please?
One should note that we should not add an ICE candidate to a PeerConnection before its remoteDescription is set. Assuming that is not your current problem, it might be this:
peerConnection.onaddstream = function (stream) {
if (!isCaller) {
$log.info('Caller Stream is', stream);
peerConnection.addStream(stream.stream); // ---> specially this line.
remoteVideo.src = $window.URL.createObjectURL(stream.stream);
}
};
I am not sure why the callee is sending back the remote stream as its local stream; besides that, with this code the caller, upon receiving the callee's stream, would do nothing. peerConnection.addStream is used for sharing your local stream with the remote user, and normally the callee need not wait for the caller's stream before responding with its own. So change the above code into...
peerConnection.onaddstream = function (event) {
$log.info('add stream event:', event);
remoteVideo.src = $window.URL.createObjectURL(event.stream);
};
... // also add the retrieved stream to peerConnection for both caller and callee.
navigator.getUserMedia({
'audio': true,
'video': true
}, function (stream) {
peerConnection.addStream(stream); // before if (isCaller) check
localVideo.src = $window.URL.createObjectURL(stream);
if (isCaller) {
...
Other than that, I hope the code you have written is just a prototype for testing whether WebRTC works; it has some design issues, a few of which are:
ICE candidate handling can be unified, since there is no difference between caller and callee candidates (see the sketch below).
I do not like the way the caller and callee are explicitly specified.
The current system would not support multiple peers per room.
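For the first point, a sketch of a unified candidate exchange using the same ws.broadcast signalling (the 'ICES' scope name is made up for illustration):
peerConnection.onicecandidate = function (event) {
  if (event.candidate) {
    ws.broadcast({
      'scope': 'ICES',
      'message': event.candidate
    });
  }
};
// and in the comunicator:toAll handler, identical for both roles:
if (message.what.scope === 'ICES') {
  peerConnection.addIceCandidate(new RTCIceCandidate(message.what.message));
}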
