WebRTC - Switch camera in real time - JavaScript

I have to switch the camera in WebRTC while 2 users are connected in a call. I'm having a problem trying to change my camera in real time: it works for the local video, but the remote person cannot see the new camera and still sees the old one. I tried stopping the stream and initializing it again, but it still doesn't work. This is just some of my code. I have searched everywhere and can't find a solution. Can someone help me out?
```javascript
$(".btn_rear_camera").click(function() {
    if (cameratype == "user") {
        capture('environment');
    } else {
        capture('user');
    }
});

function capture(facingMode) {
    cameratype = facingMode;
    localStream.getTracks().forEach(function(track) {
        track.stop();
    });
    var constraints = {
        video: {
            deviceId: devicesIds[1]
        },
        audio: true
    };
    navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
        replaceTracks(stream);
    }).catch(function(error) {
    });
}

function replaceTracks(newStream) {
    var elementId = "localVideo";
    detachMediaStream(elementId);
    newStream.getTracks().forEach(function(track) {
        localStream.addTrack(track);
    });
    attachMediaStream(elementId, newStream);

    // optionally, if you have active peer connections:
    _replaceTracksForPeer(peerConnection);

    function _replaceTracksForPeer(peer) {
        peer.getSenders().map(function(sender) {
            sender.replaceTrack(newStream.getTracks().find(function(track) {
                return track.kind === sender.track.kind;
            }));
        });
    }
}

function detachMediaStream(id) {
    var elem = document.getElementById(id);
    if (elem) {
        elem.pause();
        if (typeof elem.srcObject === 'object') {
            elem.srcObject = null;
        } else {
            elem.src = '';
        }
    }
};

function attachMediaStream(id, stream) {
    var elem = document.getElementById(id);
    if (elem) {
        if (typeof elem.srcObject === 'object') {
            elem.srcObject = stream;
        } else {
            elem.src = window.URL.createObjectURL(stream);
        }
        elem.onloadedmetadata = function(e) {
            elem.play();
        };
    } else {
        throw new Error('Unable to attach media stream');
    }
};
```
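For context, the pattern generally recommended for switching cameras mid-call is to swap the track on the RTCRtpSender with replaceTrack() and only stop the old track afterwards; stopping every track first (as capture() does above) ends the track the sender is transmitting, which is a common reason the remote side keeps seeing the last frame from the old camera. A minimal sketch, assuming the peerConnection, localStream, and localVideo element used in the code above:

```javascript
// Sketch: switch camera by swapping the track on the existing video sender.
async function switchCamera(facingMode) {
    // Request only a new video track; leave the existing audio track untouched.
    const newStream = await navigator.mediaDevices.getUserMedia({
        video: { facingMode: facingMode }, // or { deviceId: { exact: someId } }
        audio: false
    });
    const newVideoTrack = newStream.getVideoTracks()[0];

    // Swap the track that is actually being sent to the remote peer.
    const videoSender = peerConnection.getSenders()
        .find(sender => sender.track && sender.track.kind === 'video');
    await videoSender.replaceTrack(newVideoTrack);

    // Only now stop and remove the old local video track.
    const oldVideoTrack = localStream.getVideoTracks()[0];
    oldVideoTrack.stop();
    localStream.removeTrack(oldVideoTrack);
    localStream.addTrack(newVideoTrack);

    // Re-attach the updated stream to the local preview.
    document.getElementById('localVideo').srcObject = localStream;
}
```

replaceTrack() does not require renegotiation as long as the new track is of the same kind, which is why no new offer/answer exchange is shown here.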

Related

How to restart a track in WebRTC video calling

I implemented video calling using WebRTC with the https://github.com/webtutsplus/videoChat-WebFrontend library. But when I trigger the switch-camera functionality on the sender's side, the sender's video stops at the receiver's end. I followed the Unable to change camera / stream for WebRTC call link to switch the camera.
So can anyone please give me a solution to this issue?
Thanks.
I tried Unable to change camera / stream for WebRTC call to switch the camera, but the receiver does not get the sender's video when the camera is switched.
I followed the Unable to change camera / stream for WebRTC call link to switch the camera in a video call between 2 users.
This is the code to switch the camera.
```javascript
$(".btn_rear_camera").click(function() {
    if (cameratype == "user") {
        capture('environment');
    } else {
        capture('user');
    }
});

function capture(facingMode) {
    cameratype = facingMode;
    localStream.getTracks().forEach(function(track) {
        track.stop();
    });
    var constraints = {
        video: {
            deviceId: devicesIds[1]
        },
        audio: true
    };
    navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
        replaceTracks(stream);
    }).catch(function(error) {
    });
}

function replaceTracks(newStream) {
    var elementId = "localVideo";
    detachMediaStream(elementId);
    newStream.getTracks().forEach(function(track) {
        localStream.addTrack(track);
    });
    attachMediaStream(elementId, newStream);

    // optionally, if you have active peer connections:
    _replaceTracksForPeer(peerConnection);

    function _replaceTracksForPeer(peer) {
        peer.getSenders().map(function(sender) {
            sender.replaceTrack(newStream.getTracks().find(function(track) {
                return track.kind === sender.track.kind;
            }));
        });
    }
}

function detachMediaStream(id) {
    var elem = document.getElementById(id);
    if (elem) {
        elem.pause();
        if (typeof elem.srcObject === 'object') {
            elem.srcObject = null;
        } else {
            elem.src = '';
        }
    }
};

function attachMediaStream(id, stream) {
    var elem = document.getElementById(id);
    if (elem) {
        if (typeof elem.srcObject === 'object') {
            elem.srcObject = stream;
        } else {
            elem.src = window.URL.createObjectURL(stream);
        }
        elem.onloadedmetadata = function(e) {
            elem.play();
        };
    } else {
        throw new Error('Unable to attach media stream');
    }
};
```
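A small diagnostic that can help narrow this down (assuming peerConnection is the sender-side RTCPeerConnection): list what each RTCRtpSender is currently transmitting; after a successful replaceTrack() the video sender should report the new track, and its readyState should be "live", not "ended".

```javascript
// Log the track each sender is currently transmitting.
peerConnection.getSenders().forEach(function(sender) {
    if (sender.track) {
        console.log(sender.track.kind, sender.track.label,
                    'readyState:', sender.track.readyState);
    }
});
```

If the video sender still holds a track whose readyState is "ended", the receiver will keep showing the last decoded frame.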

Recording voice and converting speech to text at the same time

I want to use the Web Speech API for speech recognition and record the user's voice on Android devices at the same time (I mean the user holds a button, and his/her voice is recorded and transcribed to text simultaneously).
This works perfectly on Windows, but on Android it just returns the error:
no-speech
It seems that defining the MediaRecorder blocks microphone access for the Web Speech API on Android!
How can I fix this?
If I remove this line, which is responsible for recording, speech recognition works again:
new MediaRecorder(stream); // adding this line ruins the speech recognition
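For reference, the conflict can be reproduced with far less code. A stripped-down sketch (the button id is hypothetical); on desktop both APIs run together, while on Android the recognition reportedly fails with no-speech once the MediaRecorder is created:

```javascript
// Minimal repro: start recognition, then grab the microphone for recording.
const Recognition = window.SpeechRecognition || window.webkitSpeechRecognition;
const recognition = new Recognition();
recognition.interimResults = true;
recognition.onresult = e => console.log(e.results[e.results.length - 1][0].transcript);
recognition.onerror = e => console.log('recognition error:', e.error);

document.getElementById('start').addEventListener('click', async () => {
    recognition.start();
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const recorder = new MediaRecorder(stream); // removing this line restores recognition
    recorder.start();
});
```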
Here is the code in action. In the given code I didn't remove that line, in order to show that the code won't work on Android devices.
Note: this code should be tested on an Android device; it works fine on desktop.
CodePen: https://codepen.io/pixy-dixy/pen/GRddgYL?editors=1010
Demo here in SO:
```javascript
let audioChunks = [];
let rec;
let stopRecognize;
const output = document.getElementById('output');
async function Recognize() {
console.log('Recognize')
let recognitionAllowed = true;
stopRecognize = function() {
if(recognitionAllowed) {
recognition.stop();
recognitionAllowed = false;
}
}
var SpeechRecognition = SpeechRecognition || webkitSpeechRecognition;
var SpeechGrammarList = SpeechGrammarList || webkitSpeechGrammarList;
var SpeechRecognitionEvent = SpeechRecognitionEvent || webkitSpeechRecognitionEvent;
var recognition = new SpeechRecognition();
var speechRecognitionList = new SpeechGrammarList();
recognition.grammars = speechRecognitionList;
recognition.lang = 'en-GB';
recognition.continuous = false;
recognition.interimResults = true;
recognition.maxAlternatives = 1;
recognition.start();
recognition.onresult = function(event) {
window.interim_transcript = '';
window.speechResult = '';
for(var i = event.resultIndex; i < event.results.length; ++i) {
if(event.results[i].isFinal) {
speechResult += event.results[i][0].transcript;
console.log(speechResult);
output.innerHTML = speechResult;
} else {
interim_transcript += event.results[i][0].transcript;
console.log(interim_transcript);
output.innerHTML = interim_transcript;
}
}
}
recognition.onerror = function(event) {
// restartRecognition();
console.log('recognition error: ' + event.error);
}
recognition.onend = async function(event) {
restartRecognition();
}
function restartRecognition() {
try { if(recognitionAllowed) recognition.start(); } catch(err) {}
}
}
const startRecognition = document.getElementById('start-recognition');
startRecognition.addEventListener('mousedown', handleRecognitionStart);
startRecognition.addEventListener('mouseup', handleRecognitionEnd);
startRecognition.addEventListener('touchstart', handleRecognitionStart);
startRecognition.addEventListener('touchend', handleRecognitionEnd);
function handleRecognitionStart(e) {
console.log('handleRecognitionStart', isTouchDevice)
const event = e.type;
if(isTouchDevice && event == 'touchstart') {
recognitionStart();
} else if(!isTouchDevice && event == 'mousedown') {
console.log('handleRecognitionStart')
recognitionStart();
}
}
const isTouchDevice = touchCheck();
function touchCheck() {
const maxTouchPoints = navigator.maxTouchPoints || navigator.msMaxTouchPoints;
return 'ontouchstart' in window || maxTouchPoints > 0 || window.matchMedia && matchMedia('(any-pointer: coarse)').matches;
}
function handleRecognitionEnd(e) {
const event = e.type;
console.log(':::', event == 'touchend');
if(isTouchDevice && event == 'touchend') {
recognitionEnd();
} else if(!isTouchDevice && event == 'mouseup') {
recognitionEnd();
}
}
function recognitionEnd() {
resetRecognition();
}
function recognitionStart() {
console.log('recognitionStart')
Recognize();
audioChunks = [];
voiceRecorder.start()
}
function resetRecognition() {
console.log('reset')
if(typeof stopRecognize == "function") stopRecognize();
// if(rec.state !== 'inactive') rec.stop();
voiceRecorder.stop()
}
const playAudio = document.getElementById('play');
playAudio.addEventListener('click', () => {
console.log('play');
voiceRecorder.play();
})
class VoiceRecorder {
constructor() {
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
console.log("getUserMedia supported")
} else {
console.log("getUserMedia is not supported on your browser!")
}
this.mediaRecorder
this.stream
this.playerRef = document.querySelector("#player")
this.recorderRef = document.querySelector("#recorder")
this.chunks = []
this.isRecording = false
this.constraints = {
audio: true,
video: false
}
}
handleSuccess(stream) {
this.stream = stream
this.stream.oninactive = () => {
console.log("Stream ended!")
};
this.recorderRef.srcObject = this.stream
this.mediaRecorder = new MediaRecorder(this.stream)
console.log(this.mediaRecorder)
this.mediaRecorder.ondataavailable = this.onMediaRecorderDataAvailable.bind(this)
this.mediaRecorder.onstop = this.onMediaRecorderStop.bind(this)
this.recorderRef.play()
this.mediaRecorder.start()
}
handleError(error) {
console.log("navigator.getUserMedia error: ", error)
}
onMediaRecorderDataAvailable(e) { this.chunks.push(e.data) }
onMediaRecorderStop(e) {
const blob = new Blob(this.chunks, { 'type': 'audio/ogg; codecs=opus' })
const audioURL = window.URL.createObjectURL(blob)
this.playerRef.src = audioURL;
this.chunks = [];
this.stream.getAudioTracks().forEach(track => track.stop());
this.stream = null;
}
play() { this.playerRef.play(); }
start() {
console.log('start')
if(this.isRecording) return;
console.log('33')
this.isRecording = true;
this.playerRef.src = '';
navigator.mediaDevices
.getUserMedia(this.constraints)
.then(this.handleSuccess.bind(this))
.catch(this.handleError.bind(this))
}
stop() {
if(!this.isRecording) return;
this.isRecording = false;
this.recorderRef.pause();
this.mediaRecorder.stop();
}
}
voiceRecorder = new VoiceRecorder();
```
<button id="start-recognition">Hold This Button and Speak In Android This should output the text and record your voice at the s</button>
<button id="play">Play Recorded Audio</button>
<h1 id="output">Voice over here</h1>
<audio id="recorder" muted hidden></audio>
<audio id="player" hidden></audio>

A-Frame movement control on mobile phone regarding twoway-motion.js

I am testing twoway-motion.js in A-Frame, which provides a simple way to navigate without requiring device-orientation permission on a mobile phone.
Please check this Glitch page for details: https://glitch.com/~scrawny-efraasia
Also, please see twoway-motion.js by #flowerio.
```javascript
AFRAME.registerComponent('twoway-motion', {
schema: {
speed: { type: "number", default: 40 },
threshold: { type: "number", default: -40 },
nonMobileLoad: { type: "boolean", default: false },
removeCheckpoints: {type: "boolean", default: true },
chatty: {type: "boolean", default: true }
},
init: function () {
var twowaymotion = document.querySelector("[camera]").components["twoway-motion"];
twowaymotion.componentName = "twoway-motion";
report = function(text) {
if (twowaymotion.data.chatty) {
console.log(twowaymotion.componentName, ":", text);
}
}
report("init.");
// report("asked to load with speed=", this.data.speed);
if (!AFRAME.utils.device.isMobile() && this.data.nonMobileLoad === false) {
// this is only for mobile devices.
//document.querySelector("[camera]").removeAttribute("twoway-motion");
report("Retired. Will only work on mobile.");
return;
} else {
if (this.data.nonMobileLoad === true) {
report("Loading on non-mobile platform.");
}
}
if (this.el.components["wasd-controls"] === undefined) {
this.el.setAttribute("wasd-controls", "true");
report("Installing wasd-controls.");
}
this.el.components["wasd-controls"].data.acceleration = this.data.speed;
// two-way hides checkpoint-controls by default.
if (this.data.removeCheckpoints) {
if (this.el.components["checkpoint-controls"] !== undefined) {
var checkpoints = document.querySelectorAll("[checkpoint]");
for (var cp = 0; cp < checkpoints.length; cp++) {
checkpoints[cp].setAttribute("visible", false);
}
}
}
this.el.removeAttribute("universal-controls");
if (this.el.components["look-controls"] === undefined) {
this.el.setAttribute("look-controls", "true");
}
var cur = document.querySelector("[cursor]");
if (cur !== null) {
console.log(this.componentName, ": found a cursor.");
this.cur = cur;
//this.curcolor = cur.getAttribute("material").color;
this.curcolor = cur.getAttribute("color");
} else {
console.log(this.componentName, ": didn't find a cursor.");
}
var canvas = document.querySelector(".a-canvas");
canvas.addEventListener("mousedown", function (e) {
report("mousedown", e);
twowaymotion.touching = true;
this.touchTime = new Date().getTime();
});
canvas.addEventListener("mouseup", function (e) {
report("mouseup", e);
twowaymotion.touching = false;
});
canvas.addEventListener("touchstart", function (e) {
this.touch = e;
report("touches.length: ", e.touches.length);
if (e.touches.length > 1) {
report("multitouch: doing nothing");
} else {
report("touchstart", e);
twowaymotion.touching = true;
}
});
canvas.addEventListener("touchend", function () {
console.log(this.componentName, " touchend");
twowaymotion.touching = false;
});
},
update: function() {
if (this.el.components["twoway-controls"] !== undefined) {
this.el.components["wasd-controls"].data.acceleration = this.el.components["wasd-controls"].data.speed;
}
},
tick: function () {
if (!AFRAME.utils.device.isMobile() && this.data.nonMobileLoad === false) {
// this is only for mobile devices, unless you ask for it.
return;
}
if (!this.isPlaying) {
return;
}
var cam = this.el;
var camrot = cam.getAttribute("rotation");
if (camrot.x < this.data.threshold) {
// we are looking down
if (this.cur !== null && this.cur !== undefined) {
this.cur.setAttribute("material", "color", "orange");
}
if (this.touching === true) {
cam.components["wasd-controls"].keys["ArrowDown"] = true;
} else {
cam.components["wasd-controls"].keys["ArrowDown"] = false;
cam.components["wasd-controls"].keys["ArrowUp"] = false;
}
} else {
// we are looking forward or up
if (this.cur !== null && this.cur !== undefined) {
this.cur.setAttribute("material", "color", this.curcolor);
}
if (this.touching === true) {
cam.components["wasd-controls"].keys["ArrowUp"] = true;
} else {
cam.components["wasd-controls"].keys["ArrowDown"] = false;
cam.components["wasd-controls"].keys["ArrowUp"] = false;
}
}
},
pause: function () {
// we get isPlaying automatically from A-Frame
},
play: function () {
// we get isPlaying automatically from A-Frame
},
remove: function () {
if (this.el.components["wasd-controls"] === undefined) {
this.el.removeAttribute("wasd-controls");
}
} });
```
Since device-orientation permission is not granted on mobile phones, moving backward does not work; also, when the audience tries to rotate in a different direction by touching or sliding on the screen, it still acts as "move forward".
From what I imagine, a simple edit could be: if the audience touches the screen for more than 2 seconds, it starts to move forward; if the audience just rotates, it does not move forward, since when you slide or touch the screen to rotate, the touch usually doesn't last as long as 2 seconds.
This is the easiest solution I can imagine under the restriction of having no device-orientation permission.
Or is there any better way to separate rotating and moving forward by touch, based on touch duration?
Thanks!
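As a rough illustration of that hold-to-move idea, here is a sketch (untested against twoway-motion.js; the 2-second threshold, the moveForward flag, and the reuse of the component's canvas variable are assumptions):

```javascript
// Sketch: treat a touch as "move forward" only after it has been held for a
// while; short touches and slides are left to look-controls for rotation.
var HOLD_DELAY_MS = 2000; // assumed threshold from the question
var holdTimer = null;
var moveForward = false;

canvas.addEventListener("touchstart", function (e) {
    if (e.touches.length > 1) { return; } // ignore multitouch
    holdTimer = setTimeout(function () {
        moveForward = true; // held long enough: start moving
    }, HOLD_DELAY_MS);
});

canvas.addEventListener("touchmove", function () {
    // The finger is sliding, so this is a rotation gesture, not a hold.
    clearTimeout(holdTimer);
    moveForward = false;
});

canvas.addEventListener("touchend", function () {
    clearTimeout(holdTimer);
    moveForward = false;
});

// Inside tick(), the existing `twowaymotion.touching` check could then be
// replaced by this `moveForward` flag.
```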

Convert an audio buffer to play as an audio element

I'm using the SoundCloud API to stream audio from SoundCloud. However, I'm trying to get the track's BPM programmatically, so I have the following:
```javascript
Soundcloud.prototype.play = function (options) {
options = options || {};
var src;
if (options.streamUrl) {
src = options.streamUrl;
} else if (this._playlist) {
var length = this._playlist.tracks.length;
if (length) {
if (options.playlistIndex === undefined) {
this._playlistIndex = this._playlistIndex || 0;
} else {
this._playlistIndex = options.playlistIndex;
}
// be silent if index is out of range
if (this._playlistIndex >= length || this._playlistIndex < 0) {
this._playlistIndex = 0;
return;
}
src = this._playlist.tracks[this._playlistIndex].stream_url;
}
} else if (this._track) {
src = this._track.stream_url;
}
if (!src) {
throw new Error('There is no tracks to play, use `streamUrl` option or `load` method');
}
if (this._clientId) {
src = _appendQueryParam(src, 'client_id', this._clientId);
}
if (src !== this.audio.src) {
pulse.loadBufferFromURI(src, (event, pulse) => {
this.trackBPM = pulse.beat.bpm
this.audio.src = window.URL.createObjectURL(pulse.renderedBuffer);
this.playing = src;
console.log('trackBPM',pulse.beat.bpm);
})
} else return this.audio.play();
};
```
Now, this part:
```javascript
pulse.loadBufferFromURI(src, (event, pulse) => {
this.trackBPM = pulse.beat.bpm
this.audio.src = window.URL.createObjectURL(pulse.renderedBuffer);
this.playing = src;
console.log('trackBPM',pulse.beat.bpm);
})
```
attempts to load the stream_url into an audio buffer to get the BPM. The question is: can I convert the buffer into a source that an <audio> element can play? The reason is that I want to take advantage of its event listeners...
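As far as I know, a rendered AudioBuffer can't be assigned to an <audio> element directly, but it can be encoded into a WAV Blob and played through an object URL, which keeps the element's event listeners usable. A rough sketch (16-bit PCM, assuming pulse.renderedBuffer is a standard AudioBuffer):

```javascript
// Sketch: encode an AudioBuffer as 16-bit PCM WAV so it can be played
// through a normal <audio> element.
function audioBufferToWavBlob(buffer) {
    const numChannels = buffer.numberOfChannels;
    const sampleRate = buffer.sampleRate;
    const numFrames = buffer.length;
    const bytesPerSample = 2;
    const dataSize = numFrames * numChannels * bytesPerSample;
    const view = new DataView(new ArrayBuffer(44 + dataSize));

    function writeString(offset, s) {
        for (let i = 0; i < s.length; i++) view.setUint8(offset + i, s.charCodeAt(i));
    }

    // RIFF/WAVE header
    writeString(0, 'RIFF');
    view.setUint32(4, 36 + dataSize, true);
    writeString(8, 'WAVE');
    writeString(12, 'fmt ');
    view.setUint32(16, 16, true);                                        // fmt chunk size
    view.setUint16(20, 1, true);                                         // PCM format
    view.setUint16(22, numChannels, true);
    view.setUint32(24, sampleRate, true);
    view.setUint32(28, sampleRate * numChannels * bytesPerSample, true); // byte rate
    view.setUint16(32, numChannels * bytesPerSample, true);              // block align
    view.setUint16(34, 16, true);                                        // bits per sample
    writeString(36, 'data');
    view.setUint32(40, dataSize, true);

    // Interleave channels and convert float samples to 16-bit PCM.
    let offset = 44;
    for (let frame = 0; frame < numFrames; frame++) {
        for (let ch = 0; ch < numChannels; ch++) {
            const sample = Math.max(-1, Math.min(1, buffer.getChannelData(ch)[frame]));
            view.setInt16(offset, sample < 0 ? sample * 0x8000 : sample * 0x7FFF, true);
            offset += bytesPerSample;
        }
    }
    return new Blob([view], { type: 'audio/wav' });
}

// Usage in the callback above (replacing the createObjectURL call):
// this.audio.src = window.URL.createObjectURL(audioBufferToWavBlob(pulse.renderedBuffer));
```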

How to implement WebRTC in Firefox?

This code works fine in Google Chrome, but I tried to convert it to support Firefox and I always get no error in the console.
The cam seems to be running, but there's no video.
Here's the script:
```javascript
var socket = new WebSocket('ws://127.0.0.1:1338/'); // change the IP address to your websocket server
var stunServer = "stun.l.google.com:19302";
var sourcevid = document.getElementById('sourcevid');
var remotevid = document.getElementById('remotevid');
var localStream = null;
var remoteStream;
var peerConn = null;
var started = false;
var isRTCPeerConnection = true;
var mediaConstraints = {mandatory: {
OfferToReceiveAudio:true,
OfferToReceiveVideo:true }};
var logg = function(s) { console.log(s); };
// send the message to websocket server
function sendMessage(message) {
var mymsg = JSON.stringify(message);
logg("SEND: " + mymsg);
socket.send(mymsg);
}
function createPeerConnection() {
try {
logg("Creating peer connection");
var servers = [];
servers.push({'url':'stun:' + stunServer});
var pc_config = {'iceServers':servers};
peerConn = new webkitRTCPeerConnection(pc_config);
peerConn.onicecandidate = onIceCandidate;
} catch (e) {
try {
peerConn = new RTCPeerConnection('STUN ' + stunServer, onIceCandidate00);
isRTCPeerConnection = false;
} catch (e) {
logg("Failed to create PeerConnection, exception: " + e.message);
}
}
peerConn.onaddstream = onRemoteStreamAdded;
peerConn.onremovestream = onRemoteStreamRemoved;
}
// when remote adds a stream, hand it on to the local video element
function onRemoteStreamAdded(event) {
logg("Added remote stream");
remotevid.src = window.webkitURL.createObjectURL(event.stream);
}
function waitForRemoteVideo() {
if (remoteStream.videoTracks.length === 0 || remotevid.currentTime > 0) {
transitionToActive();
} else {
setTimeout(waitForRemoteVideo, 100);
}
}
function transitionToActive() {
remotevid.style.opacity = 1;
card.style.webkitTransform = "rotateY(180deg)";
setTimeout(function() { sourcevid.src = ""; }, 500);
setStatus("<input type=\"button\" id=\"hangup\" value=\"Hang up\" onclick=\"onHangup()\" />");
}
// when remote removes a stream, remove it from the local video element
function onRemoteStreamRemoved(event) {
logg("Remove remote stream");
remotevid.src = "";
}
function onIceCandidate(event) {
if (event.candidate) {
sendMessage({type: 'candidate',
label: event.candidate.sdpMLineIndex,
id: event.candidate.sdpMid,
candidate: event.candidate.candidate});
} else {
logg("End of candidates.");
}
}
function onIceCandidate00(candidate, moreToFollow) {
if (candidate) {
sendMessage({type: 'candidate', label: candidate.label, candidate: candidate.toSdp()});
}
if (!moreToFollow) {
logg("End of candidates.");
}
}
// start the connection upon user request
function connect() {
if (!started && localStream) {
console.log("Creating PeerConnection.");
createPeerConnection();
logg('Adding local stream...');
peerConn.addStream(localStream);
started = true;
logg("isRTCPeerConnection: " + isRTCPeerConnection);
//create offer
if (isRTCPeerConnection) {
peerConn.createOffer(setLocalAndSendMessage, null, mediaConstraints);
} else {
var offer = peerConn.createOffer(mediaConstraints);
peerConn.setLocalDescription(peerConn.SDP_OFFER, offer);
sendMessage({type: 'offer', sdp: offer.toSdp()});
peerConn.startIce();
}
} else {
alert("Local stream not running yet.");
}
}
// accept connection request
socket.addEventListener("message", onMessage, false);
function onMessage(evt) {
logg("RECEIVED: " + evt.data);
if (isRTCPeerConnection)
processSignalingMessage(evt.data);
else
processSignalingMessage00(evt.data);
}
function processSignalingMessage(message) {
var msg = JSON.parse(message);
if (msg.type === 'offer') {
if (!started && localStream) {
createPeerConnection();
logg('Adding local stream...');
peerConn.addStream(localStream);
started = true;
logg("isRTCPeerConnection: " + isRTCPeerConnection);
if (isRTCPeerConnection) {
//set remote description
peerConn.setRemoteDescription(new RTCSessionDescription(msg));
//create answer
console.log("Sending answer to peer.");
peerConn.createAnswer(setLocalAndSendMessage, null, mediaConstraints);
} else {
//set remote description
peerConn.setRemoteDescription(peerConn.SDP_OFFER, new SessionDescription(msg.sdp));
//create answer
var offer = peerConn.remoteDescription;
var answer = peerConn.createAnswer(offer.toSdp(), mediaConstraints);
console.log("Sending answer to peer.");
setLocalAndSendMessage00(answer);
}
}
} else if (msg.type === 'answer' && started) {
peerConn.setRemoteDescription(new RTCSessionDescription(msg));
} else if (msg.type === 'candidate' && started) {
var candidate = new RTCIceCandidate({sdpMLineIndex:msg.label, candidate:msg.candidate});
peerConn.addIceCandidate(candidate);
} else if (msg.type == 'chat'){
addChatMsg(msg.nick, msg.cid, msg.data);
}
else if (msg.type === 'bye' && started) {
onRemoteHangUp();
}
}
function processSignalingMessage00(message) {
var msg = JSON.parse(message);
// if (msg.type === 'offer') --> will never happened since isRTCPeerConnection=true initially
if (msg.type === 'answer' && started) {
peerConn.setRemoteDescription(peerConn.SDP_ANSWER, new SessionDescription(msg.sdp));
} else if (msg.type === 'candidate' && started) {
var candidate = new IceCandidate(msg.label, msg.candidate);
peerConn.processIceMessage(candidate);
} else if (msg.type === 'bye' && started) {
onRemoteHangUp();
}
}
function setLocalAndSendMessage(sessionDescription) {
peerConn.setLocalDescription(sessionDescription);
sendMessage(sessionDescription);
}
function setLocalAndSendMessage00(answer) {
peerConn.setLocalDescription(peerConn.SDP_ANSWER, answer);
sendMessage({type: 'answer', sdp: answer.toSdp()});
peerConn.startIce();
}
function onRemoteHangUp() {
logg("Remote Hang up.");
closeSession();
}
function onHangUp() {
logg("Hang up.");
if (started) {
sendMessage({type: 'bye'});
closeSession();
}
}
function closeSession() {
peerConn.close();
peerConn = null;
started = false;
remotevid.src = "";
}
window.onbeforeunload = function() {
if (started) {
sendMessage({type: 'bye'});
}
}
function startVideo() {
// Replace the source of the video element with the stream from the camera
if (navigator.mozGetUserMedia) {
try {
navigator.mozGetUserMedia({audio: true, video: true}, successCallback, errorCallback);
} catch (e) {
navigator.mozGetUserMedia("video,audio", successCallback, errorCallback);
}
}
else {
try {
navigator.webkitGetUserMedia({audio: true, video: true}, successCallback, errorCallback);
} catch (e) {
navigator.webkitGetUserMedia("video,audio", successCallback, errorCallback);
}
}
function successCallback(stream) {
if (navigator.mozGetUserMedia) {
sourcevid.mozSrcObject = stream;
sourcevid.style.webkitTransform = "rotateY(180deg)";
localStream = stream;
}
if(navigator.webkitGetUserMedia){
sourcevid.src = window.webkitURL.createObjectURL(stream);
sourcevid.style.webkitTransform = "rotateY(180deg)";
localStream = stream;
}
}
function errorCallback(error) {
logg('An error occurred: [CODE ' + error.code + ']');
}
}
function stopVideo() {
sourcevid.src = "";
}
```
And here is the HTML:
```html
<script type="text/javascript" src="{{ asset('bundles/PFESiivt/js/visio.js') }}"></script>
<div id="main">
<div id="" style="height:280px;width:700;">
<div id="livevideodivk" style="float:left;">
<video id="sourcevid" style="height:280px;width:320px;" autoplay></video>
</div>
<div id="remotevideodivk" style="float:left;margin-left:10px">
<video id="remotevid" style="height:280px;width:320px;" autoplay></video>
</div>
</div>
<center>
<button id="btn" type="button" onclick="startVideo();">Start video</button>
<button id="btn" type="button" onclick="stopVideo();">Stop video</button>
<button id="btn" type="button" onclick="connect();">Connect</button>
<button id="btn" type="button" onclick="onHangUp();">Hang Up</button>
</center>
</div>
```
I have not gone through the complete code, but for starters:
for Firefox it is mozRTCPeerConnection, not RTCPeerConnection.
Secondly, for the Firefox PeerConnection object, the onicecandidate handler is missing.
P.S.: I think it is a very bad idea to post the complete code; I would advise doing a bit of debugging yourself (to identify the block causing the issue) and then posting the relevant block when you are not able to solve it.
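To illustrate those first two points, here is a small compatibility sketch of the kind that was common with prefixed WebRTC APIs (exact behavior depends on the Firefox version; the handler wiring mirrors the code above):

```javascript
// Pick whichever constructor the browser exposes, so one code path serves
// both Chrome (webkit prefix) and Firefox (moz prefix).
var PeerConnection = window.RTCPeerConnection ||
                     window.mozRTCPeerConnection ||
                     window.webkitRTCPeerConnection;
var SessionDescription = window.RTCSessionDescription || window.mozRTCSessionDescription;
var IceCandidate = window.RTCIceCandidate || window.mozRTCIceCandidate;
var URLAPI = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.mozGetUserMedia ||
                         navigator.webkitGetUserMedia;

function createPeerConnection() {
    peerConn = new PeerConnection({ iceServers: [{ url: 'stun:' + stunServer }] });
    // Register the same handlers in every browser, not only in the webkit branch.
    peerConn.onicecandidate = onIceCandidate;
    peerConn.onaddstream = onRemoteStreamAdded;
    peerConn.onremovestream = onRemoteStreamRemoved;
}

// For the remote video, older Firefox builds wanted mozSrcObject rather than
// an object URL:
function onRemoteStreamAdded(event) {
    if ('mozSrcObject' in remotevid) {
        remotevid.mozSrcObject = event.stream;
    } else {
        remotevid.src = URLAPI.createObjectURL(event.stream);
    }
}
```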
