I have found quite a few similar questions, but they are mostly old and outdated, and the few pointers some of them provided led nowhere.
Basically, I'm trying to use WebRTC and the PhoneGap iosrtc plugin to create a simple two-way video chat.
I followed everything to the letter and managed to show my own video in the app, but the 'remote' video never shows and I can't figure out what the issue is at all!
I have enabled debugging in PhoneGap but I don't get any errors.
This is my complete code:
HTML:
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
<script src="https://rtcmulticonnection.herokuapp.com/dist/RTCMultiConnection.min.js"></script>
<script src="https://rtcmulticonnection.herokuapp.com/node_modules/webrtc-adapter/out/adapter.js"></script>
<script src="https://rtcmulticonnection.herokuapp.com/socket.io/socket.io.js"></script>
<!-- custom layout for HTML5 audio/video elements -->
<link rel="stylesheet" href="https://rtcmulticonnection.herokuapp.com/dev/getHTMLMediaElement.css">
<script src="https://rtcmulticonnection.herokuapp.com/dev/getHTMLMediaElement.js"></script>
<script src="https://rtcmulticonnection.herokuapp.com/node_modules/recordrtc/RecordRTC.js"></script>
<script src="cordova.js"></script>
<script src="ios-websocket-hack.js"></script>
<section class="make-center">
<div>
<label><input type="checkbox" id="record-entire-conference"> Record Entire Conference In The Browser?</label>
<span id="recording-status" style="display: none;"></span>
<button id="btn-stop-recording" style="display: none;">Stop Recording</button>
<br><br>
<input type="text" id="room-id" value="abcdef" autocorrect=off autocapitalize=off size=20>
<button id="open-room">Open Room</button>
<button id="join-room">Join Room</button>
<button id="open-or-join-room">Auto Open Or Join Room</button>
</div>
<div id="videos-container" style="margin: 20px 0;"></div>
<div id="room-urls" style="text-align: center;display: none;background: #F1EDED;margin: 15px -10px;border: 1px solid rgb(189, 189, 189);border-left: 0;border-right: 0;"></div>
</section>
JAVASCRIPT:
// ......................................................
// .......................UI Code........................
// ......................................................
document.getElementById('open-room').onclick = function() {
disableInputButtons();
connection.open(document.getElementById('room-id').value, function(isRoomOpened, roomid, error) {
if(isRoomOpened === true) {
showRoomURL(connection.sessionid);
}
else {
disableInputButtons(true);
if(error === 'Room not available') {
alert('Someone already created this room. Please either join or create a separate room.');
return;
}
alert(error);
}
});
};
document.getElementById('join-room').onclick = function() {
disableInputButtons();
connection.join(document.getElementById('room-id').value, function(isJoinedRoom, roomid, error) {
if (error) {
disableInputButtons(true);
if(error === 'Room not available') {
alert('This room does not exist. Please either create it or wait for moderator to enter in the room.');
return;
}
alert(error);
}
});
};
document.getElementById('open-or-join-room').onclick = function() {
disableInputButtons();
connection.openOrJoin(document.getElementById('room-id').value, function(isRoomExist, roomid, error) {
if(error) {
disableInputButtons(true);
alert(error);
}
else if (connection.isInitiator === true) {
// if room doesn't exist, it means that current user will create the room
showRoomURL(roomid);
}
});
};
// ......................................................
// ..................RTCMultiConnection Code.............
// ......................................................
var connection = new RTCMultiConnection();
// by default, socket.io server is assumed to be deployed on your own URL
//connection.socketURL = '/';
// comment-out below line if you do not have your own socket.io server
connection.socketURL = 'https://rtcmulticonnection.herokuapp.com:443/';
connection.socketMessageEvent = 'video-conference-demo';
connection.session = {
audio: true,
video: true
};
connection.sdpConstraints.mandatory = {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
};
connection.videosContainer = document.getElementById('videos-container');
connection.onstream = function(event) {
var existing = document.getElementById(event.streamid);
if(existing && existing.parentNode) {
existing.parentNode.removeChild(existing);
}
/*event.mediaElement.removeAttribute('src');
event.mediaElement.removeAttribute('srcObject');
event.mediaElement.muted = true;
event.mediaElement.volume = 0;*/
var video = document.createElement('video');
try {
video.setAttributeNode(document.createAttribute('autoplay'));
video.setAttributeNode(document.createAttribute('playsinline'));
} catch (e) {
video.setAttribute('autoplay', true);
video.setAttribute('playsinline', true);
}
if(event.type === 'local') {
video.setAttribute('class', 'myvideo');
//$('.yourVideo').attr('src', event.stream);
//$('.yourVideo').attr('id', event.streamid);
alert('local');
video.volume = 0;
try {
video.setAttributeNode(document.createAttribute('muted'));
} catch (e) {
video.setAttribute('muted', true);
}
}
if (event.type === 'remote') {
alert('remote');
video.setAttribute('class', 'othersvideo');
}
video.src = URL.createObjectURL(event.stream);
//var width = parseInt(connection.videosContainer.clientWidth / 3) - 20;
var width = $(document).width();
var height = $(document).height();
var mediaElement = getHTMLMediaElement(video, {
/*title: event.userid,*/
buttons: ['full-screen'],
width: 100,
height: 100,
showOnMouseEnter: false
});
connection.videosContainer.appendChild(mediaElement);
setTimeout(function() {
// mediaElement.media.play();
video.play();
}, 5000);
mediaElement.id = event.streamid;
// to keep room-id in cache
localStorage.setItem(connection.socketMessageEvent, connection.sessionid);
chkRecordConference.parentNode.style.display = 'none';
if(chkRecordConference.checked === true) {
btnStopRecording.style.display = 'inline-block';
recordingStatus.style.display = 'inline-block';
var recorder = connection.recorder;
if(!recorder) {
recorder = RecordRTC([event.stream], {
type: 'video'
});
recorder.startRecording();
connection.recorder = recorder;
}
else {
recorder.getInternalRecorder().addStreams([event.stream]);
}
if(!connection.recorder.streams) {
connection.recorder.streams = [];
}
connection.recorder.streams.push(event.stream);
recordingStatus.innerHTML = 'Recording ' + connection.recorder.streams.length + ' streams';
}
if(event.type === 'local') {
connection.socket.on('disconnect', function() {
if(!connection.getAllParticipants().length) {
location.reload();
}
});
}
};
var recordingStatus = document.getElementById('recording-status');
var chkRecordConference = document.getElementById('record-entire-conference');
var btnStopRecording = document.getElementById('btn-stop-recording');
btnStopRecording.onclick = function() {
var recorder = connection.recorder;
if(!recorder) return alert('No recorder found.');
recorder.stopRecording(function() {
var blob = recorder.getBlob();
invokeSaveAsDialog(blob);
connection.recorder = null;
btnStopRecording.style.display = 'none';
recordingStatus.style.display = 'none';
chkRecordConference.parentNode.style.display = 'inline-block';
});
};
connection.onstreamended = function(event) {
var mediaElement = document.getElementById(event.streamid);
if (mediaElement) {
mediaElement.parentNode.removeChild(mediaElement);
}
};
connection.onMediaError = function(e) {
if (e.message === 'Concurrent mic process limit.') {
if (DetectRTC.audioInputDevices.length <= 1) {
alert('Please select external microphone. Check github issue number 483.');
return;
}
var secondaryMic = DetectRTC.audioInputDevices[1].deviceId;
connection.mediaConstraints.audio = {
deviceId: secondaryMic
};
connection.join(connection.sessionid);
}
};
// ..................................
// ALL below scripts are redundant!!!
// ..................................
function disableInputButtons(enable) {
document.getElementById('room-id').onkeyup();
document.getElementById('open-or-join-room').disabled = !enable;
document.getElementById('open-room').disabled = !enable;
document.getElementById('join-room').disabled = !enable;
document.getElementById('room-id').disabled = !enable;
}
// ......................................................
// ......................Handling Room-ID................
// ......................................................
function showRoomURL(roomid) {
var roomHashURL = '#' + roomid;
var roomQueryStringURL = '?roomid=' + roomid;
var html = '<h2>Unique URL for your room:</h2><br>';
html += 'Hash URL: ' + roomHashURL + '';
html += '<br>';
html += 'QueryString URL: ' + roomQueryStringURL + '';
var roomURLsDiv = document.getElementById('room-urls');
roomURLsDiv.innerHTML = html;
roomURLsDiv.style.display = 'block';
}
(function() {
var params = {},
r = /([^&=]+)=?([^&]*)/g;
function d(s) {
return decodeURIComponent(s.replace(/\+/g, ' '));
}
var match, search = window.location.search;
while (match = r.exec(search.substring(1)))
params[d(match[1])] = d(match[2]);
window.params = params;
})();
var roomid = '';
if (localStorage.getItem(connection.socketMessageEvent)) {
roomid = localStorage.getItem(connection.socketMessageEvent);
} else {
roomid = connection.token();
}
var txtRoomId = document.getElementById('room-id');
txtRoomId.value = roomid;
txtRoomId.onkeyup = txtRoomId.oninput = txtRoomId.onpaste = function() {
localStorage.setItem(connection.socketMessageEvent, document.getElementById('room-id').value);
};
var hashString = location.hash.replace('#', '');
if (hashString.length && hashString.indexOf('comment-') == 0) {
hashString = '';
}
var roomid = params.roomid;
if (!roomid && hashString.length) {
roomid = hashString;
}
if (roomid && roomid.length) {
document.getElementById('room-id').value = roomid;
localStorage.setItem(connection.socketMessageEvent, roomid);
// auto-join-room
(function reCheckRoomPresence() {
connection.checkPresence(roomid, function(isRoomExist) {
if (isRoomExist) {
connection.join(roomid);
return;
}
setTimeout(reCheckRoomPresence, 5000);
});
})();
disableInputButtons();
}
// detect 2G
if(navigator.connection &&
navigator.connection.type === 'cellular' &&
navigator.connection.downlinkMax <= 0.115) {
alert('2G is not supported. Please use a better internet service.');
}
window.cordova.InAppBrowser.open(" https://vps267717.ovh.net/webrtc", "_blank", "location=no,toolbar=yes");
As you can see in my code, I have this:
if (event.type === 'remote') {
alert('remote');
video.setAttribute('class', 'othersvideo');
}
But this never fires for some reason!
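One thing I'm wondering about, as a guess on my side rather than something I've confirmed on iOS: URL.createObjectURL(stream) is deprecated for MediaStream objects, so maybe the attach step should use srcObject instead, roughly like this:
// Guess: attach the MediaStream directly instead of going through createObjectURL.
if ('srcObject' in video) {
    video.srcObject = event.stream;
} else {
    video.src = URL.createObjectURL(event.stream); // legacy fallback
}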
I have spent the last 3 days trying to figure this out but I haven't had any luck.
So any help and advice would be appreciated.
Thanks in advance.
Related
I want to use the Web Speech API for speech recognition and record the user's voice on Android devices at the same time (i.e. the user holds a button, and their voice is recorded and transcribed to text simultaneously).
This works perfectly on Windows, but on Android it just returns the error:
no-speech
It seems that creating the MediaRecorder blocks the Web Speech API's access to the microphone on Android!
How can I fix this?
If I remove this line, which is responsible for recording, speech recognition works again:
new MediaRecorder(stream); // adding this line ruins the speech recognition
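To make the conflict easier to see in isolation, here is a stripped-down sketch of the combination that fails for me (assumption: Android Chrome, with nothing else from my app involved):
// Minimal repro sketch: start recognition, then attach a MediaRecorder to the mic.
// On desktop both run together; on Android the recognition ends with 'no-speech'.
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
const recognition = new SpeechRecognition();
recognition.onresult = e => console.log(e.results[0][0].transcript);
recognition.onerror = e => console.log('recognition error:', e.error);
recognition.start();
navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => {
    const recorder = new MediaRecorder(stream); // this is the line that breaks recognition
    recorder.start();
});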
Here is the code in action:
In the code below I didn't remove that line, in order to show that it won't work on Android devices.
Note: this code should be tested on an Android device; it works fine on desktop.
CodePen: https://codepen.io/pixy-dixy/pen/GRddgYL?editors=1010
Demo here in SO:
let audioChunks = [];
let rec;
let stopRecognize;
const output = document.getElementById('output');
async function Recognize() {
console.log('Recognize')
let recognitionAllowed = true;
stopRecognize = function() {
if(recognitionAllowed) {
recognition.stop();
recognitionAllowed = false;
}
}
var SpeechRecognition = SpeechRecognition || webkitSpeechRecognition;
var SpeechGrammarList = SpeechGrammarList || webkitSpeechGrammarList;
var SpeechRecognitionEvent = SpeechRecognitionEvent || webkitSpeechRecognitionEvent;
var recognition = new SpeechRecognition();
var speechRecognitionList = new SpeechGrammarList();
recognition.grammars = speechRecognitionList;
recognition.lang = 'en-GB';
recognition.continuous = false;
recognition.interimResults = true;
recognition.maxAlternatives = 1;
recognition.start();
recognition.onresult = function(event) {
window.interim_transcript = '';
window.speechResult = '';
for(var i = event.resultIndex; i < event.results.length; ++i) {
if(event.results[i].isFinal) {
speechResult += event.results[i][0].transcript;
console.log(speechResult);
output.innerHTML = speechResult;
} else {
interim_transcript += event.results[i][0].transcript;
console.log(interim_transcript);
output.innerHTML = interim_transcript;
}
}
}
recognition.onerror = function(event) {
// restartRecognition();
console.log('recognition error: ' + event.error);
}
recognition.onend = async function(event) {
restartRecognition();
}
function restartRecognition() {
try { if(recognitionAllowed) recognition.start(); } catch(err) {}
}
}
const startRecognition = document.getElementById('start-recognition');
startRecognition.addEventListener('mousedown', handleRecognitionStart);
startRecognition.addEventListener('mouseup', handleRecognitionEnd);
startRecognition.addEventListener('touchstart', handleRecognitionStart);
startRecognition.addEventListener('touchend', handleRecognitionEnd);
function handleRecognitionStart(e) {
console.log('handleRecognitionStart', isTouchDevice)
const event = e.type;
if(isTouchDevice && event == 'touchstart') {
recognitionStart();
} else if(!isTouchDevice && event == 'mousedown') {
console.log('handleRecognitionStart')
recognitionStart();
}
}
const isTouchDevice = touchCheck();
function touchCheck() {
const maxTouchPoints = navigator.maxTouchPoints || navigator.msMaxTouchPoints;
return 'ontouchstart' in window || maxTouchPoints > 0 || window.matchMedia && matchMedia('(any-pointer: coarse)').matches;
}
function handleRecognitionEnd(e) {
const event = e.type;
console.log(':::', event == 'touchend');
if(isTouchDevice && event == 'touchend') {
recognitionEnd();
} else if(!isTouchDevice && event == 'mouseup') {
recognitionEnd();
}
}
function recognitionEnd() {
resetRecognition();
}
function recognitionStart() {
console.log('recognitionStart')
Recognize();
audioChunks = [];
voiceRecorder.start()
}
function resetRecognition() {
console.log('reset')
if(typeof stopRecognize == "function") stopRecognize();
// if(rec.state !== 'inactive') rec.stop();
voiceRecorder.stop()
}
const playAudio = document.getElementById('play');
playAudio.addEventListener('click', () => {
console.log('play');
voiceRecorder.play();
})
class VoiceRecorder {
constructor() {
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
console.log("getUserMedia supported")
} else {
console.log("getUserMedia is not supported on your browser!")
}
this.mediaRecorder
this.stream
this.playerRef = document.querySelector("#player")
this.recorderRef = document.querySelector("#recorder")
this.chunks = []
this.isRecording = false
this.constraints = {
audio: true,
video: false
}
}
handleSuccess(stream) {
this.stream = stream
this.stream.oninactive = () => {
console.log("Stream ended!")
};
this.recorderRef.srcObject = this.stream
this.mediaRecorder = new MediaRecorder(this.stream)
console.log(this.mediaRecorder)
this.mediaRecorder.ondataavailable = this.onMediaRecorderDataAvailable.bind(this)
this.mediaRecorder.onstop = this.onMediaRecorderStop.bind(this)
this.recorderRef.play()
this.mediaRecorder.start()
}
handleError(error) {
console.log("navigator.getUserMedia error: ", error)
}
onMediaRecorderDataAvailable(e) { this.chunks.push(e.data) }
onMediaRecorderStop(e) {
const blob = new Blob(this.chunks, { 'type': 'audio/ogg; codecs=opus' })
const audioURL = window.URL.createObjectURL(blob)
this.playerRef.src = audioURL;
this.chunks = [];
this.stream.getAudioTracks().forEach(track => track.stop());
this.stream = null;
}
play() { this.playerRef.play(); }
start() {
console.log('start')
if(this.isRecording) return;
console.log('33')
this.isRecording = true;
this.playerRef.src = '';
navigator.mediaDevices
.getUserMedia(this.constraints)
.then(this.handleSuccess.bind(this))
.catch(this.handleError.bind(this))
}
stop() {
if(!this.isRecording) return;
this.isRecording = false;
this.recorderRef.pause();
this.mediaRecorder.stop();
}
}
voiceRecorder = new VoiceRecorder();
<button id="start-recognition">Hold This Button and Speak In Android This should output the text and record your voice at the s</button>
<button id="play">Play Recorded Audio</button>
<h1 id="output">Voice over here</h1>
<audio id="recorder" muted hidden></audio>
<audio id="player" hidden></audio>
Basically, what I'm trying to do is detect faces locally while in a video conference. For this purpose I'm using RTCMultiConnection for the video-conference part (this specific part, to be exact) and OpenCV.js for the computer-vision part (specifically this repository).
I know that I need to feed the MediaStream from the video element into the canvas element inside the OpenCV function. The stream was not showing up on the canvas, so I figured I would make the variable holding that MediaStream object global so I could pass it to the OpenCV function, but that is not working either.
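To make clearer what I mean, this is roughly the wiring I am after (startFaceDetection is just a name I made up here, and I'm assuming cv.VideoCapture reads the element's width/height attributes, so they have to be set before capturing):
// Rough sketch of the intended flow: hand the video element to OpenCV
// once its metadata (and therefore its real dimensions) is available.
connection.onstream = function(event) {
    var video = document.createElement('video');
    video.id = 'cam_input';
    video.autoplay = true;
    video.playsInline = true;
    video.muted = (event.type === 'local');
    video.srcObject = event.stream;                 // the MediaStream I want OpenCV to see
    connection.videosContainer.appendChild(video);
    video.onloadedmetadata = function() {
        video.width = video.videoWidth;             // cv.VideoCapture uses these attributes
        video.height = video.videoHeight;
        startFaceDetection(video);                  // pass the element into the OpenCV code
    };
};
function startFaceDetection(video) {
    var src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
    var cap = new cv.VideoCapture(video);
    // ...then run the same processVideo() loop as in the code below,
    // drawing the detections into #canvas_output with cv.imshow.
}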
I would be truly grateful for any insight you can provide, thank you.
Here's the code:
<!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<title>Video Conferencing using RTCMultiConnection</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
<link rel="shortcut icon" href="/demos/logo.png">
<link rel="stylesheet" href="/demos/stylesheet.css">
<script src="/demos/menu.js"></script>
<script async src="js/opencv.js" ></script>
<script src="js/utils.js"></script>
</head>
<body>
<header>
<!-- <a class="logo" href="/"><img src="/demos/logo.png" alt="RTCMultiConnection"></a>
Menu<img src="/demos/menu-icon.png" alt="Menu">
<!-- <nav>
<li>
Home
</li>
<li>
Demos
</li>
<li>
Getting Started
</li>
<li>
FAQ
</li>
<li>
YouTube
</li>
<li>
Wiki
</li>
<li>
Github
</li>
</nav> -->
</header>
<h1>
Video Conferencing using RTCMultiConnection
<p class="no-mobile">
Multi-user (many-to-many) video chat using mesh networking model.
</p>
</h1>
<section class="make-center">
<div>
<label><input type="checkbox" id="record-entire-conference"> Record Entire Conference In The Browser?</label>
<span id="recording-status" style="display: none;"></span>
<button id="btn-stop-recording" style="display: none;">Stop Recording</button>
<br><br>
<input type="text" id="room-id" value="abcdef" autocorrect=off autocapitalize=off size=20>
<button id="open-room">Open Room</button>
<button id="join-room">Join Room</button>
<button id="open-or-join-room">Auto Open Or Join Room</button>
</div>
<div id="videos-container" style="margin: 20px 0;"></div>
<canvas id="canvas_output"></canvas>
<div id="room-urls" style="text-align: center;display: none;background: #F1EDED;margin: 15px -10px;border: 1px solid rgb(189, 189, 189);border-left: 0;border-right: 0;"></div>
</section>
<script src="/dist/RTCMultiConnection.min.js"></script>
<script src="/node_modules/webrtc-adapter/out/adapter.js"></script>
<script src="/socket.io/socket.io.js"></script>
<!-- <script async src="js/opencv.js" onload="openCvReady()"></script> -->
<!-- <script src="js/utils.js"></script> -->
<!-- custom layout for HTML5 audio/video elements -->
<link rel="stylesheet" href="/dev/getHTMLMediaElement.css">
<script src="/dev/getHTMLMediaElement.js"></script>
<script src="/node_modules/recordrtc/RecordRTC.js"></script>
<script>
// ......................................................
// .......................UI Code........................
// ......................................................
document.getElementById('open-room').onclick = function() {
disableInputButtons();
connection.open(document.getElementById('room-id').value, function(isRoomOpened, roomid, error) {
if(isRoomOpened === true) {
showRoomURL(connection.sessionid);
}
else {
disableInputButtons(true);
if(error === 'Room not available') {
alert('Someone already created this room. Please either join or create a separate room.');
return;
}
alert(error);
}
});
};
document.getElementById('join-room').onclick = function() {
disableInputButtons();
connection.join(document.getElementById('room-id').value, function(isJoinedRoom, roomid, error) {
if (error) {
disableInputButtons(true);
if(error === 'Room not available') {
alert('This room does not exist. Please either create it or wait for moderator to enter in the room.');
return;
}
alert(error);
}
});
};
document.getElementById('open-or-join-room').onclick = function() {
disableInputButtons();
connection.openOrJoin(document.getElementById('room-id').value, function(isRoomExist, roomid, error) {
if(error) {
disableInputButtons(true);
alert(error);
}
else if (connection.isInitiator === true) {
// if room doesn't exist, it means that current user will create the room
showRoomURL(roomid);
}
});
};
// ......................................................
// .......................UI Code ENDS........................
// ......................................................
// ***********************************************************
// ......................................................
// ..................RTCMultiConnection Code.............
// ......................................................
var connection = new RTCMultiConnection();
// by default, socket.io server is assumed to be deployed on your own URL
connection.socketURL = '/';
// comment-out below line if you do not have your own socket.io server
// connection.socketURL = 'https://rtcmulticonnection.herokuapp.com:443/';
connection.socketMessageEvent = 'video-conference-demo';
connection.session = {
audio: true,
video: true
};
connection.sdpConstraints.mandatory = {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
};
// STAR_FIX_VIDEO_AUTO_PAUSE_ISSUES
// via: https://github.com/muaz-khan/RTCMultiConnection/issues/778#issuecomment-524853468
var bitrates = 512;
var resolutions = 'Ultra-HD';
var videoConstraints = {};
if (resolutions == 'HD') {
videoConstraints = {
width: {
ideal: 1280
},
height: {
ideal: 720
},
frameRate: 30
};
}
if (resolutions == 'Ultra-HD') {
videoConstraints = {
width: {
ideal: 1920
},
height: {
ideal: 1080
},
frameRate: 30
};
}
connection.mediaConstraints = {
video: videoConstraints,
audio: true
};
var CodecsHandler = connection.CodecsHandler;
connection.processSdp = function(sdp) {
var codecs = 'vp8';
if (codecs.length) {
sdp = CodecsHandler.preferCodec(sdp, codecs.toLowerCase());
}
if (resolutions == 'HD') {
sdp = CodecsHandler.setApplicationSpecificBandwidth(sdp, {
audio: 128,
video: bitrates,
screen: bitrates
});
sdp = CodecsHandler.setVideoBitrates(sdp, {
min: bitrates * 8 * 1024,
max: bitrates * 8 * 1024,
});
}
if (resolutions == 'Ultra-HD') {
sdp = CodecsHandler.setApplicationSpecificBandwidth(sdp, {
audio: 128,
video: bitrates,
screen: bitrates
});
sdp = CodecsHandler.setVideoBitrates(sdp, {
min: bitrates * 8 * 1024,
max: bitrates * 8 * 1024,
});
}
return sdp;
};
// END_FIX_VIDEO_AUTO_PAUSE_ISSUES
// https://www.rtcmulticonnection.org/docs/iceServers/
// use your own TURN-server here!
connection.iceServers = [{
'urls': [
'stun:stun.l.google.com:19302',
'stun:stun1.l.google.com:19302',
'stun:stun2.l.google.com:19302',
'stun:stun.l.google.com:19302?transport=udp',
]
}];
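// For example, a TURN entry could be added like this (placeholder URL and
// credentials only, not a working server):
// connection.iceServers.push({
//     urls: 'turn:turn.example.com:3478',
//     username: 'webrtc',
//     credential: 'secret'
// });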
connection.videosContainer = document.getElementById('videos-container');
connection.onstream = function(specialEvent) {
// it's just the usual `event`, but I was debugging it in the console
// and wanted to change the name to specialEvent
var existing = document.getElementById(specialEvent.streamid);
if(existing && existing.parentNode) {
existing.parentNode.removeChild(existing);
}
// specialEvent.mediaElement.removeAttribute('src');
// specialEvent.mediaElement.removeAttribute('srcObject');
specialEvent.mediaElement.muted = true;
specialEvent.mediaElement.volume = 0;
var video = document.createElement('video');
video.setAttribute("id", "cam_input");
window.video = video;
try {
video.setAttributeNode(document.createAttribute('autoplay'));
video.setAttributeNode(document.createAttribute('playsinline'));
} catch (e) {
video.setAttribute('autoplay', true);
video.setAttribute('playsinline', true);
}
if(specialEvent.type === 'local') {
video.volume = 0;
try {
video.setAttributeNode(document.createAttribute('muted'));
} catch (e) {
video.setAttribute('muted', true);
}
}
video.srcObject = specialEvent.stream;
var width = parseInt(connection.videosContainer.clientWidth / 3) - 20;
var mediaElement = getHTMLMediaElement(video, {
title: specialEvent.userid,
buttons: ['full-screen'],
width: width,
showOnMouseEnter: false
});
connection.videosContainer.appendChild(mediaElement);
setTimeout(function() {
mediaElement.media.play();
}, 5000);
mediaElement.id = specialEvent.streamid;
// to keep room-id in cache
localStorage.setItem(connection.socketMessageEvent, connection.sessionid);
chkRecordConference.parentNode.style.display = 'none';
if(chkRecordConference.checked === true) {
btnStopRecording.style.display = 'inline-block';
recordingStatus.style.display = 'inline-block';
var recorder = connection.recorder;
if(!recorder) {
recorder = RecordRTC([specialEvent.stream], {
type: 'video'
});
recorder.startRecording();
connection.recorder = recorder;
}
else {
recorder.getInternalRecorder().addStreams([specialEvent.stream]);
}
if(!connection.recorder.streams) {
connection.recorder.streams = [];
}
connection.recorder.streams.push(specialEvent.stream);
recordingStatus.innerHTML = 'Recording ' + connection.recorder.streams.length + ' streams';
}
if(specialEvent.type === 'local') {
connection.socket.on('disconnect', function() {
if(!connection.getAllParticipants().length) {
location.reload();
}
});
}
window.specialEvent = specialEvent;
};
// ******************************
//
// OPENCV CODE STARTS FROM HERE
//
// ******************************
function openCvReady() {
// cv['onRuntimeInitialized']=()=>{
// let video = document.getElementById("cam_input"); // video is the id of video tag
// navigator.mediaDevices.getUserMedia({ video: true, audio: false })
// .then(function(stream) {
// console.log(stream);
// video.srcObject = stream;
// video.play();
// })
// .catch(function(err) {
// console.log("An error occurred! " + err);
// });
// let cam_input = document.getElementById("cam_input");
let video = document.getElementById("cam_input");
let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let dst = new cv.Mat(video.height, video.width, cv.CV_8UC1);
let gray = new cv.Mat();
let cap = new cv.VideoCapture(cam_input);
let faces = new cv.RectVector();
// if (face == true){
// console.log("face shown");
// }
let classifier = new cv.CascadeClassifier();
let utils = new Utils('errorMessage');
let faceCascadeFile = 'haarcascade_frontalface_default.xml'; // path to xml
utils.createFileFromUrl(faceCascadeFile, faceCascadeFile, () => {
classifier.load(faceCascadeFile); // in the callback, load the cascade from file
});
const FPS = 24;
function processVideo() {
let begin = Date.now();
cap.read(src);
src.copyTo(dst);
cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
try{
classifier.detectMultiScale(gray, faces, 1.1, 3, 0);
console.log(faces.size());
}catch(err){
console.log(err);
}
for (let i = 0; i < faces.size(); ++i) {
let face = faces.get(i);
console.log("face:");
console.log(face);
let point1 = new cv.Point(face.x, face.y);
let point2 = new cv.Point(face.x + face.width, face.y + face.height);
cv.rectangle(dst, point1, point2, [255, 0, 0, 255]);
}
cv.imshow("canvas_output", dst);
// schedule next one.
let delay = 1000/FPS - (Date.now() - begin);
setTimeout(processVideo, delay);
}
// schedule first one.
setTimeout(processVideo, 0);
// };
}
var recordingStatus = document.getElementById('recording-status');
var chkRecordConference = document.getElementById('record-entire-conference');
var btnStopRecording = document.getElementById('btn-stop-recording');
btnStopRecording.onclick = function() {
var recorder = connection.recorder;
if(!recorder) return alert('No recorder found.');
recorder.stopRecording(function() {
var blob = recorder.getBlob();
invokeSaveAsDialog(blob);
connection.recorder = null;
btnStopRecording.style.display = 'none';
recordingStatus.style.display = 'none';
chkRecordConference.parentNode.style.display = 'inline-block';
});
};
connection.onstreamended = function(event) {
var mediaElement = document.getElementById(event.streamid);
if (mediaElement) {
mediaElement.parentNode.removeChild(mediaElement);
}
};
connection.onMediaError = function(e) {
if (e.message === 'Concurrent mic process limit.') {
if (DetectRTC.audioInputDevices.length <= 1) {
alert('Please select external microphone. Check github issue number 483.');
return;
}
var secondaryMic = DetectRTC.audioInputDevices[1].deviceId;
connection.mediaConstraints.audio = {
deviceId: secondaryMic
};
connection.join(connection.sessionid);
}
};
// ..................................
// ALL below scripts are redundant!!!
// ..................................
function disableInputButtons(enable) {
document.getElementById('room-id').onkeyup();
document.getElementById('open-or-join-room').disabled = !enable;
document.getElementById('open-room').disabled = !enable;
document.getElementById('join-room').disabled = !enable;
document.getElementById('room-id').disabled = !enable;
}
// ......................................................
// ......................Handling Room-ID................
// ......................................................
function showRoomURL(roomid) {
var roomHashURL = '#' + roomid;
var roomQueryStringURL = '?roomid=' + roomid;
var html = '<h2>Unique URL for your room:</h2><br>';
html += 'Hash URL: ' + roomHashURL + '';
html += '<br>';
html += 'QueryString URL: ' + roomQueryStringURL + '';
var roomURLsDiv = document.getElementById('room-urls');
roomURLsDiv.innerHTML = html;
roomURLsDiv.style.display = 'block';
}
(function() {
var params = {},
r = /([^&=]+)=?([^&]*)/g;
function d(s) {
return decodeURIComponent(s.replace(/\+/g, ' '));
}
var match, search = window.location.search;
while (match = r.exec(search.substring(1)))
params[d(match[1])] = d(match[2]);
window.params = params;
})();
var roomid = '';
if (localStorage.getItem(connection.socketMessageEvent)) {
roomid = localStorage.getItem(connection.socketMessageEvent);
} else {
roomid = connection.token();
}
var txtRoomId = document.getElementById('room-id');
txtRoomId.value = roomid;
txtRoomId.onkeyup = txtRoomId.oninput = txtRoomId.onpaste = function() {
localStorage.setItem(connection.socketMessageEvent, document.getElementById('room-id').value);
};
var hashString = location.hash.replace('#', '');
if (hashString.length && hashString.indexOf('comment-') == 0) {
hashString = '';
}
var roomid = params.roomid;
if (!roomid && hashString.length) {
roomid = hashString;
}
if (roomid && roomid.length) {
document.getElementById('room-id').value = roomid;
localStorage.setItem(connection.socketMessageEvent, roomid);
// auto-join-room
(function reCheckRoomPresence() {
connection.checkPresence(roomid, function(isRoomExist) {
if (isRoomExist) {
connection.join(roomid);
return;
}
setTimeout(reCheckRoomPresence, 5000);
});
})();
disableInputButtons();
}
// detect 2G
if(navigator.connection &&
navigator.connection.type === 'cellular' &&
navigator.connection.downlinkMax <= 0.115) {
alert('2G is not supported. Please use a better internet service.');
}
</script>
<footer>
<small id="send-message"></small>
</footer>
<script src="https://www.webrtc-experiment.com/common.js"></script>
</body>
</html>
While setting the remoteDescription, I am getting the error below in Firefox:
DOMException [InvalidStateError: "Cannot set remote offer in state have-local-offer"
code: 11
nsresult: 0x8053000b
location: http://localhost:8080/resources/assets/js/test-online.js:111]
Please find my test-online.js code below:
var localVideo;
var remoteVideo;
var peerConnection;
var serverConnection;
var peerConnectionConfig = {'iceServers': [{'url': 'stun:stun.services.mozilla.com'}, {'url': 'stun:stun.l.google.com:19302'}]};
pageReady();
var offerOptions = {
offerToReceiveAudio: 1,
offerToReceiveVideo: 1
};
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
function pageReady() {
localVideo = document.getElementById('localVideo');
remoteVideo = document.getElementById('remoteVideo');
localVideo.addEventListener('loadedmetadata', function() {
trace('Local video videoWidth: ' + this.videoWidth +
'px, videoHeight: ' + this.videoHeight + 'px');
});
remoteVideo.addEventListener('loadedmetadata', function() {
trace('Remote video videoWidth: ' + this.videoWidth +
'px, videoHeight: ' + this.videoHeight + 'px');
});
remoteVideo.onresize = function() {
trace('Remote video size changed to ' +
remoteVideo.videoWidth + 'x' + remoteVideo.videoHeight);
// We'll use the first onsize callback as an indication that video has started
// playing out.
if (startTime) {
var elapsedTime = window.performance.now() - startTime;
trace('Setup time: ' + elapsedTime.toFixed(3) + 'ms');
startTime = null;
}
};
serverConnection = new SockJS("/onlineHandler");
serverConnection.onopen = function() {
console.log("Opening server connection");
};
serverConnection.onmessage = gotMessageFromServer;
serverConnection.onclose = function() {
console.log("Closing server connection");
};
//serverConnection.onmessage = gotMessageFromServer;
var constraints = {
video: true,
audio: true,
};
navigator.mediaDevices.getUserMedia(constraints)
.then(getUserMediaSuccess)
.catch(function(e) {
alert('getUserMedia() error: ' + e.name);
});
}
function getUserMediaSuccess(stream) {
trace('Received local stream');
localVideo.srcObject = stream;
localStream = stream;
}
function start(isCaller) {
trace('Starting call');
startTime = window.performance.now();
var videoTracks = localStream.getVideoTracks();
var audioTracks = localStream.getAudioTracks();
if (videoTracks.length > 0) {
trace('Using video device: ' + videoTracks[0].label);
}
if (audioTracks.length > 0) {
trace('Using audio device: ' + audioTracks[0].label);
}
peerConnection = new RTCPeerConnection(peerConnectionConfig);
peerConnection.onicecandidate = gotIceCandidate;
peerConnection.oniceconnectionstatechange = onIceStateChange;
peerConnection.onaddStream = gotRemoteStream;
peerConnection.addStream(localStream);
if(isCaller) {
peerConnection.createOffer(gotDescription, errorHandler , offerOptions);
}
}
function gotMessageFromServer(message) {
/* if(!peerConnection) start(false);
var signal = JSON.parse(message.data);
// console.log("Got Message from server :" + message.data);
if(signal.sdp) {;
console.log("hi in sdp" + message.data);
peerConnection.setRemoteDescription(new RTCSessionDescription(signal.sdp), function() {
console.log("Creating answer :");
if (peerConnection.remoteDescription.type == 'offer')
peerConnection.createAnswer(gotDescription, errorHandler);
}, errorHandler);
} else if(signal.ice) {
peerConnection.addIceCandidate(new RTCIceCandidate(signal.ice));
}*/
var signal = JSON.parse(message.data);
if (signal.type === 'offer') {
peerConnection.setRemoteDescription(new RTCSessionDescription(signal),doAnswer,errorHandler);
} else if (signal.type === 'answer') {
peerConnection.setRemoteDescription(new RTCSessionDescription(signal),doNothing, errorHandler);
} else if (signal.type === 'candidate') {
var candidate = new RTCIceCandidate({
sdpMLineIndex:signal.label,
candidate: signal.candidate
});
peerConnection.addIceCandidate(candidate);
} else if (signal === 'bye' && isStarted) {
handleRemoteHangup();
}
}
function doNothing(){
}
function doAnswer() {
console.log('Sending answer to peer.');
peerConnection.createAnswer(gotDescription, errorHandler, sdpConstraints);
}
function handleRemoteHangup() {
// console.log('Session terminated.');
// stop();
// isInitiator = false;
}
function gotIceCandidate(event) {
if(event.candidate != null) {
var message ={
type: 'candidate',
label: event.candidate.sdpMLineIndex,
id: event.candidate.sdpMid,
candidate: event.candidate.candidate};
// serverConnection.send(JSON.stringify({'ice': event.candidate}));
serverConnection.send(JSON.stringify(message));
}
}
function onIceStateChange(event) {
if (peerConnection) {
trace(' ICE state: ' + peerConnection.iceConnectionState);
console.log('ICE state change event: ', event);
}
}
function gotDescription(description) {
// trace('Offer from peerConnection\n' + description.sdp);
description.sdp = preferOpus(description.sdp);
// pc.setLocalDescription(description);
console.log('setLocalAndSendMessage sending message' , description);
// trace('peerConnection setLocalDescription start');
peerConnection.setLocalDescription(
description,
function () {
serverConnection.send(JSON.stringify(description));
},
onSetSessionDescriptionError
);
}
function preferOpus(sdp) {
var sdpLines = sdp.split('\r\n');
var mLineIndex;
// Search for m line.
for (var i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].search('m=audio') !== -1) {
mLineIndex = i;
break;
}
}
if (mLineIndex === null) {
return sdp;
}
// If Opus is available, set it as the default in m line.
for (i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].search('opus/48000') !== -1) {
var opusPayload = extractSdp(sdpLines[i], /:(\d+) opus\/48000/i);
if (opusPayload) {
sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], opusPayload);
}
break;
}
}
// Remove CN in m line and sdp.
sdpLines = removeCN(sdpLines, mLineIndex);
sdp = sdpLines.join('\r\n');
return sdp;
}
function extractSdp(sdpLine, pattern) {
var result = sdpLine.match(pattern);
return result && result.length === 2 ? result[1] : null;
}
// Set the selected codec to the first in m line.
function setDefaultCodec(mLine, payload) {
var elements = mLine.split(' ');
var newLine = [];
var index = 0;
for (var i = 0; i < elements.length; i++) {
if (index === 3) { // Format of media starts from the fourth.
newLine[index++] = payload; // Put target payload to the first.
}
if (elements[i] !== payload) {
newLine[index++] = elements[i];
}
}
return newLine.join(' ');
}
// Strip CN from sdp before CN constraints is ready.
function removeCN(sdpLines, mLineIndex) {
var mLineElements = sdpLines[mLineIndex].split(' ');
// Scan from end for the convenience of removing an item.
for (var i = sdpLines.length-1; i >= 0; i--) {
var payload = extractSdp(sdpLines[i], /a=rtpmap:(\d+) CN\/\d+/i);
if (payload) {
var cnPos = mLineElements.indexOf(payload);
if (cnPos !== -1) {
// Remove CN payload from m line.
mLineElements.splice(cnPos, 1);
}
// Remove CN line in sdp
sdpLines.splice(i, 1);
}
}
sdpLines[mLineIndex] = mLineElements.join(' ');
return sdpLines;
}
function onSetSessionDescriptionError(error) {
trace('Failed to set session description: ' + error.toString());
}
function gotRemoteStream(event) {
remoteVideo.srcObject = event.stream;
trace('Received remote stream');
}
function errorHandler(error) {
console.log(error);
}
And my HTML code is below:
<%# taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c"%>
<%# taglib uri="http://www.springframework.org/tags/form" prefix="form"%>
<html lang="en">
<head>
<link
href="//netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css"
rel="stylesheet">
<!-- Meta tag to not followed by search engine. -->
<meta name="robots" content="noindex,nofollow,nosnippet,noodp,noarchive">
<meta name="keywords" content="JavaScript, WebRTC" />
<meta name="description" content="WebRTC codelab" />
<meta name="viewport" content="width=device-width,initial-scale=1,minimum-scale=1,maximum-scale=1">
<style>
video {
filter: hue-rotate(180deg) saturate(200%);
-moz-filter: hue-rotate(180deg) saturate(200%);
-webkit-filter: hue-rotate(180deg) saturate(200%);
/*-webkit-filter: grayscale(0.3) hue-rotate(360deg) saturate(10) opacity(0.7) sepia(0.5); */
}
</style>
</head>
<body>
<div id='videos'>
<video id='localVideo' autoplay muted></video>
<video id='remoteVideo' autoplay></video>
</div>
<input type="button" id="start" onclick="start(true)" value="Start Video"></input>
<script src="//cdn.jsdelivr.net/sockjs/1.0.0/sockjs.min.js"></script>
<script
src="${pageContext.request.contextPath}/resources/assets/js/jquery-2.1.1.min.js"></script>
<script
src="${pageContext.request.contextPath}/resources/assets/js/bootstrap.min.js"></script>
<script src ="${pageContext.request.contextPath}/resources/assets/js/adapter-0.2.10.js"></script>
<script src="${pageContext.request.contextPath}/resources/assets/js/test-online.js"></script>
</body>
</html>
I am not able to understand what I am doing wrong here. I am still a novice in the WebRTC field and just want to get this basic thing running in my code.
The offer-answer exchange is a state machine, and some methods are disallowed in certain states.
Calling setLocalDescription (or setRemoteDescription) changes the signaling state to "have-local-offer" (or "have-remote-offer").
At that point, it is the application's job to bring the state back to stable or closed, as described in the spec.
For instance, it is the application's responsibility to handle glare (which is where both sides send an offer at the same time).
There is also a bug in Firefox where it doesn't allow you to call createOffer or setLocalDescription again once in have-local-offer, and vice versa for the answer (the little loops in the state diagram in the spec linked above). But from your error message it doesn't sound like you're hitting that.
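As an illustration only (onSignal is a made-up name here, but doAnswer, doNothing and errorHandler are the ones from your code), one way to avoid applying a description in the wrong state is to check signalingState before every setRemoteDescription:
// Sketch: only accept a remote offer from 'stable', and only accept an answer
// while we are actually waiting for one in 'have-local-offer'.
function onSignal(signal) {
    if (signal.type === 'offer') {
        if (peerConnection.signalingState !== 'stable') {
            // Glare: both sides sent an offer at the same time. One side has to
            // back off (roll back, or simply ignore the incoming offer).
            console.warn('Ignoring offer in state ' + peerConnection.signalingState);
            return;
        }
        peerConnection.setRemoteDescription(new RTCSessionDescription(signal), doAnswer, errorHandler);
    } else if (signal.type === 'answer') {
        if (peerConnection.signalingState !== 'have-local-offer') {
            console.warn('Ignoring answer in state ' + peerConnection.signalingState);
            return;
        }
        peerConnection.setRemoteDescription(new RTCSessionDescription(signal), doNothing, errorHandler);
    }
}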
This code works fine in Google Chrome, but when I tried to convert it to support Firefox, I get no errors in the console.
The camera seems to be running, but there's no video.
Here's the script:
var socket = new WebSocket('ws://127.0.0.1:1338/'); // change the IP address to your websocket server
var stunServer = "stun.l.google.com:19302";
var sourcevid = document.getElementById('sourcevid');
var remotevid = document.getElementById('remotevid');
var localStream = null;
var remoteStream;
var peerConn = null;
var started = false;
var isRTCPeerConnection = true;
var mediaConstraints = {mandatory: {
OfferToReceiveAudio:true,
OfferToReceiveVideo:true }};
var logg = function(s) { console.log(s); };
// send the message to websocket server
function sendMessage(message) {
var mymsg = JSON.stringify(message);
logg("SEND: " + mymsg);
socket.send(mymsg);
}
function createPeerConnection() {
try {
logg("Creating peer connection");
var servers = [];
servers.push({'url':'stun:' + stunServer});
var pc_config = {'iceServers':servers};
peerConn = new webkitRTCPeerConnection(pc_config);
peerConn.onicecandidate = onIceCandidate;
} catch (e) {
try {
peerConn = new RTCPeerConnection('STUN ' + stunServer, onIceCandidate00);
isRTCPeerConnection = false;
} catch (e) {
logg("Failed to create PeerConnection, exception: " + e.message);
}
}
peerConn.onaddstream = onRemoteStreamAdded;
peerConn.onremovestream = onRemoteStreamRemoved;
}
// when remote adds a stream, hand it on to the local video element
function onRemoteStreamAdded(event) {
logg("Added remote stream");
remotevid.src = window.webkitURL.createObjectURL(event.stream);
}
function waitForRemoteVideo() {
if (remoteStream.videoTracks.length === 0 || remotevid.currentTime > 0) {
transitionToActive();
} else {
setTimeout(waitForRemoteVideo, 100);
}
}
function transitionToActive() {
remotevid.style.opacity = 1;
card.style.webkitTransform = "rotateY(180deg)";
setTimeout(function() { sourcevid.src = ""; }, 500);
setStatus("<input type=\"button\" id=\"hangup\" value=\"Hang up\" onclick=\"onHangup()\" />");
}
// when remote removes a stream, remove it from the local video element
function onRemoteStreamRemoved(event) {
logg("Remove remote stream");
remotevid.src = "";
}
function onIceCandidate(event) {
if (event.candidate) {
sendMessage({type: 'candidate',
label: event.candidate.sdpMLineIndex,
id: event.candidate.sdpMid,
candidate: event.candidate.candidate});
} else {
logg("End of candidates.");
}
}
function onIceCandidate00(candidate, moreToFollow) {
if (candidate) {
sendMessage({type: 'candidate', label: candidate.label, candidate: candidate.toSdp()});
}
if (!moreToFollow) {
logg("End of candidates.");
}
}
// start the connection upon user request
function connect() {
if (!started && localStream) {
console.log("Creating PeerConnection.");
createPeerConnection();
logg('Adding local stream...');
peerConn.addStream(localStream);
started = true;
logg("isRTCPeerConnection: " + isRTCPeerConnection);
//create offer
if (isRTCPeerConnection) {
peerConn.createOffer(setLocalAndSendMessage, null, mediaConstraints);
} else {
var offer = peerConn.createOffer(mediaConstraints);
peerConn.setLocalDescription(peerConn.SDP_OFFER, offer);
sendMessage({type: 'offer', sdp: offer.toSdp()});
peerConn.startIce();
}
} else {
alert("Local stream not running yet.");
}
}
// accept connection request
socket.addEventListener("message", onMessage, false);
function onMessage(evt) {
logg("RECEIVED: " + evt.data);
if (isRTCPeerConnection)
processSignalingMessage(evt.data);
else
processSignalingMessage00(evt.data);
}
function processSignalingMessage(message) {
var msg = JSON.parse(message);
if (msg.type === 'offer') {
if (!started && localStream) {
createPeerConnection();
logg('Adding local stream...');
peerConn.addStream(localStream);
started = true;
logg("isRTCPeerConnection: " + isRTCPeerConnection);
if (isRTCPeerConnection) {
//set remote description
peerConn.setRemoteDescription(new RTCSessionDescription(msg));
//create answer
console.log("Sending answer to peer.");
peerConn.createAnswer(setLocalAndSendMessage, null, mediaConstraints);
} else {
//set remote description
peerConn.setRemoteDescription(peerConn.SDP_OFFER, new SessionDescription(msg.sdp));
//create answer
var offer = peerConn.remoteDescription;
var answer = peerConn.createAnswer(offer.toSdp(), mediaConstraints);
console.log("Sending answer to peer.");
setLocalAndSendMessage00(answer);
}
}
} else if (msg.type === 'answer' && started) {
peerConn.setRemoteDescription(new RTCSessionDescription(msg));
} else if (msg.type === 'candidate' && started) {
var candidate = new RTCIceCandidate({sdpMLineIndex:msg.label, candidate:msg.candidate});
peerConn.addIceCandidate(candidate);
} else if (msg.type == 'chat'){
addChatMsg(msg.nick, msg.cid, msg.data);
}
else if (msg.type === 'bye' && started) {
onRemoteHangUp();
}
}
function processSignalingMessage00(message) {
var msg = JSON.parse(message);
// if (msg.type === 'offer') --> will never happened since isRTCPeerConnection=true initially
if (msg.type === 'answer' && started) {
peerConn.setRemoteDescription(peerConn.SDP_ANSWER, new SessionDescription(msg.sdp));
} else if (msg.type === 'candidate' && started) {
var candidate = new IceCandidate(msg.label, msg.candidate);
peerConn.processIceMessage(candidate);
} else if (msg.type === 'bye' && started) {
onRemoteHangUp();
}
}
function setLocalAndSendMessage(sessionDescription) {
peerConn.setLocalDescription(sessionDescription);
sendMessage(sessionDescription);
}
function setLocalAndSendMessage00(answer) {
peerConn.setLocalDescription(peerConn.SDP_ANSWER, answer);
sendMessage({type: 'answer', sdp: answer.toSdp()});
peerConn.startIce();
}
function onRemoteHangUp() {
logg("Remote Hang up.");
closeSession();
}
function onHangUp() {
logg("Hang up.");
if (started) {
sendMessage({type: 'bye'});
closeSession();
}
}
function closeSession() {
peerConn.close();
peerConn = null;
started = false;
remotevid.src = "";
}
window.onbeforeunload = function() {
if (started) {
sendMessage({type: 'bye'});
}
}
function startVideo() {
// Replace the source of the video element with the stream from the camera
if (navigator.mozGetUserMedia) {
try {
navigator.mozGetUserMedia({audio: true, video: true}, successCallback, errorCallback);
} catch (e) {
navigator.mozGetUserMedia("video,audio", successCallback, errorCallback);
}
}
else {
try {
navigator.webkitGetUserMedia({audio: true, video: true}, successCallback, errorCallback);
} catch (e) {
navigator.webkitGetUserMedia("video,audio", successCallback, errorCallback);
}
}
function successCallback(stream) {
if (navigator.mozGetUserMedia) {
sourcevid.mozSrcObject = stream;
sourcevid.style.webkitTransform = "rotateY(180deg)";
localStream = stream;
}
if(navigator.webkitGetUserMedia){
sourcevid.src = window.webkitURL.createObjectURL(stream);
sourcevid.style.webkitTransform = "rotateY(180deg)";
localStream = stream;
}
}
function errorCallback(error) {
logg('An error occurred: [CODE ' + error.code + ']');
}
}
function stopVideo() {
sourcevid.src = "";
}
And here is the HTML:
<script type="text/javascript" src="{{ asset('bundles/PFESiivt/js/visio.js') }}"></script>
<div id="main">
<div id="" style="height:280px;width:700;">
<div id="livevideodivk" style="float:left;">
<video id="sourcevid" style="height:280px;width:320px;" autoplay></video>
</div>
<div id="remotevideodivk" style="float:left;margin-left:10px">
<video id="remotevid" style="height:280px;width:320px;" autoplay></video>
</div>
</div>
<center>
<button id="btn" type="button" onclick="startVideo();">Start video</button>
<button id="btn" type="button" onclick="stopVideo();">Stop video</button>
<button id="btn" type="button" onclick="connect();">Connect</button>
<button id="btn" type="button" onclick="onHangUp();">Hang Up</button>
</center>
</div>
I haven't gone through the complete code, but for starters:
for Firefox it is mozRTCPeerConnection, not RTCPeerConnection;
secondly, the onicecandidate handler is missing on the Firefox PeerConnection object.
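A minimal sketch of both points, keeping the names from your question (old Firefox exposed the prefixed mozRTCPeerConnection; picking whichever constructor exists covers both browsers):
// Pick whichever constructor the browser exposes.
var PeerConnection = window.RTCPeerConnection ||
                     window.webkitRTCPeerConnection ||
                     window.mozRTCPeerConnection;
function createPeerConnection() {
    var pc_config = {'iceServers': [{'url': 'stun:' + stunServer}]};
    peerConn = new PeerConnection(pc_config);
    peerConn.onicecandidate = onIceCandidate;   // don't forget this in the Firefox path
    peerConn.onaddstream = onRemoteStreamAdded;
    peerConn.onremovestream = onRemoteStreamRemoved;
}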
P.S.: I think it is a very bad idea to post the complete code. I would advise doing a bit of debugging yourself (to identify the block causing the issue) and then posting the relevant block when you are not able to solve it.
I am trying to render a PDF in Chrome using PDF.js.
This is the function I am calling:
open: function pdfViewOpen(url, scale, password) {
var parameters = {password: password};
if (typeof url === 'string') { // URL
this.setTitleUsingUrl(url);
parameters.url = url;
} else if (url && 'byteLength' in url) { // ArrayBuffer
parameters.data = url;
}
if (!PDFView.loadingBar) {
PDFView.loadingBar = new ProgressBar('#loadingBar', {});
}
this.pdfDocument = null;
var self = this;
self.loading = true;
getDocument(parameters).then(
function getDocumentCallback(pdfDocument) {
self.load(pdfDocument, scale);
self.loading = false;
},
function getDocumentError(message, exception) {
if (exception && exception.name === 'PasswordException') {
if (exception.code === 'needpassword') {
var promptString = mozL10n.get('request_password', null,
'PDF is protected by a password:');
password = prompt(promptString);
if (password && password.length > 0) {
return PDFView.open(url, scale, password);
}
}
}
var loadingErrorMessage = mozL10n.get('loading_error', null,
'An error occurred while loading the PDF.');
if (exception && exception.name === 'InvalidPDFException') {
// change error message also for other builds
var loadingErrorMessage = mozL10n.get('invalid_file_error', null,
'Invalid or corrupted PDF file.');
//#if B2G
// window.alert(loadingErrorMessage);
// return window.close();
//#endif
}
var loadingIndicator = document.getElementById('loading');
loadingIndicator.textContent = mozL10n.get('loading_error_indicator',
null, 'Error');
var moreInfo = {
message: message
};
self.error(loadingErrorMessage, moreInfo);
self.loading = false;
},
function getDocumentProgress(progressData) {
self.progress(progressData.loaded / progressData.total);
}
);
}
This is the call:
PDFView.open('/MyPDFs/Pdf2.pdf', 'auto', null);
All I get is this:
If you notice, even the page number is retrieved, but the content is not painted on the pages. I can't figure out why. Is there any other function I should call besides PDFView.open?
Found the solution...!
This is the code that does the work.
$(document).ready(function () {
PDFView.initialize();
var params = PDFView.parseQueryString(document.location.search.substring(1));
//#if !(FIREFOX || MOZCENTRAL)
var file = params.file || DEFAULT_URL;
//#else
//var file = window.location.toString()
//#endif
//#if !(FIREFOX || MOZCENTRAL)
if (!window.File || !window.FileReader || !window.FileList || !window.Blob) {
document.getElementById('openFile').setAttribute('hidden', 'true');
} else {
document.getElementById('fileInput').value = null;
}
//#else
//document.getElementById('openFile').setAttribute('hidden', 'true');
//#endif
// Special debugging flags in the hash section of the URL.
var hash = document.location.hash.substring(1);
var hashParams = PDFView.parseQueryString(hash);
if ('disableWorker' in hashParams)
PDFJS.disableWorker = (hashParams['disableWorker'] === 'true');
//#if !(FIREFOX || MOZCENTRAL)
var locale = navigator.language;
if ('locale' in hashParams)
locale = hashParams['locale'];
mozL10n.setLanguage(locale);
//#endif
if ('textLayer' in hashParams) {
switch (hashParams['textLayer']) {
case 'off':
PDFJS.disableTextLayer = true;
break;
case 'visible':
case 'shadow':
case 'hover':
var viewer = document.getElementById('viewer');
viewer.classList.add('textLayer-' + hashParams['textLayer']);
break;
}
}
//#if !(FIREFOX || MOZCENTRAL)
if ('pdfBug' in hashParams) {
//#else
//if ('pdfBug' in hashParams && FirefoxCom.requestSync('pdfBugEnabled')) {
//#endif
PDFJS.pdfBug = true;
var pdfBug = hashParams['pdfBug'];
var enabled = pdfBug.split(',');
PDFBug.enable(enabled);
PDFBug.init();
}
if (!PDFView.supportsPrinting) {
document.getElementById('print').classList.add('hidden');
}
if (!PDFView.supportsFullscreen) {
document.getElementById('fullscreen').classList.add('hidden');
}
if (PDFView.supportsIntegratedFind) {
document.querySelector('#viewFind').classList.add('hidden');
}
// Listen for warnings to trigger the fallback UI. Errors should be caught
// and call PDFView.error() so we don't need to listen for those.
PDFJS.LogManager.addLogger({
warn: function () {
PDFView.fallback();
}
});
var mainContainer = document.getElementById('mainContainer');
var outerContainer = document.getElementById('outerContainer');
mainContainer.addEventListener('transitionend', function (e) {
if (e.target == mainContainer) {
var event = document.createEvent('UIEvents');
event.initUIEvent('resize', false, false, window, 0);
window.dispatchEvent(event);
outerContainer.classList.remove('sidebarMoving');
}
}, true);
document.getElementById('sidebarToggle').addEventListener('click',
function () {
this.classList.toggle('toggled');
outerContainer.classList.add('sidebarMoving');
outerContainer.classList.toggle('sidebarOpen');
PDFView.sidebarOpen = outerContainer.classList.contains('sidebarOpen');
PDFView.renderHighestPriority();
});
document.getElementById('viewThumbnail').addEventListener('click',
function () {
PDFView.switchSidebarView('thumbs');
});
document.getElementById('viewOutline').addEventListener('click',
function () {
PDFView.switchSidebarView('outline');
});
document.getElementById('previous').addEventListener('click',
function () {
PDFView.page--;
});
document.getElementById('next').addEventListener('click',
function () {
PDFView.page++;
});
document.querySelector('.zoomIn').addEventListener('click',
function () {
PDFView.zoomIn();
});
document.querySelector('.zoomOut').addEventListener('click',
function () {
PDFView.zoomOut();
});
document.getElementById('fullscreen').addEventListener('click',
function () {
PDFView.fullscreen();
});
document.getElementById('openFile').addEventListener('click',
function () {
document.getElementById('fileInput').click();
});
document.getElementById('print').addEventListener('click',
function () {
window.print();
});
document.getElementById('download').addEventListener('click',
function () {
PDFView.download();
});
document.getElementById('pageNumber').addEventListener('change',
function () {
PDFView.page = this.value;
});
document.getElementById('scaleSelect').addEventListener('change',
function () {
PDFView.parseScale(this.value);
});
document.getElementById('first_page').addEventListener('click',
function () {
PDFView.page = 1;
});
document.getElementById('last_page').addEventListener('click',
function () {
PDFView.page = PDFView.pdfDocument.numPages;
});
document.getElementById('page_rotate_ccw').addEventListener('click',
function () {
PDFView.rotatePages(-90);
});
document.getElementById('page_rotate_cw').addEventListener('click',
function () {
PDFView.rotatePages(90);
});
//#if (FIREFOX || MOZCENTRAL)
//if (FirefoxCom.requestSync('getLoadingType') == 'passive') {
// PDFView.setTitleUsingUrl(file);
// PDFView.initPassiveLoading();
// return;
//}
//#endif
//#if !B2G
PDFView.open(file, 0);
//#endif
});
The system must be initialized first, before the PDFView.open call!
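In other words, the essential order boils down to something like this (a minimal sketch using the same PDFView object as above):
$(document).ready(function () {
    PDFView.initialize();                       // set up the viewer first
    PDFView.open('/MyPDFs/Pdf2.pdf', 'auto');   // only then load the document
});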
Thanks