When I send a message with SimpleWebRTC's sendDirectlyToAll, the first message is never received, but every message after that is.
I've stripped the code down to a very simple state, but the behaviour is the same. Does anyone have a clue why this is happening?
Here is the code:
var webrtc = new SimpleWebRTC({
    localVideoEl: 'localVideo',
    remoteVideosEl: 'remoteVideos',
    autoRequestMedia: false,
    media: {
        video: true,
        audio: false
    },
    localVideo: {
        autoplay: true,
        mirror: true,
        muted: true
    }
});
$("#chat-send-button").on("click", function (e) {
sendMessage();
});
function sendMessage() {
console.log("sendMessage");
const chatMessage = $("#chat-message-input");
webrtc.sendDirectlyToAll(
"chat",
"info", {
"chatmessage": chatMessage.val()
}
)
chatMessage.val("");
}
webrtc.on("channelMessage", function (peer, channel, data) {
console.log(peer);
console.log(channel);
console.log("data", data);
$("#chat-message-container").text(data.payload.chatmessage);
});
You probably need the WebRTC connection to be established before you let the user send a message. Do you make use of the readyToCall event described in the documentation? https://github.com/SimpleWebRTC/SimpleWebRTC#3-tell-it-to-join-a-room-when-ready
(A link to an editable, runnable code snippet might help.)
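A rough sketch of that idea, assuming your SimpleWebRTC version relays a channelOpen event the same way it relays channelMessage (the room name is arbitrary):

// Keep the send button disabled until a data channel is actually open.
$("#chat-send-button").prop("disabled", true);

// readyToCall / joinRoom are from the SimpleWebRTC docs linked above.
webrtc.on("readyToCall", function () {
    webrtc.joinRoom("my-chat-room"); // arbitrary room name
});

// Assumption: channelOpen is relayed like channelMessage; only enable
// sending once at least one data channel is open.
webrtc.on("channelOpen", function (channel) {
    $("#chat-send-button").prop("disabled", false);
});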
I'm trying to receive a call from a provider in my app. For that purpose QuickBlox provides a listener for incoming calls, onCallListener. Here is my code snippet, which should work but doesn't:
const calleesIds = [4104]
const sessionType = QB.webrtc.CallType.VIDEO
const additionalOptions = {}
let callSession = QB.webrtc.createNewSession(calleesIds, sessionType, null, additionalOptions)
console.log(callSession, "SESSION")

const mediaParams = {
    audio: true,
    video: true,
    options: {
        muted: true,
        mirror: true,
    },
    elemId: "myVideoStream"
}
QB.webrtc.onCallListener = function(session: any, extension: object) {
    callSession = session
    console.log('asdasd')
    // if you are going to take a call
    session.getUserMedia(mediaParams, function (error: object, stream: object) {
        if (error) {
            console.error(error)
        } else {
            session.accept(extension)
            session.attachMediaStream("videoStream", stream)
        }
    })
}
P.S. I also integrated chat, which works perfectly!
Found the solution myself! When you create a user and a dialog id, search for that user in the QuickBlox dashboard by the dialogId and check its settings: you will see that userId and providerId are the same, which is wrong. Put your userId in the userId field and save it. After that your video-calling listeners will work fine!
P.S. Also, in the backend, replace the provider token with the user token.
I have a PubNub instance.
I want to know how to handle reconnection when the internet goes down and comes back up, with a given number of retries. The documentation covers this, but I am unable to put it into code.
Help would be greatly appreciated.
My code:
this.pubnub = new PubNub({
    subscribeKey: this.serverDetails.authInfo.subscribeKey,
    authKey: this.serverDetails.authInfo.authKey,
    uuid,
    restore: true,
    ssl: true
});

this.listeners = {
    message: msgEvent => {
        console.log(msgEvent);
    },
    status: statusEvent => {
    }
};

this.pubnub.addListener(this.listeners);
Set restore:true in your init code.
this.pubnub = new PubNub({
    subscribeKey: this.serverDetails.authInfo.subscribeKey,
    authKey: this.serverDetails.authInfo.authKey,
    uuid,
    ssl: true,
    restore: true // this allows reconnect to restore your channel subscription
});
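For the retry part of the question, one possibility is to react to the network-related categories in the status listener and call reconnect() a limited number of times. This is only a sketch: the category names come from the PubNub JavaScript SDK, while the retry limit and back-off are arbitrary choices.

let retries = 0;
const MAX_RETRIES = 5; // arbitrary limit

this.listeners = {
    message: msgEvent => {
        console.log(msgEvent);
    },
    status: statusEvent => {
        if (statusEvent.category === 'PNNetworkDownCategory') {
            console.log('network down');
        } else if (statusEvent.category === 'PNNetworkUpCategory' ||
                   statusEvent.category === 'PNNetworkIssuesCategory') {
            if (retries < MAX_RETRIES) {
                retries++;
                // reconnect() re-establishes the subscribe loop;
                // restore: true brings the channel subscriptions back
                setTimeout(() => this.pubnub.reconnect(), 1000 * retries);
            }
        } else if (statusEvent.category === 'PNConnectedCategory' ||
                   statusEvent.category === 'PNReconnectedCategory') {
            retries = 0; // connection is healthy again, reset the counter
        }
    }
};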
I'm making an audio transcription script for my webpage and I want it to stop listening to the user when it captures silence. I can detect the silence, but I can't delete or turn off the 'stopped_speaking' event. Is there a way to do that? If not, how can I solve my problem? Thanks.
Here is my code:
function bg_startRecording() {
    navigator.mediaDevices.getUserMedia({
        video: false,
        audio: true
    }).then(async function(stream) {
        recorder = RecordRTC(stream, {
            type: 'audio',
            mimeType: 'audio/wav',
            recorderType: StereoAudioRecorder,
            disableLogs: false,
            numberOfAudioChannels: 1,
        });
        recorder.startRecording();
        var options = {};
        speechEvents = hark(stream, options);
        speechEvents.on('stopped_speaking', function() {
            speechEvents.off('stopped_speaking'); // <-- throws an error
            bg_stopRecording(); // function to stop recording and transcribe
        });
    });
}
The error:
main.js:49 Uncaught TypeError: speechEvents.off is not a function
at Object.stopped_speaking (main.js:49)
at harker.emit (hark.js:16)
at hark.js:109
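There doesn't seem to be an off() method on the object hark returns, so here is a sketch of two possible workarounds (not verified against every hark version): stop the harker entirely with stop(), or make the handler effectively one-shot with a guard flag.

// Option 1: stop hark's polling loop once silence is detected, so no
// further speaking/stopped_speaking events are emitted. The harker
// object returned by hark exposes stop().
speechEvents.on('stopped_speaking', function() {
    speechEvents.stop();
    bg_stopRecording(); // stop recording and transcribe
});

// Option 2: if stop() is not available in your hark version, a simple
// guard flag ignores any later events.
// let silenceHandled = false;
// speechEvents.on('stopped_speaking', function() {
//     if (silenceHandled) return;
//     silenceHandled = true;
//     bg_stopRecording();
// });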
I've come across the SimpleWebRTC package and am trying to get it to work, but I can't seem to get the remote stream coming through. I'm using Pusher for signalling rather than the default that comes with SimpleWebRTC.
I've set up my own connection:
var myConnection = {
    pusher: new Pusher('mypusherkey', { cluster: 'ap1' }),
    channel: null,
    on: function (event, callback) {
        this.pusher.bind(event, callback);
    },
    emit: function () {
        if (arguments.length == 1) {
            if (arguments[0] === "join") {
                this.channel = this.pusher.subscribe(arguments[1]);
            }
        }
        else
            this.channel.trigger(arguments);
    },
    getSessionId: function() {
        return this.pusher.connection.socket_id;
    },
    disconnect: function() {
        this.pusher.disconnect();
    }
};
Then I have the SimpleWebRTC initialisation:
var webrtc = new SimpleWebRTC({
    // the id/element dom element that will hold "our" video
    localVideoEl: 'localVideo',
    // the id/element dom element that will hold remote videos
    remoteVideosEl: 'remotesVideos',
    // immediately ask for camera access
    autoRequestMedia: true,
    debug: true,
    connection: myConnection
});

// we have to wait until it's ready
webrtc.on('readyToCall', function () {
    console.log('ready to join');
    // you can name it anything
    webrtc.joinRoom('test-video-chat');
});
Doing a simple test between two PCs, it doesn't set up the remote stream. In the dev console, apart from the initial event hook-ups, I'm not seeing any other activity; in particular, SimpleWebRTC's "readyToCall" never fires.
You probably need to emit a 'connect' signal from your socket adapter to trigger this code.
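A rough sketch of how the adapter could relay that signal. The assumption here is that SimpleWebRTC waits for a 'connect' event from its connection (as its default socket.io connection provides) before firing readyToCall, so the custom adapter should invoke whatever callback was registered for 'connect' once Pusher reports it is connected:

// Keep track of the callbacks SimpleWebRTC registers via on().
var callbacks = {};

var myConnection = {
    pusher: new Pusher('mypusherkey', { cluster: 'ap1' }),
    channel: null,
    on: function (event, callback) {
        callbacks[event] = callback;
        this.pusher.bind(event, callback);
    },
    // ... emit, getSessionId, disconnect as before ...
};

// Relay Pusher's "connected" state as the 'connect' event SimpleWebRTC
// is waiting for before it fires readyToCall.
myConnection.pusher.connection.bind('connected', function () {
    if (callbacks['connect']) {
        callbacks['connect']();
    }
});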
I am trying to set the getUserMedia video constraints, like min/max frame rates and resolutions, in my peer.js WebRTC application, which is a simple peer-to-peer chat application. I have been trying to integrate the constraints into my application, but it seems to break it. Any help would be greatly appreciated; other online tutorials look different to my app's setup. Function step1 below is where I have been trying to set the constraints, and the video just doesn't show anymore. Is this the correct place?
Also, will these constraints work on a video file playing instead of the webcam? I am using the Google Chrome flag that plays a video file instead of a camera.
navigator.getWebcam = (navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia);
// PeerJS object ** FOR PRODUCTION, GET YOUR OWN KEY at http://peerjs.com/peerserver **
var peer = new Peer({
    key: 'XXXXXXXXXXXXXXXX',
    debug: 3,
    config: {
        'iceServers': [{
            url: 'stun:stun.l.google.com:19302'
        }, {
            url: 'stun:stun1.l.google.com:19302'
        }, {
            url: 'turn:numb.viagenie.ca',
            username: "XXXXXXXXXXXXXXXXXXXXXXXXX",
            credential: "XXXXXXXXXXXXXXXXX"
        }]
    }
});
// On open, set the peer id so when peer is on we display our peer id as text
peer.on('open', function(){
    $('#my-id').text(peer.id);
});

peer.on('call', function(call) {
    // Answer automatically for demo
    call.answer(window.localStream);
    step3(call);
});

// Click handlers setup
$(function() {
    $('#make-call').click(function() {
        // Initiate a call!
        var call = peer.call($('#callto-id').val(), window.localStream);
        step3(call);
    });

    $('end-call').click(function() {
        window.existingCall.close();
        step2();
    });

    // Retry if getUserMedia fails
    $('#step1-retry').click(function() {
        $('#step1-error').hide();
        step();
    });

    // Get things started
    step1();
});
function step1() {
    // Get audio/video stream
    navigator.getWebcam({audio: true, video: true}, function(stream){
        // Display the video stream in the video object
        $('#my-video').prop('src', URL.createObjectURL(stream));
        // Displays error
        window.localStream = stream;
        step2();
    }, function(){ $('#step1-error').show(); });
}

function step2() { // Adjust the UI
    $('#step1', '#step3').hide();
    $('#step2').show();
}

function step3(call) {
    // Hang up on an existing call if present
    if (window.existingCall) {
        window.existingCall.close();
    }
    // Wait for stream on the call, then setup peer video
    call.on('stream', function(stream) {
        $('#their-video').prop('src', URL.createObjectURL(stream));
    });
    $('#step1', '#step2').hide();
    $('#step3').show();
}
Your JavaScript looks invalid. You can't declare a var inside a function argument list. Did you paste it wrong? Try:
var constraints = {
    audio: false,
    video: { mandatory: { minWidth: 1280, minHeight: 720 } }
};

navigator.getWebcam(constraints, function(stream) { /* etc. */ });
Now it's valid JavaScript at least. I'm not familiar with PeerJS, but the constraints you're using look like the Chrome ones, so if you're on Chrome then hopefully they'll work, unless PeerJS does it differently for some reason.
Your subject says "WebRTC Camera constraints" so I should mention that the Chrome constraints are non-standard. See this answer for an explanation.
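For reference, here is a sketch of the same constraints in the standard form (width/height/frameRate with min/ideal/max), which modern browsers accept via navigator.mediaDevices.getUserMedia; whether PeerJS passes these through unchanged is an assumption.

// Standard (spec-compliant) constraints instead of Chrome's legacy
// mandatory/optional syntax.
var constraints = {
    audio: false,
    video: {
        width: { min: 1280 },
        height: { min: 720 },
        frameRate: { min: 15, max: 30 }
    }
};

navigator.mediaDevices.getUserMedia(constraints)
    .then(function (stream) {
        // same handling as before, e.g. keep the stream for the call
        window.localStream = stream;
    })
    .catch(function (err) {
        console.error(err);
    });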