How could I go about implementing 'video boxes' for members of the call (cabin) using Peer and Socket.io? - javascript

How could I implement video boxes for all members of the cabin, each of which would show that member's video stream and username? My current code for 'cabin.js' is below; however, it doesn't seem to work and I cannot figure it out at all. Also, I am using Express and Node.js for my backend. Cheers!
const socket = io('/')
const videoGrid = document.getElementById('video-grid')
const peer = new Peer(USER_ID)
const myVideoDiv = document.createElement('div')
myVideoDiv.classList.add("container")
const userText = document.createElement('h3')
userText.innerText = "You"
const userTextDiv = document.createElement('div').appendChild(userText)
const myVideo = document.createElement('video')
myVideoDiv.append(userTextDiv, myVideo)
myVideo.muted = true
const peers = {}
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(stream => {
addVideoStream(myVideo, myVideoDiv, stream)
peer.on('call', call => {
call.answer(stream, username)
const videoDiv = document.createElement('div')
videoDiv.classList.add("container")
const userText = document.createElement('h3')
userText.innerText = username
const userTextDiv = document.createElement('div').appendChild(userText)
const video = document.createElement('video')
videoDiv.append(userTextDiv, myVideo)
call.on('stream', newVideoStream => {
addVideoStream(video, videoDiv, newVideoStream)
})
})
socket.on('user-connected', (arg) => {
connectNewMember(arg['userId'], arg['username'], stream)
console.log(arg['userId'] + "(", arg['username'] + ")" + " joined.")
})
})
socket.on('user-disconnected', userId => {
if (peers[userId]) peers[userId].close()
})
peer.on('open', id => {
socket.emit('join-cabin', CABIN_ID, id)
})
function connectNewMember(userId, username, stream) {
const call = peer.call(userId, username, stream)
const videoDiv = document.createElement('div')
videoDiv.classList.add("container")
const userText = document.createElement('h3')
userText.innerText = username
const userTextDiv = document.createElement('div').appendChild(userText)
const video = document.createElement('video')
videoDiv.append(userTextDiv, myVideo)
call.on('stream', newVideoStream => {
addVideoStream(video, videoDiv, newVideoStream)
})
call.on('close', () => {
videoDiv.remove()
})
peers[userId] = call
}
function addVideoStream(video, container, stream) {
video.srcObject = stream
video.addEventListener('loadedmetadata', () => {
video.play()
})
videoGrid.append(container)
}
I am extremely new to the concept of 'video-calling applications' and any help would be greatly appreciated. Thanks!
Edit:
Forgot to mention: I am following Web Dev Simplified's Zoom Clone App tutorial, albeit with modifications.
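A few things stand out in the snippet above and suggest why it doesn't work: document.createElement('div').appendChild(userText) returns userText rather than the newly created div, the remote video boxes append myVideo instead of the freshly created video element, and neither peer.call() nor call.answer() accepts a username argument. Below is a minimal, hedged sketch of one way the username could travel with the call instead, using the PeerJS metadata call option; it assumes a username variable is injected into the page the same way USER_ID and CABIN_ID are, and it reuses addVideoStream, peer, peers, and stream from the code above.
// Hedged sketch, not the tutorial's code: carry the username in PeerJS call metadata.
function buildVideoBox(label) {
  // One "video box": a container div holding an <h3> username label and a <video>.
  const container = document.createElement('div')
  container.classList.add('container')
  const userText = document.createElement('h3')
  userText.innerText = label
  const video = document.createElement('video')
  container.append(userText, video) // append() accepts multiple nodes
  return { container, video }
}
// Caller side: pass our own username along with the call via metadata.
function connectNewMember(userId, remoteUsername, stream) {
  const call = peer.call(userId, stream, { metadata: { username } }) // `username` assumed to be a page global
  const { container, video } = buildVideoBox(remoteUsername)
  call.on('stream', remoteStream => addVideoStream(video, container, remoteStream))
  call.on('close', () => container.remove())
  peers[userId] = call
}
// Callee side: register inside getUserMedia().then(stream => ...), as in the code above,
// and read the caller's username from call.metadata.
peer.on('call', call => {
  call.answer(stream)
  const { container, video } = buildVideoBox(call.metadata?.username ?? 'Guest') // 'Guest' is just a fallback label
  call.on('stream', remoteStream => addVideoStream(video, container, remoteStream))
})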

Related

Screen share to the connected peer with webRTC

I am exploring WebRTC and trying to build a Zoom-like web application. I have successfully shared my screen and shown it on my own device, but I could not show the shared screen to the connected peers. I want to share my screen with everyone who is connected to the same room. I don't know how to do that. Can anyone help me with this issue?
Below is the JavaScript code:
Script.js
const socket = io('/')
const videoGrid = document.getElementById('video-grid')
const myPeer = new Peer(undefined, {
host: '/',
port: '8000'
})
const myVideo = document.createElement('video')
myVideo.muted = true
const peers = {}
let videoStream
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(stream => {
videoStream = stream
addVideoStream(myVideo, stream)
myPeer.on('call', call => {
call.answer(stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
})
socket.on('user-connected', userId => {
connectToNewUser(userId, stream)
})
socket.on('user-disconnected', userId => {
if (peers[userId]) peers[userId].close()
})
})
myPeer.on('open', id => {
socket.emit('join-room', ROOM_ID, id)
})
function connectToNewUser(userId, stream) {
const call = myPeer.call(userId, stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
call.on('close', () => {
video.remove()
})
peers[userId] = call
}
function addVideoStream(video, stream) {
video.srcObject = stream
video.addEventListener('loadedmetadata', () => {
video.play()
})
videoGrid.append(video)
}
//====================================== Front-end styling logics =================================
const audio = document.getElementById('audio')
const audio_mute = document.getElementById('audio-mute')
const video = document.getElementById('video')
const video_mute = document.getElementById('video-mute')
const screen_share = document.getElementById('screen-share')
const record = document.getElementById('record')
const record_stop = document.getElementById('record-stop')
const leave_btn = document.getElementById('leave-btn')
const message_view_box = document.getElementById('message-view-box')
audio.addEventListener('click', function () {
const track = videoStream.getAudioTracks()[0].enabled
console.log(videoStream.getAudioTracks());
if (track) {
videoStream.getAudioTracks()[0].enabled = false
}
else {
videoStream.getAudioTracks()[0].enabled = true
}
audio.style.display = 'none';
audio_mute.style.display = 'inline-block';
})
audio_mute.addEventListener('click', function () {
const track = videoStream.getAudioTracks()[0].enabled
console.log(videoStream.getAudioTracks());
if (track) {
videoStream.getAudioTracks()[0].enabled = false
}
else {
videoStream.getAudioTracks()[0].enabled = true
}
audio_mute.style.display = 'none';
audio.style.display = 'inline-block';
})
video.addEventListener('click', function () {
const track = videoStream.getVideoTracks()[0].enabled
console.log(videoStream.getVideoTracks()[0].enabled);
if (track) {
videoStream.getVideoTracks()[0].enabled = false
}
else {
videoStream.getVideoTracks()[0].enabled = true
}
video.style.display = 'none';
video_mute.style.display = 'inline-block';
})
video_mute.addEventListener('click', function () {
const track = videoStream.getVideoTracks()[0].enabled
console.log(videoStream.getVideoTracks()[0].enabled);
if (track) {
videoStream.getVideoTracks()[0].enabled = false
}
else {
videoStream.getVideoTracks()[0].enabled = true
}
video_mute.style.display = 'none';
video.style.display = 'inline-block';
})
// ============================= Chat box logics ===============================
let chat_box = document.getElementById('chat_box');
let chat_box_input = document.getElementById('chat_box_input');
let send_icon = document.getElementById('send-icon');
chat_box.addEventListener('submit', function (e) {
e.preventDefault()
// console.log(e.target.chat_box_input.value);
if (chat_box_input.value) {
socket.emit('chat message', chat_box_input.value);
chat_box_input.value = '';
}
// e.target.chat_box_input.value = ''
})
socket.on('chat message', function (msg) {
const item = document.createElement('li');
item.textContent = msg;
message_view_box.appendChild(item);
message_view_box.scrollTop = message_view_box.scrollHeight - message_view_box.clientHeight;
});
// =============================================================================
//================================== Screen share logics =======================
const videoElem = document.getElementById('screen')
screen_share.addEventListener('click',async function () {
startCapture();
})
// Options for getDisplayMedia()
const displayMediaOptions = {
video: {
cursor: "always"
},
audio: true
};
async function startCapture() {
try {
videoElem.srcObject = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
// const videoStreamTrack = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
// call.peerConnection.getSenders()[1].replaceTrack(videoStreamTrack)
dumpOptionsInfo();
} catch (err) {
console.error(`Error: ${err}`);
}
}
function dumpOptionsInfo() {
const videoTrack = videoElem.srcObject.getVideoTracks()[0];
}
//==============================================================================
//==============================================================================
record.addEventListener('click', async function () {
record.style.display = 'none';
record_stop.style.display = 'inline-block';
let stream = await navigator.mediaDevices.getDisplayMedia({
video: true,
audio: true
})
//needed for better browser support
const mime = MediaRecorder.isTypeSupported("video/webm; codecs=vp9")
? "video/webm; codecs=vp9"
: "video/webm"
let mediaRecorder = new MediaRecorder(stream, {
mimeType: mime
})
let chunks = []
mediaRecorder.addEventListener('dataavailable', function(e) {
chunks.push(e.data)
})
mediaRecorder.addEventListener('stop', function(){
let blob = new Blob(chunks, {
type: chunks[0].type
})
let url = URL.createObjectURL(blob)
// let video = document.querySelector("video")
// video.src = url
let a = document.createElement('a')
a.href = url
a.download = 'video.webm'
a.click()
})
//we have to start the recorder manually
mediaRecorder.start()
})
record_stop.addEventListener('click', function () {
record_stop.style.display = 'none';
record.style.display = 'inline-block';
mediaRecorder.stop();
})
//==============================================================================
The screen sharing functionality is the 'Screen share logics' section above.
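The commented-out lines inside startCapture() above (call.peerConnection.getSenders()[1].replaceTrack(...)) hint at the usual approach: instead of only attaching the capture to a local element, replace the outgoing camera track with the display-capture track on every existing call. Below is a hedged sketch of that idea, reusing the peers map, videoStream, videoElem and displayMediaOptions from the script above; RTCRtpSender.replaceTrack() and PeerJS's call.peerConnection are standard APIs, but the exact wiring here is illustrative rather than taken from the post.
// Hedged sketch: push the captured screen to every connected peer by swapping
// the outgoing video track on each call's underlying RTCPeerConnection.
async function shareScreenWithRoom() {
  const screenStream = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions)
  const screenTrack = screenStream.getVideoTracks()[0]
  videoElem.srcObject = screenStream // keep showing the capture locally, as startCapture() does

  const replaceOutgoingVideo = (track) => {
    Object.values(peers).forEach(call => {
      const sender = call.peerConnection
        .getSenders()
        .find(s => s.track && s.track.kind === 'video')
      if (sender) sender.replaceTrack(track)
    })
  }

  replaceOutgoingVideo(screenTrack)

  // When the user stops sharing, switch everyone back to the camera track.
  screenTrack.onended = () => replaceOutgoingVideo(videoStream.getVideoTracks()[0])
}
Note that in the script above only outgoing calls end up in peers; for the screen to reach members who joined earlier, the calls received in myPeer.on('call', ...) would need to be stored in peers as well.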

How do you initialize a WebRTC call using the transceiver API but only enable audio&video later after signaling completed?

I am trying to first connect two WebRTC peers. Once the connection is established I want to give the users on both sides the option to enable/disable video and audio. This should happen without triggering the signaling process again.
I do run into an issue though: if I call replaceTrack(audioTrack), the remote peer will not play back audio until I also call replaceTrack(videoTrack).
I am unsure why this happens and cannot find any clue in the documentation. It does play fine after 10 seconds, once I also attach the video track. Without the video track there is no audio playback. Why?
function createVideoElement() {
const vid = document.createElement("video")
vid.width = 320;
vid.controls = true;
vid.autoplay = true;
const root = document.body;
document.body.appendChild(vid);
return vid;
}
async function RunTestInit() {
console.log("get media access");
const p1_stream_out = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
const p2_stream_out = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
console.log("stream setup");
const p1_stream_in = new MediaStream();
const p2_stream_in = new MediaStream();
const p1_video_in = createVideoElement();
const p2_video_in = createVideoElement();
console.log("peer setup");
const p1 = new RTCPeerConnection();
const p2 = new RTCPeerConnection();
const p1_tca = p1.addTransceiver("audio", {
direction: "sendrecv"
});
const p1_tcv = p1.addTransceiver("video", {
direction: "sendrecv"
});
p1.onicecandidate = (ev) => {
p2.addIceCandidate(ev.candidate);
}
p2.onicecandidate = (ev) => {
p1.addIceCandidate(ev.candidate);
}
p1.onconnectionstatechange = (ev) => {
console.log("p1 state: ", p1.connectionState);
}
p2.onconnectionstatechange = async (ev) => {
console.log("p2 state: ", p2.connectionState);
}
p1.onnegotiationneeded = () => {
//triggers once
console.warn("p1.onnegotiationneeded");
}
p2.onnegotiationneeded = () => {
//should never trigger
console.warn("p2.onnegotiationneeded");
}
p1.ontrack = (ev) => {
console.log("p1.ontrack", ev);
p1_stream_in.addTrack(ev.track);
p1_video_in.srcObject = p1_stream_in;
}
p2.ontrack = (ev) => {
console.log("p2.ontrack", ev);
p2_stream_in.addTrack(ev.track);
p2_video_in.srcObject = p2_stream_in;
}
console.log("signaling");
const offer = await p1.createOffer();
await p1.setLocalDescription(offer);
await p2.setRemoteDescription(offer);
const p2_tca = p2.getTransceivers()[0];
const p2_tcv = p2.getTransceivers()[1];
p2_tca.direction = "sendrecv"
p2_tcv.direction = "sendrecv"
const answer = await p2.createAnswer();
await p2.setLocalDescription(answer);
await p1.setRemoteDescription(answer);
console.log("signaling done");
//send audio from p2 to p1 (direction doesn't matter)
//after this runs nothing will happen and no audio plays
setTimeout(async () => {
await p2_tca.sender.replaceTrack(p2_stream_out.getAudioTracks()[0]);
console.warn("audio playback should start now but nothing happens");
}, 1000);
//audio starts playing once this runs
setTimeout(async () => {
//uncomment this and it works just fine
await p2_tcv.sender.replaceTrack(p2_stream_out.getVideoTracks()[0]);
console.warn("now audio playback starts");
}, 10000);
}
function start() {
setTimeout(async () => {
console.log("Init test case");
await RunTestInit();
}, 1);
}
Same example in the js fiddle (needs camera and microphone access):
https://jsfiddle.net/vnztcx5p/5/
Once audio works this will cause an echo.
That is a known issue. https://bugs.chromium.org/p/chromium/issues/detail?id=813243 and https://bugs.chromium.org/p/chromium/issues/detail?id=403710 have some background information.
In a nutshell, the video element expects you to send audio and video data, and these need to be synchronized. But you don't send any video data, and the element needs to fire a loadedmetadata and a resize event because that is what the specification says. Hence it will block audio indefinitely.
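If keeping audio and video in one element is not a requirement, one way around that gating (an illustrative workaround, not part of this answer) is to route the remote audio track into its own Audio element, so its playback never waits for video frames:
// Illustrative variant of the p1.ontrack handler from the question's code:
// play remote audio through a dedicated Audio element instead of the shared <video>.
p1.ontrack = (ev) => {
  if (ev.track.kind === "audio") {
    const audioEl = new Audio(); // subject to the browser's autoplay policy
    audioEl.autoplay = true;
    audioEl.srcObject = new MediaStream([ev.track]);
  } else {
    p1_stream_in.addTrack(ev.track);
    p1_video_in.srcObject = p1_stream_in;
  }
};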
You can enable/disable audio and video tracks, so you don't have to renegotiate. Note that these tracks have to be added before negotiation starts. You can achieve it with:
mediaStream.getAudioTracks()[0].enabled = false; // or true to enable it.
Or if you want to disable video:
mediaStream.getVideoTracks()[0].enabled = false; // or true to enable it.
Here is the documentation
getAudioTracks()
getVideoTracks()
I got this working. It looks more like a problem with how HTMLVideoElement works than with WebRTC.
If I set
p1_video_in.srcObject = p1_stream_in;
p2_video_in.srcObject = p2_stream_in;
before I add the tracks to the stream it works.
Complete example looks like this:
function createVideoElement() {
const vid = document.createElement("video")
vid.width = 320;
vid.controls = true;
vid.autoplay = true;
const root = document.body;
document.body.appendChild(vid);
return vid;
}
async function RunTestInit() {
console.log("get media access");
const p1_stream_out = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
const p2_stream_out = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
console.log("stream setup");
const p1_stream_in = new MediaStream();
const p2_stream_in = new MediaStream();
const p1_video_in = createVideoElement();
const p2_video_in = createVideoElement();
p1_video_in.srcObject = p1_stream_in;
p2_video_in.srcObject = p2_stream_in;
console.log("peer setup");
const p1 = new RTCPeerConnection();
const p2 = new RTCPeerConnection();
const p1_tca = p1.addTransceiver("audio", {
direction: "sendrecv"
});
const p1_tcv = p1.addTransceiver("video", {
direction: "sendrecv"
});
p1.onicecandidate = (ev) => {
p2.addIceCandidate(ev.candidate);
}
p2.onicecandidate = (ev) => {
p1.addIceCandidate(ev.candidate);
}
p1.onconnectionstatechange = (ev) => {
console.log("p1 state: ", p1.connectionState);
}
p2.onconnectionstatechange = async (ev) => {
console.log("p2 state: ", p2.connectionState);
}
p1.onnegotiationneeded = () => {
//triggers once
console.warn("p1.onnegotiationneeded");
}
p2.onnegotiationneeded = () => {
//should never trigger
console.warn("p2.onnegotiationneeded");
}
p1.ontrack = (ev) => {
console.log("p1.ontrack", ev);
p1_stream_in.addTrack(ev.track);
}
p2.ontrack = (ev) => {
console.log("p2.ontrack", ev);
p2_stream_in.addTrack(ev.track);
}
console.log("signaling");
const offer = await p1.createOffer();
await p1.setLocalDescription(offer);
await p2.setRemoteDescription(offer);
const p2_tca = p2.getTransceivers()[0];
const p2_tcv = p2.getTransceivers()[1];
p2_tca.direction = "sendrecv"
p2_tcv.direction = "sendrecv"
const answer = await p2.createAnswer();
await p2.setLocalDescription(answer);
await p1.setRemoteDescription(answer);
console.log("signaling done");
//send audio from p2 to p1 (direction doesn't matter)
//after this runs nothing will happen and no audio plays
setTimeout(async () => {
await p2_tca.sender.replaceTrack(p2_stream_out.getAudioTracks()[0]);
console.warn("audio playback should start now but nothing happens");
}, 1000);
//audio starts playing once this runs
setTimeout(async () => {
//uncomment this and it works just fine
await p2_tcv.sender.replaceTrack(p2_stream_out.getVideoTracks()[0]);
console.warn("now audio playback starts");
}, 10000);
}
function start() {
setTimeout(async () => {
console.log("Init test case");
await RunTestInit();
}, 1);
}

WebRTC Data Channel Connection Fails as soon as I add media tracks to the connection

I have been trying to create a video group-call application. It follows a mesh topology, and the steps I follow are these:
When a client connects, a room is created if it is the first client; otherwise it broadcasts its socket id to all other clients in that room.
When a client receives the socket id, it sends that client an offer and the cycle starts. A similar flow is followed for the other clients.
The problem is that when I simply create data channels, it works fine, but as soon as I start adding media tracks, things break and even the data channel doesn't get connected. I'll attach the code below.
import { io } from 'socket.io-client'
class RTConnection {
constructor() {
this.config = {
iceServers: [
{
urls: ["stun:stun.l.google.com:19302"]
}
]
}
this.connections = {}
this.videoElements = {}
this.ICEqueue = {}
}
init = async (roomId, videoElemRef) => {
this.roomId = roomId
this.videoElemRef = videoElemRef.current
let video = document.createElement('VIDEO')
this.myVideo = video
video.onloadedmetadata = e => video.play()
this.videoElemRef.appendChild(video)
const stream = await navigator.mediaDevices.getUserMedia({video: true, audio: false})
video.srcObject = stream
this.socket = io()
this.socket.emit("join-room", roomId)
this.socket.on('new-member', id => {
this.createOffer(id)
})
this.socket.on("offer", (id, offer) => {
this.acceptOffer(id, offer)
})
this.socket.on("new-ice-candidate", (id, candidate) => {
this.handleIceCandidate(id, candidate)
})
this.socket.on("answer", (id, answer) => {
this.acceptAnswer(id, answer)
})
}
acceptAnswer = async (id, answer) => {
const peerConnection = this.connections[id]
await peerConnection.setRemoteDescription(answer)
console.log("Answer Accepted")
}
handleIceCandidate = async (id, candidate) => {
if(!candidate) {
return
}
const peerConnection = this.connections[id]
if(peerConnection.remoteDescription) {
console.log("Setting Ice")
console.log(candidate)
await peerConnection.addIceCandidate(new RTCIceCandidate(candidate))
} else {
if(id in this.ICEqueue) {
console.log("Storing Ice Candidates")
console.log(id)
this.ICEqueue[id].push(candidate)
} else {
this.ICEqueue[id] = []
}
}
}
hydrateIceCandidates = async (id) => {
const peerConnection = this.connections[id]
console.log("Hydrating Ice")
for(let i in this.ICEqueue[id]) {
const candidate = this.ICEqueue[id][i]
await peerConnection.addIceCandidate(candidate)
}
}
acceptOffer = async (id, offer) => {
const peerConnection = new RTCPeerConnection()
peerConnection.ondatachannel = e => {
console.log("Data Channel Received")
peerConnection.dc = e.channel
peerConnection.dc.onopen = e => console.log("Connected!!!!")
}
this.connections[id] = peerConnection
peerConnection.onicecandidate = e => {
this.socket.emit("new-ice-candidate", e.candidate)
}
peerConnection.ontrack = (e) => {
console.log("Received a track");
console.log(e.streams)
const video = document.createElement('VIDEO')
this.videoElemRef.appendChild(video)
video.onloadeddata = ev => {console.log("Meta Data Loaded") ; video.play()}
video.srcObject = e.streams[0]
}
const stream = this.myVideo.srcObject
console.log("Private Stream")
console.log(stream)
//await stream.getTracks().forEach(async track => peerConnection.addTrack(track, stream))
await peerConnection.setRemoteDescription(offer)
console.log("Remote Description Set")
const ans = await peerConnection.createAnswer()
console.log("Answer Created")
await peerConnection.setLocalDescription(ans)
console.log("Local Description Set")
await this.hydrateIceCandidates(id)
this.socket.emit("answer", id, ans)
}
createOffer = async (id) => {
const peerConnection = new RTCPeerConnection()
const stream = this.myVideo.srcObject
//stream.getTracks().forEach(track => peerConnection.addTrack(track, stream))
peerConnection.dc = peerConnection.createDataChannel("channel")
peerConnection.dc.onopen = e => {
console.log("Connected!!!")
}
this.connections[id] = peerConnection
peerConnection.ontrack = (e) => {
console.log("Received a track");
console.log(e.streams)
const video = document.createElement('VIDEO')
console.log("Video Element")
console.log(video)
this.videoElemRef.appendChild(video)
video.onloadedmetadata = ev => video.play()
video.srcObject = e.streams[0]
}
peerConnection.onicecandidate = (e) => {
this.socket.emit("new-ice-candidate", e.candidate)
}
const offer = await peerConnection.createOffer()
await peerConnection.setLocalDescription(offer)
console.log(peerConnection.localDescription)
this.socket.emit("offer", id, offer)
}
stopStream = (stream, elem) => {
stream.getTracks().forEach(track => {
track.stop()
})
this.videoElemRef.current.removeChild(elem)
}
cleanMedia = () => {
this.stopStream(this.videoEl.srcObject, this.videoEl)
for(let id in this.videoElements) {
this.stopStream(this.videoElements[id].srcObject, this.videoElements[id])
delete this.videoElements[id]
}
delete this.videoElements
}
leaveCall = () => {
for(let id in this.connections) {
this.connections[id].close()
delete this.connections[id]
}
delete this.connections
this.cleanMedia()
this.socket.disconnect()
}
}
export default RTConnection
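Two details in handleIceCandidate / hydrateIceCandidates above may be worth checking (observations about the posted code, not an answer from the thread): the first queued candidate is dropped, because the else branch creates the array without pushing into it, and acceptAnswer never drains the queue, so the offerer's side never adds candidates that arrived before the answer. A hedged sketch of both adjustments, written as drop-in replacements for the corresponding methods inside class RTConnection:
// Always create the queue and push, so the first early candidate is kept.
handleIceCandidate = async (id, candidate) => {
  if (!candidate) return
  const peerConnection = this.connections[id]
  if (peerConnection && peerConnection.remoteDescription) {
    await peerConnection.addIceCandidate(new RTCIceCandidate(candidate))
  } else {
    if (!(id in this.ICEqueue)) this.ICEqueue[id] = []
    this.ICEqueue[id].push(candidate)
  }
}
// Drain queued candidates once the answer's remote description is set.
acceptAnswer = async (id, answer) => {
  const peerConnection = this.connections[id]
  await peerConnection.setRemoteDescription(answer)
  await this.hydrateIceCandidates(id)
  console.log("Answer Accepted")
}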
Server-Side Code
const express = require('express')
const fs = require('fs')
const app = express()
const path = require('path')
const v4 = require('uuid').v4
const options = {
key: fs.readFileSync("key.pem"),
cert: fs.readFileSync("cert.pem")
}
const server = require('http').createServer(app)
const io = require('socket.io')(server)
const port = process.env.port || '5000'
app.set('view engine', 'ejs')
app.use("/",express.static(path.resolve(__dirname + '/public')))
app.get('/',(req, res) => {
res.render('home')
})
app.get('/room', (req,res) => {
const roomId = v4()
res.redirect(`/room/${roomId}`)
})
app.get('/room/:roomId',(req, res) => {
res.render('room', {roomId: req.params.roomId})
})
server.listen(port, () => {
console.log(`Starting server on ${port}`)
})
io.on("connection", socket => {
socket.on('join-room', (roomId) => {
socket.join(roomId)
socket.to(roomId).emit("new-member", socket.id)
socket.on("new-ice-candidate", (candidate) => {
socket.to(roomId).emit("new-ice-candidate", socket.id, candidate)
})
socket.on("offer", (id, offer) => {
socket.to(id).emit("offer", socket.id, offer)
})
socket.on("answer", (id, answer) => {
socket.to(id).emit("answer", socket.id, answer)
})
socket.on("disconnecting", () => {
for(let room of socket.rooms) {
socket.to(room).emit("leaveCall", socket.id)
}
})
})
})
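A related observation (again an assumption, not something confirmed in the thread): the client emits new-ice-candidate without saying which peer the candidate is for, and the server relays it to the whole room, so in a mesh every member receives candidates that belong to someone else's connection. One hedged adjustment is to address candidates to a specific socket id on both ends, mirroring how offers and answers are already relayed:
// Client side (inside createOffer / acceptOffer): include the target peer's socket id.
peerConnection.onicecandidate = e => {
  this.socket.emit("new-ice-candidate", id, e.candidate)
}
// Server side: relay only to that peer, tagged with the sender's id.
socket.on("new-ice-candidate", (targetId, candidate) => {
  socket.to(targetId).emit("new-ice-candidate", socket.id, candidate)
})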

Screen sharing is not working with getDisplayMedia

When I use getUserMedia, it works properly, but when I change getUserMedia to getDisplayMedia, the screen share shows in my own window but it is not streamed to the other device.
Here is my script.js
const socket = io('/')
const videoGrid = document.getElementById('video-grid')
const myPeer = new Peer(undefined, {
path: '/peerjs',
host: '/',
port: '443'
})
let myVideoStream;
const myVideo = document.createElement('video')
myVideo.muted = true;
const peers = {}
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(stream => {
myVideoStream = stream;
addVideoStream(myVideo, stream)
myPeer.on('call', call => {
call.answer(stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
})
socket.on('user-connected', userId => {
connectToNewUser(userId, stream)
})
// input value
let text = $("input");
// when press enter send message
$('html').keydown(function (e) {
if (e.which == 13 && text.val().length !== 0) {
socket.emit('message', text.val());
text.val('')
}
});
socket.on("createMessage", message => {
$("ul").append(`<li class="message"><b>user</b><br/>${message}</li>`);
scrollToBottom()
})
})
socket.on('user-disconnected', userId => {
if (peers[userId]) peers[userId].close()
})
myPeer.on('open', id => {
socket.emit('join-room', ROOM_ID, id)
})
function connectToNewUser(userId, stream) {
const call = myPeer.call(userId, stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
call.on('close', () => {
video.remove()
})
peers[userId] = call
}
function addVideoStream(video, stream) {
video.srcObject = stream
video.addEventListener('loadedmetadata', () => {
video.play()
})
videoGrid.append(video)
}
const scrollToBottom = () => {
var d = $('.main__chat_window');
d.scrollTop(d.prop("scrollHeight"));
}
const muteUnmute = () => {
const enabled = myVideoStream.getAudioTracks()[0].enabled;
if (enabled) {
myVideoStream.getAudioTracks()[0].enabled = false;
setUnmuteButton();
} else {
setMuteButton();
myVideoStream.getAudioTracks()[0].enabled = true;
}
}
const playStop = () => {
console.log('object')
let enabled = myVideoStream.getVideoTracks()[0].enabled;
if (enabled) {
myVideoStream.getVideoTracks()[0].enabled = false;
setPlayVideo()
} else {
setStopVideo()
myVideoStream.getVideoTracks()[0].enabled = true;
}
}
const shareScreen = async () => {
let captureStream = null
let userId=1233
try {
captureStream = await navigator.mediaDevices.getDisplayMedia()
connectToNewUser(userId, captureStream)
} catch (err) {
console.error("Error: " + err)
}
//
};
const setMuteButton = () => {
const html = `
<i class="fas fa-microphone"></i>
<span>Mute</span>
`
document.querySelector('.main__mute_button').innerHTML = html;
}
const setUnmuteButton = () => {
const html = `
<i class="unmute fas fa-microphone-slash"></i>
<span>Unmute</span>
`
document.querySelector('.main__mute_button').innerHTML = html;
}
const setStopVideo = () => {
const html = `
<i class="fas fa-video"></i>
<span>Stop Video</span>
`
document.querySelector('.main__video_button').innerHTML = html;
}
const setPlayVideo = () => {
const html = `
<i class="stop fas fa-video-slash"></i>
<span>Play Video</span>
`
document.querySelector('.main__video_button').innerHTML = html;
}
Here is my GitHub fork: github code
Everything is working fine on Heroku too, which is secured with an SSL certificate.
Here is my Heroku link
Replace this
const myPeer = new Peer(undefined, {
path: '/peerjs',
host: '/',
port: '443'
})
With this
var peer = new Peer();
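For context on that suggestion (a hedged note, not from the answer itself): with no arguments, PeerJS connects to its free public cloud signaling server and assigns the peer id itself, so no local PeerServer configuration is needed. If the app is meant to keep its own PeerServer mounted at /peerjs, the host, path and port options would instead need to match where that server is actually reachable.
// Assumed usage of the answer's suggestion, reusing socket and ROOM_ID from the question's script.
const peer = new Peer()
peer.on('open', id => {
  // The id only exists once the broker connection is open, so join the room here.
  socket.emit('join-room', ROOM_ID, id)
})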

Can't do video chat with peer.js

I'm following this website to make a video call in React.
I slightly changed the code in connection.js for my pure JavaScript project, but the other files are the same as in the tutorial.
Here is my connection.js
import openSocket from 'socket.io-client';
import Peer from 'peerjs';
let socketInstance = null;
let peers = {};
const initializePeerConnection = () => {
return new Peer('', {
host: 'localhost',
port: 9000,
secure: false //secure=true is https and wss. secure=false is http and ws
});
//return new Peer()
}
const initializeSocketConnection = () => {
const socket = openSocket('http://localhost:5000');
return socket
}
class Connection {
videoContainer = {};
message = [];
settings;
streaming = false;
myPeer;
socket;
myID = '';
constructor(settings) {
this.settings = settings;
this.myPeer = initializePeerConnection();
this.socket = initializeSocketConnection();
this.initializeSocketEvents();
this.initializePeersEvents();
}
initializeSocketEvents = () => {
this.socket.on('connect', () => {
console.log('socket connected');
});
this.socket.on('user-disconnected', (userID) => {
console.log('user disconnected-- closing peers', userID);
peers[userID] && peers[userID].close();
this.removeVideo(userID);
});
this.socket.on('disconnect', () => {
console.log('socket disconnected --');
});
this.socket.on('error', (err) => {
console.log('socket error --', err);
});
}
initializePeersEvents = () => {
this.myPeer.on('open', (id) => {
this.myID = id;
const roomID = window.location.pathname.split('/')[2];
const userData = {
userID: id, roomID
}
console.log('mydata::', userData)
this.socket.emit('join-room', userData);
this.setNavigatorToStream();
});
this.myPeer.on('error', (err) => {
console.log('peer connection error', err);
this.myPeer.reconnect();
})
}
setNavigatorToStream = () => {
this.getVideoAudioStream().then((stream) => {
if (stream) {
this.streaming = true;
this.createVideo({ id: this.myID, stream });
this.setPeersListeners(stream);
this.newUserConnection(stream);
}
})
}
getVideoAudioStream = (video=true, audio=true) => {
let quality = this.settings.params?.quality;
if (quality) quality = parseInt(quality);
const myNavigator = navigator.mediaDevices.getUserMedia ||
navigator.mediaDevices.webkitGetUserMedia ||
navigator.mediaDevices.mozGetUserMedia ||
navigator.mediaDevices.msGetUserMedia;
return myNavigator({
video: video ? {
frameRate: quality ? quality : 12,
noiseSuppression: true,
width: {min: 640, ideal: 1280, max: 1920},
height: {min: 480, ideal: 720, max: 1080}
} : false,
audio: audio,
});
}
createVideo = (createObj) => {
console.log('video created')
if (!this.videoContainer[createObj.id]) {
this.videoContainer[createObj.id] = {
...createObj,
};
const roomContainer = document.getElementById('room-container');
const videoContainer = document.createElement('div');
const video = document.createElement('video');
video.srcObject = this.videoContainer[createObj.id].stream;
video.id = createObj.id;
video.autoplay = true;
if (this.myID === createObj.id) {
video.muted = true;
video.className = 'my_video'
}
videoContainer.appendChild(video)
roomContainer.append(videoContainer);
} else {
const video = document.getElementById(createObj.id)
video.srcObject = createObj.stream
}
}
setPeersListeners = (stream) => {
//stream is my stream
this.myPeer.on('call', function(call) {
console.log('answered!!')
call.answer(stream);
call.on('stream', (userVideoStream) => {
console.log('user stream data', userVideoStream)
this.createVideo({ id: call.metadata.id, stream: userVideoStream });
});
call.on('close', () => {
console.log('closing peers listeners', call.metadata.id);
this.removeVideo(call.metadata.id);
});
call.on('error', () => {
console.log('peer error ------');
this.removeVideo(call.metadata.id);
});
peers[call.metadata.id] = call;
});
}
newUserConnection = (stream) => {
//stream is my stream
this.socket.on('new-user-connect', (userData) => {
this.connectToNewUser(userData, stream);
});
}
connectToNewUser(userData, stream) {
console.log('connectTonewUser is called')
const { userID } = userData;
const call = this.myPeer.call(userID, stream, { metadata: { id: this.myID }} );
console.log('your data:: new user connected', userData)
call.on('stream', (userVideoStream) => {
//userVideoStream is your stream
console.log('your stream::', userVideoStream)
this.createVideo({ id: userID, stream: userVideoStream, userData });
});
call.on('close', () => {
console.log('closing new user', userID);
this.removeVideo(userID);
});
call.on('error', () => {
console.log('peer error ------')
this.removeVideo(userID);
})
peers[userID] = call;
}
removeVideo = (id) => {
delete this.videoContainer[id];
const video = document.getElementById(id);
if (video) video.remove();
}
destroyConnection = () => {
const myMediaTracks = this.videoContainer[this.myID]?.stream.getTracks();
myMediaTracks?.forEach((track) => {
track.stop();
})
socketInstance?.socket.disconnect();
this.myPeer.destroy();
}
}
export default function createSocketConnectionInstance(settings={}) {
return socketInstance = new Connection(settings);
}
It is confirmed that both clients are connected to peer.js and the sockets are also connected.
However, after I call connectToNewUser, it seems the call is not answered in setPeersListeners.
I think the shape of the video-call function is much the same as in the peerjs docs.
Do you have any idea why it doesn't work?
Thank you :)
Somehow it worked when I removed all the arguments.
It used to be:
const initializePeerConnection = () => {
return new Peer('', {
host: 'localhost',
port: 9000,
secure: false //secure=true is https and wss. secure=false is http and ws
});
}
and in the terminal:
peerjs -p 9000
👇
This is working correctly:
const initializePeerConnection = () => {
return new Peer()
}
You don't have to execute peerjs in the terminal.
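A separate observation about the posted connection.js (not something raised in the thread): setPeersListeners registers the 'call' handler with a regular function, so this inside it is not the Connection instance and this.createVideo(...) will fail when a call arrives. An arrow function keeps the class's this; a hedged sketch of the same handler rewritten that way:
// Same logic as setPeersListeners above, but with an arrow function so `this`
// stays bound to the Connection instance when the 'call' event fires.
setPeersListeners = (stream) => {
  this.myPeer.on('call', (call) => {
    call.answer(stream);
    call.on('stream', (userVideoStream) => {
      this.createVideo({ id: call.metadata.id, stream: userVideoStream });
    });
    call.on('close', () => this.removeVideo(call.metadata.id));
    call.on('error', () => this.removeVideo(call.metadata.id));
    peers[call.metadata.id] = call;
  });
}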
