Video stream and WebSocket (JavaScript)

I'm trying to send video from the camera to the server and get it back on my computer. The video from the camera is displayed perfectly on the first screen, and the second screen should show the same video, but received from the server. I send the video to the server fine; the server accepts it and sends it back to the computer. But the video is not displayed on the second screen.
Here is my code.
Client:
<video autoplay class="video" id="video1"></video>
<video autoplay class="video" id="video2"></video>
<script type="text/javascript">
var video1, video2;
var port="<%out.write(port);%>";
var ws;
var connect=false;
switch(lang){
case "en":
text1="roll up";
text2="expand";
break;
case "ru":
text1="свернуть";
text2="развернуть";
break;
}
video1=document.querySelector('#video1');
ws = new WebSocket("ws://127.0.0.1:"+port);
ws.binaryType = 'arraybuffer';
navigator.mediaDevices.getUserMedia({ audio: true, video: {width: 400, height: 400}})
.then(stream => {
var streamChunks = [];
var outChunks = [];
video1.src = window.URL.createObjectURL(stream);
const mediaRecorder = new MediaRecorder(stream);
// write the recorded stream chunks into an array
mediaRecorder.addEventListener("dataavailable",function(event) {
if(connect==true){
streamChunks.push(event.data);
ws.send(streamChunks[0]);
}
streamChunks = [];
});
mediaRecorder.start();
setInterval(function() {
mediaRecorder.requestData();
}, 1000);
});
var mediaSource = new MediaSource();
var buffer;
var queue = [];
video2=document.querySelector('#video2');
ws.onopen = function(e) {
connect=true;
};
mediaSource.addEventListener('sourceopen', function(e) {
video2.play();
buffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
buffer.addEventListener('update', function() { //Note: Have tried 'updateend'
if (queue.length > 0 && !buffer.updating) {
buffer.appendBuffer(queue.shift());
}
});
}, false);
video2.src=window.URL.createObjectURL(mediaSource);
ws.onmessage = function(event) {
if (event.data instanceof ArrayBuffer) {
try {
if (buffer.updating || queue.length > 0) {
queue.push(event.data);
} else {
buffer.appendBuffer(event.data);
}
} catch (e) {
console.log(e);
}
} else {
writeResponse(event.data);
}
};
</script>
Server
package model;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.Set;
import org.java_websocket.WebSocket;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
public class video_chat_server extends WebSocketServer {
private Set<WebSocket> conns;
public video_chat_server( int port ) throws UnknownHostException {
super( new InetSocketAddress(port) );
conns = new HashSet<>();
}
public video_chat_server( InetSocketAddress address ) {
super(address);
}
@Override
public void onMessage(WebSocket conn, ByteBuffer blob) {
// TODO Auto-generated method stub
for (WebSocket sock : conns) {
sock.send(blob);
}
}
@Override
public void onOpen(WebSocket conn, ClientHandshake handshake) {
// TODO Auto-generated method stub
if(conns.size()<2)conns.add(conn);
}
}

Related

I am having a problem playing an audio blob I get from a microphone. It works the first time but doesn't seem to work after that

I am trying to add a voice-message feature to my chat app and I have a problem playing the audio that I record.
recordAudio.js
import { onUnmounted } from 'vue'
const constraint = { audio: true }
let chunks = []
function record() {
let mediaRecorder
let stream
function close() {
// console.log(mediaRecorder?.state, 'state man')
if (mediaRecorder && mediaRecorder?.state == 'recording'){
mediaRecorder?.stop()
stream && (
stream.getTracks()
.forEach(track => track.stop())
)
}
}
onUnmounted(() => {
close()
})
async function start() {
if (navigator.mediaDevices.getUserMedia) {
const strm = await navigator.mediaDevices.getUserMedia(constraint)
// console.log(strm)
if (!strm) return false
// console.log('media', mediaRecorder)
stream = strm
mediaRecorder = new MediaRecorder(strm)
mediaRecorder.start(100)
// console.log('listingin for audio')
mediaRecorder.ondataavailable = (e) => {
// console.log(e.data)
chunks.push(e.data)
}
}
return true
}
function stop() {
close()
const blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
console.log(chunks)
chunks = []
// const audioURL = URL.createObjectURL(blob)
return blob
}
return {
start,
stop
}
}
export {
record
}
This is a code fragment in a .vue file:
import {record} from '@util/recordAudio.js'
const { start, stop } = record()
async function recordAudio() {
if (!recording.value) {
let res = await start()
res && (recording.value = true)
} else {
stop()
recording.value = false
}
}
function sendAudio() {
let audioBlob = stop()
recording.value = false
console.log(audioBlob)
messages.addMessage({
id: props.chat.id,
message: {
id: 1,
to: 2,
from: 1,
message: audioBlob,
seen: true,
type: 'audio',
createdAt: new Date()
}
})
}
let url = {}
function getObjectUrl(id, message) {
if (!url[id]) {
url[id] = URL.createObjectURL(message)
return url[id]
}
return url[id]
}
//this is the template for the audio
<div v-if='message.type == "audio"'>
<audio controls :src='getObjectUrl(message.id, message.message)'></audio>
</div>
It seems to work the first time, but it doesn't work after that.
In Firefox I get the warning/error:
Media resource blob:http://localhost:5374/861d13c5-533f-4cd7-8608-68eecc7deb4e could not be decoded
Media resource blob:http://localhost:5374/861d13c5-533f-4cd7-8608-68eecc7deb4e could not be decoded, error: Error Code: NS_ERROR_DOM_MEDIA_METADATA_ERR (0x806e0006)
error message

RTCPeerConnection not listening for track event of remote

I am trying to use WebRTC to implement video calling in an application. The connection is established successfully between peers after sharing the offer, answer and ICE candidates.
The remote peer is sending its tracks (video, audio), but the local (receiving) peer is not listening for the track event from the remote peer connection.
Adding tracks to the peer connection:
const localStream = await window.navigator.mediaDevices.getUserMedia({video: true, audio: true});
localStream.getTracks().forEach(track => {
this.peerConnection.addTrack(track, localStream);
});
Listening for the track event on the other side:
this.peerConnection.addEventListener('track', async (event) => {
console.log("Remote video stream added to the video element...");
remoteStream.addTrack(event.track);
});
How can I receive and show the video of the remote peer?
Is there any way to inspect the streams/tracks of an RTCPeerConnection?
We are using Angular for our application. Here is the complete JS code:
Triggering video call component to offer call
makeVideoCall(){
var videoCallDialog = this.matDialog.open(VideoCallComponent, {
height: '600px',
width: '700px',
data : {
'friendUser' : this.friendUser
}
});
}
Listening for a video-call offer and triggering the video-call component:
if (chatMessage.type == ChatType.VIDEO_CALL_OFFER){
this.userChatService.findFriend(chatMessage.textFrom).subscribe(user=>{
chatMessage.data = JSON.parse(chatMessage.data);
var videoCallDialog = this.matDialog.open(VideoCallComponent, {
height: '600px',
width: '700px',
data : {
'friendUser' : user,
'videoCallOfferChat' : chatMessage
}
});
});
}
import { Component, OnInit, Inject } from '@angular/core';
import { MAT_DIALOG_DATA } from '@angular/material/dialog';
import { User } from 'src/app/user/model/user';
import { RxStompService } from '@stomp/ng2-stompjs';
import { Subscription } from 'rxjs';
import { LoginService } from 'src/app/login/service/login.service';
import { Chat } from 'src/app/chats/model/chat';
import { ChatType } from 'src/app/chats/model/chat-type';
import { v4 as uuidv4 } from 'uuid';
import { CryptoService } from 'src/app/crypto/service/crypto.service';
@Component({
selector: 'app-video-call',
templateUrl: './video-call.component.html',
styleUrls: ['./video-call.component.css']
})
export class VideoCallComponent implements OnInit {
currentUser : User;
friendUser : User;
chatSubscription : Subscription;
peerConnection : RTCPeerConnection;
offer : Chat;
answer : Chat;
iceCandidate : Chat;
generatedIceCandidateCount = 0;
receivedIceCandidateCount = 0;
iceCandidates = new Array<any>();
constructor(
private loginService : LoginService,
@Inject(MAT_DIALOG_DATA) public data: any,
private stompService : RxStompService,
private cryptoService : CryptoService
) {
this.currentUser = this.loginService.getCurrentUser();
if(data.friendUser){
this.friendUser = data.friendUser;
const configuration = {'iceServers': [{'urls': 'stun:stun.l.google.com:19302'}]};
this.peerConnection = new RTCPeerConnection(configuration);
this.startReceivingStreams();
// Listen for local ICE candidates on the local RTCPeerConnection
this.peerConnection.addEventListener('icecandidate', event => {
if (event.candidate) {
// console.log('ICE Candidate generated', event.candidate);
console.log("Genrated Candidate ", ++this.generatedIceCandidateCount);
this.iceCandidates.push(event.candidate);
// this.sendToSocket(this.createChat(null, ChatType.ICE_CANDIDATE, {'iceCandidate' : event.candidate}));
}else{
console.log("Candidates in buffer : ", this.generatedIceCandidateCount);
this.iceCandidates.forEach((candidate) => {
this.sendToSocket(this.createChat(null, ChatType.ICE_CANDIDATE, {'iceCandidate' : candidate}))
});
if(this.iceCandidates.length>0)
this.startTransmittingStreams();
}
});
// Listen for connectionstatechange on the local RTCPeerConnection
this.peerConnection.addEventListener('connectionstatechange', event => {
if (this.peerConnection.connectionState === 'connected') {
console.log("Connection done...");
}
});
}
this.startListeningVideoCalls();
if (data.videoCallOfferChat){
this.startReceivingStreams();
this.answerVideoCall(data.videoCallOfferChat);
}
else{
this.offerVideoCall();
}
}
ngOnInit(): void {}
async startTransmittingStreams(){
const localStream = await window.navigator.mediaDevices.getUserMedia({video: true, audio: true});
localStream.getTracks().forEach(async track => {
console.log("Adding track...", track);
await this.peerConnection.addTrack(track, localStream);
});
}
startReceivingStreams(){
console.log("Start receiving...");
let remoteStream = new MediaStream();
this.peerConnection.ontrack = event => {
console.log("[addEventListener] Remote video stream added to the video element...", event);
let remoteVideo : any = document.getElementById('friendUserVideoTrack');
remoteVideo.srcObject = remoteStream;
remoteStream.addTrack(event.track);
}
}
private startListeningVideoCalls(){
if(this.stompService.connected()){
this.chatSubscription = this.stompService.watch('/text/'+this.currentUser.id).subscribe((data:any)=>{
let chat = JSON.parse(data.body);
if(chat.data)
chat.data = JSON.parse(chat.data);
if (chat.type == ChatType.VIDEO_CALL_ANSWER) {
console.log('Video Call Answer ...', chat);
if (chat.data.answer) {
// Contains RTCConnection answer then connect
console.log('Call accepted', chat.data.answer);
this.videoCallAnswered(chat);
}else{
// Doesn't contain an RTCConnection answer: call rejected by recipient
console.log('Call rejected...');
}
}
if(chat.type == ChatType.ICE_CANDIDATE){
console.log('Chat with ICE Candidate ', chat);
this.iceCandidateReceived(chat.data.iceCandidate);
}
});
}
}
async offerVideoCall() {
var options = { offerToReceiveVideo: true, offerToReceiveAudio: true };
const offer = await this.peerConnection.createOffer(options);
await this.peerConnection.setLocalDescription(offer);
this.sendToSocket(this.createChat(null, ChatType.VIDEO_CALL_OFFER, {'offer' : offer}));
}
private async answerVideoCall(receivedChat : Chat){
if (receivedChat.data.offer) {
let remoteDescription = new RTCSessionDescription(receivedChat.data.offer);
await this.peerConnection.setRemoteDescription(remoteDescription);
var options = { offerToReceiveVideo: true, offerToReceiveAudio: true };
const answer = await this.peerConnection.createAnswer(options);
await this.peerConnection.setLocalDescription(answer);
this.sendToSocket(this.createChat(receivedChat.id, ChatType.VIDEO_CALL_ANSWER, {'answer' : answer}));
}
}
private async videoCallAnswered(chat : Chat){
const remoteDesc = new RTCSessionDescription(chat.data.answer);
await this.peerConnection.setRemoteDescription(remoteDesc);
}
private createChat(id : string, chatType : ChatType, data : any){
let chat : Chat = new Chat();
chat.id = uuidv4();
chat.textFrom = this.currentUser.id;
chat.textTo = this.friendUser.id;
chat.textFromName = this.currentUser.name;
chat.textToName = this.friendUser.name;
chat.date = new Date().getTime();
chat.type = chatType;
chat.message = this.cryptoService.encrypt(chat.message, this.friendUser.userChat.sk);
chat.data = JSON.stringify(data);
return chat;
}
private sendToSocket(chat : Chat){
if(this.stompService.connected()){
this.stompService.publish({
destination : "/app/text/"+this.friendUser.id,
body : JSON.stringify(chat)
});
}
}
private async iceCandidateReceived(iceCandidate : any){
if(this.peerConnection.remoteDescription){
try {
await this.peerConnection.addIceCandidate(new RTCIceCandidate(iceCandidate));
console.log("Received Count ", ++this.receivedIceCandidateCount);
} catch (e) {
console.error('Error adding received ice candidate', e);
}
}
}
}
Please try
this.peerConnection.addEventListener('track',function(event){
remoteVideo=document.getElementById('remoteVideo'); //Change your remote 'video element' id here
remoteVideo.srcObject = event.streams[0];
});
Since WebRTC connections are peer-to-peer, we cannot inspect or monitor the video from the server. The only things the signaling server will know are the SDPs and ICE candidates shared through it.
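On the client side, however, you can inspect what an RTCPeerConnection is actually receiving. A minimal sketch, assuming peerConnection is the connected RTCPeerConnection from the component above (the inspectConnection name is just for illustration):
// Minimal sketch: inspect the tracks and live stats of an RTCPeerConnection.
async function inspectConnection(peerConnection) {
  // Receivers describe the remote tracks negotiated on this connection.
  peerConnection.getReceivers().forEach(receiver => {
    console.log('receiving', receiver.track.kind, 'readyState:', receiver.track.readyState);
  });
  // getStats() exposes live RTP statistics (bytes received, packets lost, ...).
  const stats = await peerConnection.getStats();
  stats.forEach(report => {
    if (report.type === 'inbound-rtp') {
      console.log('inbound-rtp', report.kind, report.bytesReceived, report.packetsLost);
    }
  });
}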

Original audio of tab gets muted while using chrome.tabCapture.capture() and MediaRecorder()

When I use chrome.tabCapture.capture() with the MediaRecorder API to record a stream, the original audio of the tab I am capturing gets muted, but the audio comes out OK in the recorded stream. I want the audio in the tab to keep playing normally.
class Recorder {
constructor(onChunksAvailable) {
this.chunks = [];
this.active = false;
this.callback = onChunksAvailable;
}
start(stream) {
if (this.active) {
throw new Error("recorder is already running");
}
this.recorder = new MediaRecorder(stream, {
mimeType: "audio/webm",
});
this.recorder.onstop = () => {
stream.getAudioTracks()[0].stop();
this.callback([...this.chunks]);
setTimeout(() => {
this.chunks = [];
});
this.active = false;
};
this.recorder.ondataavailable = (event) => this.chunks.push(event.data);
this.active = true;
this.recorder.start();
}
stop() {
if (!this.active) {
throw new Error("recorder is already stop");
} else {
this.recorder.stop();
}
}
}
let rec = new Recorder(async (chunks) => {
//using chunks then to get the stream
});
chrome.tabCapture.capture(
{
audio: true,
video: false,
},
function (stream) {
rec.start(stream);
});
Forgive me for the lack of documentation, as I last played with these APIs years ago, but MDN has some material.
In my case, adding these three lines to the start function fixed it:
this.context = new AudioContext();
this.stream = this.context.createMediaStreamSource(stream);
this.stream.connect(this.context.destination);
class Recorder {
constructor(onChunksAvailable) {
this.chunks = [];
this.active = false;
this.callback = onChunksAvailable;
this.context = new AudioContext();
}
start(stream) {
if (this.active) {
throw new Error("recorder is already running");
}
// Reconnect the stream to actual output
this.stream = this.context.createMediaStreamSource(stream);
this.stream.connect(this.context.destination);
this.recorder = new MediaRecorder(stream, {
mimeType: "audio/webm",
});
this.recorder.onstop = () => {
stream.getAudioTracks()[0].stop();
this.callback([...this.chunks]);
setTimeout(() => {
this.chunks = [];
});
this.active = false;
};
this.recorder.ondataavailable = (event) => this.chunks.push(event.data);
this.active = true;
this.recorder.start();
}
stop() {
if (!this.active) {
throw new Error("recorder is already stop");
} else {
this.recorder.stop();
}
}
}
let rec = new Recorder(async (chunks) => {
//using chunks then to get the stream
});
chrome.tabCapture.capture(
{
audio: true,
video: false,
},
function (stream) {
rec.start(stream);
})
Sorry for the lack of details, but I believe that when you start an audio capture, the stream is disconnected from the default output (the speakers). By creating a secondary MediaStreamSource and connecting it to the default output (AudioContext.destination), you allow the stream to keep playing through the speakers while it is also fed into your recorder. (A short usage sketch follows the sources below.)
Sources
MDN: AudioContext
MDN: MediaStreamSource
Chrome extension I made 2 years ago
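As a usage note (illustrative only, not part of the original answer): the elided callback above typically just assembles the recorded chunks into a playable Blob. A minimal sketch, assuming the Recorder class above; the tabRec name and the 5-second timeout are made up for the example:
let tabRec = new Recorder(function (chunks) {
  // Assemble the recorded chunks into a Blob matching the recorder's mimeType
  var blob = new Blob(chunks, { type: "audio/webm" });
  var audio = new Audio(URL.createObjectURL(blob));
  audio.play(); // play back the captured tab audio
});
chrome.tabCapture.capture({ audio: true, video: false }, function (stream) {
  tabRec.start(stream);
  setTimeout(function () { tabRec.stop(); }, 5000); // stop after 5 seconds for the example
});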

How to send and receive a desktop capture stream generated via getUserMedia()

I am making a screen-sharing app with WebRTC + Socket.io and I am stuck.
I have connected two browsers using WebRTC + Socket.io and can send text.
I am taking support from the codelab, but it does not cover streams (if the solution is based on this link, that would be highly helpful).
How can I send the getUserMedia() stream:
dataChannel.send(stream);
And receive the same stream in channel.onmessage()?
I am getting event.data as the string '[object MediaStream]', not the stream.
channel.onmessage = function(event){
// unable to get correct stream
// event.data is "[object MediaStream]" in string
}
function createPeerConnection(isInitiator, config) {
console.log('Creating Peer connection as initiator?', isInitiator, 'config:', config);
peerConn = new RTCPeerConnection(config);
// send any ice candidates to the other peer
peerConn.onicecandidate = function (event) {
console.log('onIceCandidate event:', event);
if (event.candidate) {
sendMessage({
type: 'candidate',
label: event.candidate.sdpMLineIndex,
id: event.candidate.sdpMid,
candidate: event.candidate.candidate
});
} else {
console.log('End of candidates.');
}
};
if (isInitiator) {
console.log('Creating Data Channel');
dataChannel = peerConn.createDataChannel("screen");
onDataChannelCreated(dataChannel);
console.log('Creating an offer');
peerConn.createOffer(onLocalSessionCreated, logError);
} else {
peerConn.ondatachannel = function (event) {
console.log('ondatachannel:', event.channel);
dataChannel = event.channel;
onDataChannelCreated(dataChannel);
};
}
}
It is working fine for strings or JSON, i.e. dataChannel.send('Hello');
I have created a wiki page for the same: wiki
Please help.
Please try something like this (explanation at the end of the code):
var btnShareYourCamera = document.querySelector('#share-your-camera');
var localVideo = document.querySelector('#local-video');
var remoteVideo = document.querySelector('#remote-video');
var websocket = new WebSocket('wss://path-to-server:port/');
websocket.onmessage = function(event) {
var data = JSON.parse(event.data);
if (data.sdp) {
if (data.sdp.type === 'offer') {
getUserMedia(function(video_stream) {
localVideo.srcObject = video_stream;
answererPeer(new RTCSessionDescription(data.sdp), video_stream);
});
}
if (data.sdp.type === 'answer') {
offerer.setRemoteDescription(new RTCSessionDescription(data.sdp));
}
}
if (data.candidate) {
addIceCandidate((offerer || answerer), new RTCIceCandidate(data.candidate));
}
};
var iceTransportPolicy = 'all';
var iceTransportLimitation = 'udp';
function addIceCandidate(peer, candidate) {
if (iceTransportLimitation === 'tcp') {
if (candidate.candidate.toLowerCase().indexOf('tcp') === -1) {
return; // ignore UDP
}
}
peer.addIceCandidate(candidate);
}
var offerer, answerer;
var iceServers = {
iceServers: [{
'urls': [
'stun:stun.l.google.com:19302',
'stun:stun1.l.google.com:19302',
'stun:stun2.l.google.com:19302',
'stun:stun.l.google.com:19302?transport=udp',
]
}],
iceTransportPolicy: iceTransportPolicy,
rtcpMuxPolicy: 'require',
bundlePolicy: 'max-bundle'
};
// https://cdn.webrtc-experiment.com/IceServersHandler.js
if (typeof IceServersHandler !== 'undefined') {
iceServers.iceServers = IceServersHandler.getIceServers();
}
var mediaConstraints = {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
};
/* offerer */
function offererPeer(video_stream) {
offerer = new RTCPeerConnection(iceServers);
offerer.idx = 1;
video_stream.getTracks().forEach(function(track) {
offerer.addTrack(track, video_stream);
});
offerer.ontrack = function(event) {
remoteVideo.srcObject = event.streams[0];
};
offerer.onicecandidate = function(event) {
if (!event || !event.candidate) return;
websocket.send(JSON.stringify({
candidate: event.candidate
}));
};
offerer.createOffer(mediaConstraints).then(function(offer) {
offerer.setLocalDescription(offer).then(function() {
websocket.send(JSON.stringify({
sdp: offer
}));
});
});
}
/* answerer */
function answererPeer(offer, video_stream) {
answerer = new RTCPeerConnection(iceServers);
answerer.idx = 2;
video_stream.getTracks().forEach(function(track) {
answerer.addTrack(track, video_stream);
});
answerer.ontrack = function(event) {
remoteVideo.srcObject = event.streams[0];
};
answerer.onicecandidate = function(event) {
if (!event || !event.candidate) return;
websocket.send(JSON.stringify({
candidate: event.candidate
}));
};
answerer.setRemoteDescription(offer).then(function() {
answerer.createAnswer(mediaConstraints).then(function(answer) {
answerer.setLocalDescription(answer).then(function() {
websocket.send(JSON.stringify({
sdp: answer
}));
});
});
});
}
var video_constraints = {
mandatory: {},
optional: []
};
function getUserMedia(successCallback) {
function errorCallback(e) {
alert(JSON.stringify(e, null, '\t'));
}
var mediaConstraints = {
video: true,
audio: true
};
navigator.mediaDevices.getUserMedia(mediaConstraints).then(successCallback).catch(errorCallback);
}
btnShareYourCamera.onclick = function() {
getUserMedia(function(video_stream) {
localVideo.srcObject = video_stream;
offererPeer(video_stream);
});
};
You must attach the stream using peer.addTrack, as you can see in the above example.
You must receive the remote stream using peer.ontrack, as you can see in the above example.
I.e. use addTrack to attach your camera and ontrack to receive the remote camera.
You must never send your stream using dataChannel.send; they are totally different protocols. A MediaStream must be shared using RTP, not SCTP. RTP is used only if you call the peer.addTrack method to attach your camera stream.
This process happens before you open or join a room. (See the short contrast sketch after the HTML below.)
See single-page demo here: https://www.webrtc-experiment.com/getStats/
HTML for above code snippet:
<button id="share-your-camera"></button>
<video id="local-video" controls autoplay playsinline></video>
<video id="remote-video" controls autoplay playsinline></video>

Error in a simple WebRTC chat on React.js and Meteor.js

I'm trying to make a simple chat using WebRTC technology, React.js and Meteor.js.
This is the client code:
class Rtc extends Component {
constructor(props) {
super(props);
}
componentDidUpdate(){
let localVideo, remoteVideo, peerConnection, localStream;
$('#start').on('click', ()=>{ start(true) });
let id = Meteor.uuid();
localVideo = document.getElementById('localVideo');
remoteVideo = document.getElementById('remoteVideo');
if (!this.props.loadingRtc) {
this.props.messagesRtc.forEach((item, i ,arr)=>{
let signal = JSON.parse(item.text);
if(i == 0)return;
gotMessageFromServer(signal);
});
}
if(navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices.getUserMedia( { video:true, audio:true}).then( ( stream )=> {
localStream = stream;
localVideo.src = window.URL.createObjectURL(stream);
}).catch(errorHandler);
} else { alert('Your browser does not support getUserMedia API'); }
function start(isCaller) {
peerConnection = new RTCPeerConnection( { 'iceServers': [{'urls': 'stun:stun.services.mozilla.com'}, {'urls': 'stun:stun.l.google.com:19302'},]});
peerConnection.onicecandidate = ( e ) => {
console.log('e.candidate', e.candidate);
if(e.candidate != null) {
Meteor.call('addMsgRtc', JSON.stringify({'ice': e.candidate, '_id':id}), id);
}
};
peerConnection.onaddstream = ( e )=>{
remoteVideo.src = window.URL.createObjectURL(e.stream);
};
peerConnection.addStream(localStream);
if(isCaller) {
peerConnection.createOffer().then(createdDescription).catch(errorHandler);
}
}
function gotMessageFromServer(signal) {
if(!peerConnection) start(false);
if(signal._id == id) return;
if(signal.sdp) {
peerConnection.setRemoteDescription(new RTCSessionDescription(signal.sdp)).then(()=> {
if(signal.sdp.type == 'offer') {
peerConnection.createAnswer().then(createdDescription).catch(errorHandler);
}
}).catch(errorHandler);
} else if(signal.ice) {
peerConnection.addIceCandidate(new RTCIceCandidate(signal.ice)).catch(errorHandler);
}
}
function createdDescription(description) {
peerConnection.setLocalDescription(description).then(()=> {
Meteor.call('addMsgRtc', JSON.stringify({'sdp':peerConnection.localDescription, '_id':id}), id);
}).catch(errorHandler);
}
function errorHandler(error) { console.log(error); }
}
render() {
return (
<div>
<video id="localVideo" autoPlay muted style={{width:"40%"}}></video>
<video id="remoteVideo" autoPlay style={{width:"40%"}}></video>
<br/>
<input type="button" id="start" value="Start Video"/>
</div>
);
}
}
export default createContainer( ()=> {
const subscriptionRtc = Meteor.subscribe('rtc');
const loadingRtc = !subscriptionRtc.ready();
return {
loadingRtc:loadingRtc,
messagesRtc: msgRtc.find().fetch(),
};
}, App);
Server code:
export const msgRtc = new Mongo.Collection('rtc');
let messagesRtc = [];
let clients = [];
Meteor.publish('rtc', function wsPub() {
clients.push(this);
_.each(messagesRtc, (message) => {
this.added('rtc', message._id, message);
});
this.ready();
});
Meteor.methods({
'addMsgRtc'(arr, id) {
let newMessage = {_id:id, 'text':arr};
messagesRtc.push(newMessage);
_.each(clients, (client) => {
client.added('rtc', id, newMessage);
});
},
});
The problem is that after getUserMedia initializes, the video exchange does not go any further, and I can't understand why. Essentially similar code works and syncs fine over plain WebSockets.
UPD:
When I click the button and call the start function, I get:
TypeError: Argument 1 of RTCPeerConnection.addStream is not an object
If I put the WebSocket message handling into the then() callback, another error occurs:
Code:
navigator.mediaDevices.getUserMedia( { video:true, audio:true}).then( ( stream )=> {
localStream = stream;
localVideo.src = window.URL.createObjectURL(stream);
}).catch(errorHandler).then(()=>{
if (!this.props.loadingRtc) {
for(let i of this.props.messagesRtc){
let signal = JSON.parse(i.text);
gotMessageFromServer(signal)
}
}
}).catch(errorHandler);
Error:
DOMException [InvalidStateError: "Cannot set remote offer or answer in current state have-remote-offer" code: 11 nsresult: 0x8053000b]
app.js:11075:9 DOMException [InvalidStateError: "No outstanding offer" code: 11 nsresult: 0x8053000b]
