Cannot read property 'stop' of undefined error in mediaRecorder.stop(); - javascript

Previously the code worked normally, like this: when you clicked the (start) button, access to the webcam and microphone was requested, and after permission was granted, video recording started. Now it does not work. This script records a video and transmits it via ajax.
The place where the error occurs is marked with two ERROR comments.
$(document).ready(function(){
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
// This code is adapted from
// https://rawgit.com/Miguelao/demos/master/mediarecorder.html
'use strict';
/* globals MediaRecorder */
let mediaRecorder;
let recordedBlobs;
const errorMsgElement = document.querySelector('span#errorMsg'); // needed by the catch block in startRecording()
const recordedVideo = document.querySelector('video#recorded');
const recordButton = document.querySelector('#record');
recordButton.addEventListener('click', () => {
document.getElementById("start").click();
if (recordButton.textContent === 'Start Recording') {
startRecording();
$('#start').css("display","none");
// setTimeout(stopRecording, 50000);
} else {
stopRecording();
recordButton.textContent = 'Start Recording';
playButton.disabled = false;
downloadButton.disabled = false;
}
});
const playButton = document.querySelector('button#play');
playButton.addEventListener('click', () => {
const superBuffer = new Blob(recordedBlobs, {type: 'video/webm'});
recordedVideo.src = null;
recordedVideo.srcObject = null;
recordedVideo.src = window.URL.createObjectURL(superBuffer);
recordedVideo.controls = true;
recordedVideo.play();
});
const downloadButton = document.querySelector('#download');
downloadButton.addEventListener('click', () => {
const blob = new Blob(recordedBlobs, {type: 'video/webm'});
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = 'test.webm';
document.body.appendChild(a);
a.click();
setTimeout(() => {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 100);
location.reload();
});
function handleDataAvailable(event) {
console.log('handleDataAvailable', event);
if (event.data && event.data.size > 0) {
recordedBlobs.push(event.data);
}
}
function startRecording() {
recordedBlobs = [];
let options = {mimeType: 'video/webm;codecs=vp9,opus'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.error(`${options.mimeType} is not supported`);
options = {mimeType: 'video/webm;codecs=vp8,opus'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.error(`${options.mimeType} is not supported`);
options = {mimeType: 'video/webm'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.error(`${options.mimeType} is not supported`);
options = {mimeType: ''};
}
}
}
try {
mediaRecorder = new MediaRecorder(window.stream, options);
} catch (e) {
console.error('Exception while creating MediaRecorder:', e);
errorMsgElement.innerHTML = `Exception while creating MediaRecorder: ${JSON.stringify(e)}`;
return;
}
console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
recordButton.textContent = 'Stop Recording';
playButton.disabled = true;
downloadButton.disabled = true;
mediaRecorder.onstop = (event) => {
console.log('Recorder stopped: ', event);
console.log('Recorded Blobs: ', recordedBlobs);
};
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.start();
console.log('MediaRecorder started', mediaRecorder);
}
function stopRecording() {
//ERROR
mediaRecorder.stop();
//ERROR
//document.getElementById("download").click();
$('#record').css( "display", "none" );
$('#download').attr( "style", '');
}
function handleSuccess(stream) {
recordButton.disabled = false;
console.log('getUserMedia() got stream:', stream);
window.stream = stream;
const gumVideo = document.querySelector('video#gum');
gumVideo.srcObject = stream;
}
async function init(constraints) {
try {
const stream = await navigator.mediaDevices.getUserMedia(constraints);
handleSuccess(stream);
} catch (e) {
console.error('navigator.getUserMedia error:', e);
// errorMsgElement.innerHTML = `navigator.getUserMedia error:${e.toString()}`;
}
}
document.querySelector('#start').addEventListener('click', async () => {
$('#main_img').css('display', 'none');
$('#gum').attr('style', '');
//const hasEchoCancellation = document.querySelector('#echoCancellation').checked;
const constraints = {
//audio: {
//echoCancellation: {exact: hasEchoCancellation}
//},
video: {
width: 1280, height: 720
}
};
console.log('Using media constraints:', constraints);
await init(constraints);
});
});
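For context on the ERROR markers: "Cannot read property 'stop' of undefined" means stopRecording() ran while mediaRecorder was still undefined, i.e. startRecording() never assigned it (for example because window.stream was not ready when the MediaRecorder was constructed). A minimal defensive sketch, reusing the variables above; the sendVideo helper and its /upload endpoint are hypothetical placeholders:

function stopRecording() {
  // Guard: if getUserMedia failed or has not finished yet, startRecording()
  // returned early and mediaRecorder was never assigned.
  if (!mediaRecorder || mediaRecorder.state === 'inactive') {
    console.warn('Recorder was never started, nothing to stop');
    return;
  }
  mediaRecorder.stop();
  $('#record').css('display', 'none');
  $('#download').attr('style', '');
}

// Hypothetical helper for the "transmit via ajax" step: posts the recorded
// chunks as multipart form data to a made-up /upload endpoint.
function sendVideo() {
  const blob = new Blob(recordedBlobs, {type: 'video/webm'});
  const formData = new FormData();
  formData.append('video', blob, 'recording.webm');
  $.ajax({
    url: '/upload', // assumption: replace with your real endpoint
    type: 'POST',
    data: formData,
    processData: false, // send the FormData as-is
    contentType: false, // let the browser set the multipart boundary
    success: () => console.log('Upload finished'),
    error: (xhr) => console.error('Upload failed', xhr.status)
  });
}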

Related

Screen share to the connected peer with webRTC

I am exploring WebRTC and trying to build a Zoom-like web application. I can successfully share my screen and show it on my own device, but I cannot show the shared screen to the connected peers. I want to share my screen with everyone who is connected to the same room. I don't know how to do that. Can anyone help me with this issue?
Below is the JavaScript code:
Script.js
const socket = io('/')
const videoGrid = document.getElementById('video-grid')
const myPeer = new Peer(undefined, {
host: '/',
port: '8000'
})
const myVideo = document.createElement('video')
myVideo.muted = true
const peers = {}
let videoStream
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(stream => {
videoStream = stream
addVideoStream(myVideo, stream)
myPeer.on('call', call => {
call.answer(stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
})
socket.on('user-connected', userId => {
connectToNewUser(userId, stream)
})
socket.on('user-disconnected', userId => {
if (peers[userId]) peers[userId].close()
})
})
myPeer.on('open', id => {
socket.emit('join-room', ROOM_ID, id)
})
function connectToNewUser(userId, stream) {
const call = myPeer.call(userId, stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
call.on('close', () => {
video.remove()
})
peers[userId] = call
}
function addVideoStream(video, stream) {
video.srcObject = stream
video.addEventListener('loadedmetadata', () => {
video.play()
})
videoGrid.append(video)
}
//====================================== Front-end styling logics =================================
const audio = document.getElementById('audio')
const audio_mute = document.getElementById('audio-mute')
const video = document.getElementById('video')
const video_mute = document.getElementById('video-mute')
const screen_share = document.getElementById('screen-share')
const record = document.getElementById('record')
const record_stop = document.getElementById('record-stop')
const leave_btn = document.getElementById('leave-btn')
const message_view_box = document.getElementById('message-view-box')
audio.addEventListener('click', function () {
const track = videoStream.getAudioTracks()[0].enabled
console.log(videoStream.getAudioTracks());
if (track) {
videoStream.getAudioTracks()[0].enabled = false
}
else {
videoStream.getAudioTracks()[0].enabled = true
}
audio.style.display = 'none';
audio_mute.style.display = 'inline-block';
})
audio_mute.addEventListener('click', function () {
const track = videoStream.getAudioTracks()[0].enabled
console.log(videoStream.getAudioTracks());
if (track) {
videoStream.getAudioTracks()[0].enabled = false
}
else {
videoStream.getAudioTracks()[0].enabled = true
}
audio_mute.style.display = 'none';
audio.style.display = 'inline-block';
})
video.addEventListener('click', function () {
const track = videoStream.getVideoTracks()[0].enabled
console.log(videoStream.getVideoTracks()[0].enabled);
if (track) {
videoStream.getVideoTracks()[0].enabled = false
}
else {
videoStream.getVideoTracks()[0].enabled = true
}
video.style.display = 'none';
video_mute.style.display = 'inline-block';
})
video_mute.addEventListener('click', function () {
const track = videoStream.getVideoTracks()[0].enabled
console.log(videoStream.getVideoTracks()[0].enabled);
if (track) {
videoStream.getVideoTracks()[0].enabled = false
}
else {
videoStream.getVideoTracks()[0].enabled = true
}
video_mute.style.display = 'none';
video.style.display = 'inline-block';
})
// ============================= Chat box logics ===============================
let chat_box = document.getElementById('chat_box');
let chat_box_input = document.getElementById('chat_box_input');
let send_icon = document.getElementById('send-icon');
chat_box.addEventListener('submit', function (e) {
e.preventDefault()
// console.log(e.target.chat_box_input.value);
if (chat_box_input.value) {
socket.emit('chat message', chat_box_input.value);
chat_box_input.value = '';
}
// e.target.chat_box_input.value = ''
})
socket.on('chat message', function (msg) {
const item = document.createElement('li');
item.textContent = msg;
message_view_box.appendChild(item);
message_view_box.scrollTop = message_view_box.scrollHeight - message_view_box.clientHeight;
});
// =============================================================================
//================================== Screen share logics =======================
const videoElem = document.getElementById('screen')
screen_share.addEventListener('click',async function () {
startCapture();
})
// Options for getDisplayMedia()
const displayMediaOptions = {
video: {
cursor: "always"
},
audio: true
};
async function startCapture() {
try {
videoElem.srcObject = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
// const videoStreamTrack = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
// call.peerConnection.getSenders()[1].replaceTrack(videoStreamTrack)
dumpOptionsInfo();
} catch (err) {
console.error(`Error: ${err}`);
}
}
function dumpOptionsInfo() {
const videoTrack = videoElem.srcObject.getVideoTracks()[0];
}
//==============================================================================
//==============================================================================
let mediaRecorder; // hoisted so the record_stop handler below can reach it
record.addEventListener('click', async function () {
record.style.display = 'none';
record_stop.style.display = 'inline-block';
let stream = await navigator.mediaDevices.getDisplayMedia({
video: true,
audio: true
})
//needed for better browser support
const mime = MediaRecorder.isTypeSupported("video/webm; codecs=vp9")
? "video/webm; codecs=vp9"
: "video/webm"
mediaRecorder = new MediaRecorder(stream, {
mimeType: mime
})
let chunks = []
mediaRecorder.addEventListener('dataavailable', function(e) {
chunks.push(e.data)
})
mediaRecorder.addEventListener('stop', function(){
let blob = new Blob(chunks, {
type: chunks[0].type
})
let url = URL.createObjectURL(blob)
// let video = document.querySelector("video")
// video.src = url
let a = document.createElement('a')
a.href = url
a.download = 'video.webm'
a.click()
})
//we have to start the recorder manually
mediaRecorder.start()
})
record_stop.addEventListener('click', function () {
record_stop.style.display = 'none';
record.style.display = 'inline-block';
mediaRecorder.stop();
})
//==============================================================================
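To actually show the shared screen to the other participants (not just preview it locally), one common approach is to swap the outgoing camera track for the screen track on every open call. A rough sketch, assuming the peers map of PeerJS calls from the code above and relying on PeerJS exposing the underlying RTCPeerConnection as call.peerConnection:

async function shareScreenWithPeers() {
  const screenStream = await navigator.mediaDevices.getDisplayMedia({ video: true });
  const screenTrack = screenStream.getVideoTracks()[0];
  // Replace the camera track with the screen track on each open call.
  Object.values(peers).forEach(call => {
    const sender = call.peerConnection
      .getSenders()
      .find(s => s.track && s.track.kind === 'video');
    if (sender) sender.replaceTrack(screenTrack);
  });
  // When the user clicks the browser's "Stop sharing" button, switch back.
  screenTrack.onended = () => {
    const cameraTrack = videoStream.getVideoTracks()[0];
    Object.values(peers).forEach(call => {
      const sender = call.peerConnection
        .getSenders()
        .find(s => s.track && s.track.kind === 'video');
      if (sender) sender.replaceTrack(cameraTrack);
    });
  };
}

replaceTrack avoids renegotiation, which is why it is usually preferred over adding a second video track.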

MediaRecorder class Not Available in Electron APP

I was following Fireship's Electron tutorial to build a desktop capturer.
One thing I know is that, as of now, there is a huge difference between the version I used and his.
The only problem I am having is during the instantiation of the MediaRecorder class:
the class is not recognized at all.
Is there a way I can fix it?
Render.js - Source Code
// Buttons
const videoElement = document.querySelector('video');
const startBtn = document.getElementById('startBtn');
startBtn.onclick = e => {
mediaRecorder.start();
startBtn.classList.add('is-danger');
startBtn.innerText = 'Recording';
};
const stopBtn = document.getElementById('stopBtn');
stopBtn.onclick = e => {
mediaRecorder.stop();
startBtn.classList.remove('is-danger');
startBtn.innerText = 'Start';
};
const videoSelectBtn = document.getElementById('videoSelectBtn');
videoSelectBtn.onclick = getVideoSources;
const { desktopCapturer, remote } = require('electron');
const { dialog, Menu } = remote;
// Get the available video sources
async function getVideoSources() {
const inputSources = await desktopCapturer.getSources({
types: ['window', 'screen']
});
const videoOptionsMenu = Menu.buildFromTemplate(
inputSources.map(source => {
return {
label: source.name,
click: () => selectSource(source)
};
})
);
videoOptionsMenu.popup();
}
let mediaRecorder; //MediaRecorder instance to capture footage
const recordedChunks = [];
// Change the videoSources window to record
async function selectSource(source) {
videoSelectBtn.innerText = source.name;
const constraints = {
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: source.id
}
}
};
// Create a Stream
const stream = await navigator.mediaDevices.getUserMedia(constraints);
//Preview the source in a video element
videoElement.srcObject = stream;
videoElement.play();
// Create the Media Recorder
const options = { mimeType: 'video/webm; codecs=vp9' };
mediaRecorder = new MediaRecorder(stream, options);
// Register Event Handlers
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.onstop = handleStop;
}
// Captures all recorded chunks
function handleDataAvailable(e) {
console.log('video data available')
recordedChunks.push(e.data);
}
const { writeFile } = require('fs');
//Saves the video file on stop
async function handleStop(e) {
const blob = new Blob(recordedChunks,{
type: 'video/webm; codecs=vp9'
});
const buffer = Buffer.from(await blob.arrayBuffer());
const { filePath } = await dialog.showSaveDialog({
buttonLabel: 'Save Video',
defaultPath: `vid -${Date.now()}.webm`
});
console.log(filePath);
writeFile(filePath, buffer, () => console.log('Video Saved Successfully!'));
}
Web Preferences - Index.js
const mainWindow = new BrowserWindow({
width: 800,
height: 600,
webPreferences: {
nodeIntegration: true,
contextIsolation: false,
enableRemoteModule: true,
}
});
Try this in the render.js file, using "electron": "10.2.0":
const { desktopCapturer, remote } = require('electron');
const { writeFile } = require('fs');
const { dialog, Menu } = remote;
//Buttons
const videoElement = document.querySelector('video');
const startBtn = document.getElementById('startBtn');
const stopBtn = document.getElementById('stopBtn');
const videoSelectBtn = document.getElementById('videoSelectBtn');
videoSelectBtn.onclick = getVideoSources;
//Get all available video sources
async function getVideoSources() {
const inputSources = await desktopCapturer.getSources({
types: ['window', 'screen'],
});
const videoOptionsMenu = Menu.buildFromTemplate(
inputSources.map((source) => {
return {
label: source.name,
click: () => selectSource(source),
};
})
);
videoOptionsMenu.popup();
}
let mediaRecorder; //Mediarecorder instance to capture footage
const recordedChunks = [];
async function selectSource(source) {
videoSelectBtn.innerText = source.name;
const constraints = {
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: source.id,
},
},
};
//Create a stream
const stream = await navigator.mediaDevices.getUserMedia(constraints);
//Preview the source in a video element
videoElement.srcObject = stream;
videoElement.play();
//Create the Media Recorder
const options = { mimeType: 'video/webm; codecs=vp9' };
mediaRecorder = new MediaRecorder(stream, options);
//Register Event Handlers
mediaRecorder.ondataavailable = handleAvailableData;
mediaRecorder.onstop = handleStop;
}
async function handleAvailableData(e) {
console.log('Video data available');
recordedChunks.push(e.data);
}
//Save video on stop
async function handleStop(e) {
const blob = new Blob(recordedChunks, {
type: 'video/webm; codecs=vp9',
});
const buffer = Buffer.from(await blob.arrayBuffer());
const { filePath } = await dialog.showSaveDialog({
buttonLabel: 'Save Video',
defaultPath: `vid-${Date.now()}.webm`
})
console.log(filePath);
writeFile(filePath, buffer, () => console.log('Saved Successfully'))
}

send data file to another computer with RTCPeerConnection?

How can I change the code so that the computer1 code connects to the code on computer2? (computer1 and computer2 are not the same computer, but they are on the same network.)
It works locally, but not when it runs on two different computers.
The computer1 and computer2 code used for the connection is defined below.
This is the code that does the networking on computer1:
async function createConnection() {
abortButton.disabled = false;
sendFileButton.disabled = true;
localConnection = new RTCPeerConnection();//this is the line I think I need to change
console.log('Created local peer connection object localConnection');
sendChannel = localConnection.createDataChannel('sendDataChannel');
sendChannel.binaryType = 'arraybuffer';
console.log('Created send data channel');
sendChannel.addEventListener('open', onSendChannelStateChange);
sendChannel.addEventListener('close', onSendChannelStateChange);
sendChannel.addEventListener('error', onError);
localConnection.addEventListener('icecandidate', async event => {
console.log('Local ICE candidate: ', event.candidate);
await localConnection.addIceCandidate(event.candidate);
});
This is the part of the program that receives the request and the file data on computer2:
async function server(){
remoteConnection = new RTCPeerConnection();
alert("start");
console.log('Created remote peer connection object remoteConnection');
remoteConnection.addEventListener('icecandidate', async event => {
console.log('Remote ICE candidate: ', event.candidate);
await localConnection.addIceCandidate(event.candidate);
});
remoteConnection.addEventListener('datachannel', receiveChannelCallback);
}
The code that I modified (main.js):
/* eslint no-unused-expressions: 0 */
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
'use strict';
let localConnection;
let remoteConnection;
let sendChannel;
let receiveChannel;
let fileReader;
const bitrateDiv = document.querySelector('div#bitrate');
const fileInput = document.querySelector('input#fileInput');
const abortButton = document.querySelector('button#abortButton');
const downloadAnchor = document.querySelector('a#download');
const sendProgress = document.querySelector('progress#sendProgress');
const receiveProgress = document.querySelector('progress#receiveProgress');
const statusMessage = document.querySelector('span#status');
const sendFileButton = document.querySelector('button#sendFile');
let receiveBuffer = [];
let receivedSize = 0;
let bytesPrev = 0;
let timestampPrev = 0;
let timestampStart;
let statsInterval = null;
let bitrateMax = 0;
server();
sendFileButton.addEventListener('click', () => createConnection());
fileInput.addEventListener('change', handleFileInputChange, false);
abortButton.addEventListener('click', () => {
if (fileReader && fileReader.readyState === 1) {
console.log('Abort read!');
fileReader.abort();
}
});
async function handleFileInputChange() {
const file = fileInput.files[0];
if (!file) {
console.log('No file chosen');
} else {
sendFileButton.disabled = false;
}
}
async function server(){
//const servers = {
//iceServers: [
// {
// urls: ['stun:stun1.l.google.com:19302', //'stun:stun2.l.google.com:19302'],
// },
//],
//iceCandidatePoolSize: 10,
//};
remoteConnection = new RTCPeerConnection();
alert("start");
console.log('Created remote peer connection object remoteConnection');
remoteConnection.addEventListener('icecandidate', async event => {
console.log('Remote ICE candidate: ', event.candidate);
await localConnection.addIceCandidate(event.candidate);
});
remoteConnection.addEventListener('datachannel', receiveChannelCallback);
}
async function createConnection() {
abortButton.disabled = false;
sendFileButton.disabled = true;
//const servers = {
//iceServers: [
// {
// urls: ['stun:stun1.l.google.com:19302', //'stun:stun2.l.google.com:19302'],
// },
//],
//iceCandidatePoolSize: 10,
//};
localConnection = new RTCPeerConnection();
console.log('Created local peer connection object localConnection');
sendChannel = localConnection.createDataChannel('sendDataChannel');
sendChannel.binaryType = 'arraybuffer';
console.log('Created send data channel');
sendChannel.addEventListener('open', onSendChannelStateChange);
sendChannel.addEventListener('close', onSendChannelStateChange);
sendChannel.addEventListener('error', onError);
localConnection.addEventListener('icecandidate', async event => {
console.log('Local ICE candidate: ', event.candidate);
await localConnection.addIceCandidate(event.candidate);
});
try {
const offer = await localConnection.createOffer();
await gotLocalDescription(offer);
} catch (e) {
console.log('Failed to create session description: ', e);
}
fileInput.disabled = true;
}
function sendData() {
const file = fileInput.files[0];
console.log(`File is ${[file.name, file.size, file.type, file.lastModified].join(' ')}`);
// Handle 0 size files.
statusMessage.textContent = '';
downloadAnchor.textContent = '';
if (file.size === 0) {
bitrateDiv.innerHTML = '';
statusMessage.textContent = 'File is empty, please select a non-empty file';
closeDataChannels();
return;
}
sendProgress.max = file.size;
receiveProgress.max = file.size;
const chunkSize = 16384;
fileReader = new FileReader();
let offset = 0;
fileReader.addEventListener('error', error => console.error('Error reading file:', error));
fileReader.addEventListener('abort', event => console.log('File reading aborted:', event));
fileReader.addEventListener('load', e => {
console.log('FileRead.onload ', e);
sendChannel.send(e.target.result);
offset += e.target.result.byteLength;
sendProgress.value = offset;
if (offset < file.size) {
readSlice(offset);
}
});
const readSlice = o => {
console.log('readSlice ', o);
const slice = file.slice(offset, o + chunkSize);
fileReader.readAsArrayBuffer(slice);
};
readSlice(0);
}
function closeDataChannels() {
console.log('Closing data channels');
sendChannel.close();
console.log(`Closed data channel with label: ${sendChannel.label}`);
sendChannel = null;
if (receiveChannel) {
receiveChannel.close();
console.log(`Closed data channel with label: ${receiveChannel.label}`);
receiveChannel = null;
}
localConnection.close();
remoteConnection.close();
localConnection = null;
remoteConnection = null;
console.log('Closed peer connections');
// re-enable the file select
fileInput.disabled = false;
abortButton.disabled = true;
sendFileButton.disabled = false;
}
async function gotLocalDescription(desc) {
await localConnection.setLocalDescription(desc);
console.log(`Offer from localConnection\n ${desc.sdp}`);
await remoteConnection.setRemoteDescription(desc);
try {
const answer = await remoteConnection.createAnswer();
await gotRemoteDescription(answer);
} catch (e) {
console.log('Failed to create session description: ', e);
}
}
async function gotRemoteDescription(desc) {
await remoteConnection.setLocalDescription(desc);
console.log(`Answer from remoteConnection\n ${desc.sdp}`);
await localConnection.setRemoteDescription(desc);
}
function receiveChannelCallback(event) {
console.log('Receive Channel Callback');
receiveChannel = event.channel;
receiveChannel.binaryType = 'arraybuffer';
receiveChannel.onmessage = onReceiveMessageCallback;
receiveChannel.onopen = onReceiveChannelStateChange;
receiveChannel.onclose = onReceiveChannelStateChange;
receivedSize = 0;
bitrateMax = 0;
downloadAnchor.textContent = '';
downloadAnchor.removeAttribute('download');
if (downloadAnchor.href) {
URL.revokeObjectURL(downloadAnchor.href);
downloadAnchor.removeAttribute('href');
}
}
function onReceiveMessageCallback(event) {
console.log(`Received Message ${event.data.byteLength}`);
receiveBuffer.push(event.data);
receivedSize += event.data.byteLength;
receiveProgress.value = receivedSize;
// we are assuming that our signaling protocol told
// about the expected file size (and name, hash, etc).
const file = fileInput.files[0];
if (receivedSize === file.size) {
const received = new Blob(receiveBuffer);
receiveBuffer = [];
downloadAnchor.href = URL.createObjectURL(received);
downloadAnchor.download = file.name;
downloadAnchor.textContent =
`Click to download '${file.name}' (${file.size} bytes)`;
downloadAnchor.style.display = 'block';
const bitrate = Math.round(receivedSize * 8 /
((new Date()).getTime() - timestampStart));
bitrateDiv.innerHTML =
`<strong>Average Bitrate:</strong> ${bitrate} kbits/sec (max: ${bitrateMax} kbits/sec)`;
if (statsInterval) {
clearInterval(statsInterval);
statsInterval = null;
}
closeDataChannels();
}
}
function onSendChannelStateChange() {
if (sendChannel) {
const {readyState} = sendChannel;
console.log(`Send channel state is: ${readyState}`);
if (readyState === 'open') {
sendData();
}
}
}
function onError(error) {
if (sendChannel) {
console.error('Error in sendChannel:', error);
return;
}
console.log('Error in sendChannel which is already closed:', error);
}
async function onReceiveChannelStateChange() {
if (receiveChannel) {
const readyState = receiveChannel.readyState;
console.log(`Receive channel state is: ${readyState}`);
if (readyState === 'open') {
timestampStart = (new Date()).getTime();
timestampPrev = timestampStart;
statsInterval = setInterval(displayStats, 500);
await displayStats();
}
}
}
// display bitrate statistics.
async function displayStats() {
if (remoteConnection && remoteConnection.iceConnectionState === 'connected') {
const stats = await remoteConnection.getStats();
let activeCandidatePair;
stats.forEach(report => {
if (report.type === 'transport') {
activeCandidatePair = stats.get(report.selectedCandidatePairId);
}
});
if (activeCandidatePair) {
if (timestampPrev === activeCandidatePair.timestamp) {
return;
}
// calculate current bitrate
const bytesNow = activeCandidatePair.bytesReceived;
const bitrate = Math.round((bytesNow - bytesPrev) * 8 /
(activeCandidatePair.timestamp - timestampPrev));
bitrateDiv.innerHTML = `<strong>Current Bitrate:</strong> ${bitrate} kbits/sec`;
timestampPrev = activeCandidatePair.timestamp;
bytesPrev = bytesNow;
if (bitrate > bitrateMax) {
bitrateMax = bitrate;
}
}
}
}
Thanks for helping.
You can read about peer connections: every peer connection is handled by an RTCPeerConnection object, which defines how the connection is set up and should include the information about the ICE servers to use.
You can define the ICE servers as follows:
localConnection = new RTCPeerConnection({iceServers: [{ urls: "stun:" + ip + ":8003" }]})
or
localConnection = new RTCPeerConnection({iceServers: [{ urls: "stun:" + ip + ":" + port }]})
or
var servers = {
iceTransportPolicy: 'relay',
iceServers: [{
urls: "turn:[" + ip + "]:3478",
username: "username",
credential: "password"
}
]};
var localConnection = new RTCPeerConnection(servers);
Moreover, you can find the full code in the WebRTC samples repository, under the folder named filetransfer.
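Note that configuring ICE servers alone will not make this code work across machines: the snippet above adds its own candidates via addIceCandidate and hands the offer straight to remoteConnection, which only works while both peers live in the same page. Between two computers, the offer, answer, and candidates have to travel over a real signaling channel. A minimal sketch of the sending side, using a hypothetical WebSocket signaling server at ws://signaling.example:8080:

// Hypothetical signaling: relay SDP and ICE candidates over a WebSocket.
const signaling = new WebSocket('ws://signaling.example:8080'); // assumption

localConnection = new RTCPeerConnection({
  iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
});

// Send our candidates to the other computer instead of adding them locally.
localConnection.addEventListener('icecandidate', event => {
  signaling.send(JSON.stringify({type: 'candidate', candidate: event.candidate}));
});

signaling.onmessage = async ({ data }) => {
  const msg = JSON.parse(data);
  if (msg.type === 'answer') {
    await localConnection.setRemoteDescription(msg.answer);
  } else if (msg.type === 'candidate' && msg.candidate) {
    await localConnection.addIceCandidate(msg.candidate);
  }
};

async function sendOffer() {
  const offer = await localConnection.createOffer();
  await localConnection.setLocalDescription(offer);
  signaling.send(JSON.stringify({type: 'offer', offer}));
}

The receiving side mirrors this: set the remote offer, create an answer, send it back, and add the sender's candidates as they arrive.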

MediaRecorder only record one frame

I'm developing a React Native app where I record a canvas and make 5-second video files to upload to the server. Everything works great, except all my webm files have only one frame. Here is my code. Please help me understand what's wrong here. Thanks!
initMediaRecorder = () => {
const promise = new Promise((resolve) => {
const stream = this.selfieCanvas.captureStream(10);
let mediaRecorder = null;
let options;
if (MediaRecorder.isTypeSupported('video/webm; codecs=vp9')) {
options = { mimeType: 'video/webm; codecs=vp9', videoBitsPerSecond: 2500000 };
} else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
options = { mimeType: 'video/webm; codecs=vp8', videoBitsPerSecond: 2500000 };
} else {
options = 'video/vp8'; // Chrome 47
}
try {
mediaRecorder = new MediaRecorder(stream, options);
} catch (e0) {
resolve(null);
return; // don't keep using a null recorder below
}
mediaRecorder.ondataavailable = (event) => {
console.log(`LOG - Data available ${event.data.size}`);
this.sendToSaveVideo(event.data);
};
resolve(mediaRecorder);
});
return promise;
}
captureVideo = async (oldMediaRecorder) => {
this.initMediaRecorder().then((mediaRecorder) => {
if (oldMediaRecorder !== null && typeof oldMediaRecorder !== 'undefined') {
// I don't want to stop previous recorder until I init the next recorder
oldMediaRecorder.stop();
}
if (mediaRecorder !== null) {
mediaRecorder.start();
}
this.captureVideoTimer = setTimeout(() => {
this.captureVideo(mediaRecorder);
}, 5000);
});
}
sendToSaveVideo = async (eventData) => {
const blobChunk = [];
blobChunk.push(eventData);
const video = new Blob(blobChunk, { type: 'video/webm' });
saveBlobToCloud(video); // save the file to cloud
}
You are not passing a timeslice to start(), so ondataavailable (probably) fires only once, when the recorder stops. Also, try to avoid using ondataavailable like that; onstop exists exactly for that purpose.
If this doesn't work, check whether the canvas is actually changing frames.
initMediaRecorder = () => {
const promise = new Promise((resolve) => {
const stream = this.selfieCanvas.captureStream(10);
let chunks = [];
let mediaRecorder = null;
let options;
if (MediaRecorder.isTypeSupported('video/webm; codecs=vp9')) {
options = { mimeType: 'video/webm; codecs=vp9', videoBitsPerSecond: 2500000 };
} else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
options = { mimeType: 'video/webm; codecs=vp8', videoBitsPerSecond: 2500000 };
} else {
options = 'video/vp8'; // Chrome 47
}
try {
mediaRecorder = new MediaRecorder(stream, options);
} catch (e0) {
resolve(null);
return;
}
mediaRecorder.ondataavailable = (event) => {
chunks.push(event.data);
};
mediaRecorder.onstop = (event) => {
this.sendToSaveVideo(chunks);
};
resolve(mediaRecorder);
});
return promise;
}
captureVideo = async (oldMediaRecorder) => {
this.initMediaRecorder().then((mediaRecorder) => {
if (oldMediaRecorder !== null && typeof oldMediaRecorder !== 'undefined') {
// I don't want to stop previous recorder until I init the next recorder
oldMediaRecorder.stop();
}
if (mediaRecorder !== null) {
// make ondataavailable run every second.
// ondataavailable should not be used as a stop!
mediaRecorder.start(1000);
}
this.captureVideoTimer = setTimeout(() => {
this.captureVideo(mediaRecorder);
}, 5000);
});
}
sendToSaveVideo = async (chunks) => {
const video = new Blob(chunks, { type: 'video/webm' });
saveBlobToCloud(video); // save the file to cloud
}
Edit
Also, you do not need to create a new MediaRecorder every single time.
Something like this would be better:
const stream = selfieCanvas.captureStream(10);
let chunks = [];
let mediaRecorder = null;
let options;
if (MediaRecorder.isTypeSupported('video/webm; codecs=vp9')) {
options = { mimeType: 'video/webm; codecs=vp9', videoBitsPerSecond: 2500000 };
} else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
options = { mimeType: 'video/webm; codecs=vp8', videoBitsPerSecond: 2500000 };
} else {
options = 'video/vp8'; // Chrome 47
}
try {
mediaRecorder = new MediaRecorder(stream, options);
} catch (e0) {
console.error('Failed to create MediaRecorder:', e0);
}
mediaRecorder.ondataavailable = (event) => {
chunks.push(event.data);
};
mediaRecorder.onstop = (event) => {
const video = new Blob(chunks, { type: 'video/webm' });
saveBlobToCloud(video);
chunks = [];
};
// make ondataavailable deliver a chunk every second
mediaRecorder.start(1000);
// a video is made for every 5 seconds
setInterval(function(){
mediaRecorder.stop();
// ondataavailable should be running more often than stop
mediaRecorder.start(1000);
}, 5000);
Here are some other useful links:
https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/dataavailable_event
MediaRecorder ondataavailable work successfully once

Original audio of tab gets muted while using chrome.tabCapture.capture() and MediaRecorder()

When I use chrome.tabCapture.capture() with the MediaRecorder API to record a stream, the original audio of the tab I am capturing gets muted, but the audio comes through OK in the recorded stream. I want the audio in the tab to keep playing normally.
class Recorder {
constructor(onChunksAvailable) {
this.chunks = [];
this.active = false;
this.callback = onChunksAvailable;
}
start(stream) {
if (this.active) {
throw new Error("recorder is already running");
}
this.recorder = new MediaRecorder(stream, {
mimeType: "audio/webm",
});
this.recorder.onstop = () => {
stream.getAudioTracks()[0].stop();
this.callback([...this.chunks]);
setTimeout(() => {
this.chunks = [];
});
this.active = false;
};
this.recorder.ondataavailable = (event) => this.chunks.push(event.data);
this.active = true;
this.recorder.start();
}
stop() {
if (!this.active) {
throw new Error("recorder is already stop");
} else {
this.recorder.stop();
}
}
}
let rec = new Recorder(async (chunks) => {
//using chunks then to get the stream
});
chrome.tabCapture.capture(
{
audio: true,
video: false,
},
function (stream) {
rec.start(stream);
});
Forgive the lack of documentation, as I last played with these APIs years ago, but MDN has some material.
In my case, adding these three lines to the start function fixed it.
this.context = new AudioContext();
this.stream = this.context.createMediaStreamSource(stream);
this.stream.connect(this.context.destination);
class Recorder {
constructor(onChunksAvailable) {
this.chunks = [];
this.active = false;
this.callback = onChunksAvailable;
this.context = new AudioContext();
}
start(stream) {
if (this.active) {
throw new Error("recorder is already running");
}
// Reconnect the stream to actual output
this.stream = this.context.createMediaStreamSource(stream);
this.stream.connect(this.context.destination);
this.recorder = new MediaRecorder(stream, {
mimeType: "audio/webm",
});
this.recorder.onstop = () => {
stream.getAudioTracks()[0].stop();
this.callback([...this.chunks]);
setTimeout(() => {
this.chunks = [];
});
this.active = false;
};
this.recorder.ondataavailable = (event) => this.chunks.push(event.data);
this.active = true;
this.recorder.start();
}
stop() {
if (!this.active) {
throw new Error("recorder is already stop");
} else {
this.recorder.stop();
}
}
}
let rec = new Recorder(async (chunks) => {
//using chunks then to get the stream
});
chrome.tabCapture.capture(
{
audio: true,
video: false,
},
function (stream) {
rec.start(stream);
})
Sorry for the lack of details, but I believe that when you start an audio capture, it disconnects the stream from the default output (speakers). By creating a secondary MediaStreamSource and connecting it to the default output (AudioContext.destination), you can let the stream continue playing through the speakers while it also feeds your recorder.
Sources
MDN: AudioContext
MDN: MediaStreamSource
Chrome extension I made 2 years ago
