MediaRecorder class Not Available in Electron APP - javascript

I was following Fireship's Electron tutorial to build a desktop capturer.
One thing I know is that there is a big gap between the Electron version I used and the one in the tutorial.
The only problem I am having is during the instantiation of the MediaRecorder class:
the class is not recognized at all.
Is there a way I can fix it?
Render.js - Source Code
// Buttons
const videoElement = document.querySelector('video');
const startBtn = document.getElementById('startBtn');
startBtn.onclick = e => {
mediaRecorder.start();
startBtn.classList.add('is-danger');
startBtn.innerText = 'Recording';
};
const stopBtn = document.getElementById('stopBtn');
stopBtn.onclick = e => {
mediaRecorder.stop();
startBtn.classList.remove('is-danger');
startBtn.innerText = 'Start';
};
const videoSelectBtn = document.getElementById('videoSelectBtn');
videoSelectBtn.onclick = getVideoSources;
const { desktopCapturer, remote } = require('electron');
const { dialog, Menu } = remote;
// Get the available video sources
async function getVideoSources() {
const inputSources = await desktopCapturer.getSources({
types: ['window', 'screen']
});
const videoOptionsMenu = Menu.buildFromTemplate(
inputSources.map(source => {
return {
label: source.name,
click: () => selectSource(source)
};
})
);
videoOptionsMenu.popup();
}
let mediaRecorder; //MediaRecorder instance to capture footage
const recordedChunks = [];
// Change the videoSources window to record
async function selectSource(source) {
videoSelectBtn.innerText = source.name;
const constraints = {
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: source.id
}
}
};
// Create a Stream
const stream = await navigator.mediaDevices.getUserMedia(constraints);
//Preview the source in a video element
videoElement.srcObject = stream;
videoElement.play();
// Create the Media Recorder
const options = { mimeType: 'video/webm; codecs=vp9' };
mediaRecorder = new MediaRecorder(stream, options);
// Register Event Handlers
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.onStop = handleStop;
}
// Captures all recorded chunks
function handleDataAvailable(e) {
console.log('video data available')
recordedChunks.push(e.data);
}
const { writeFile } = require('fs');
//Saves the video file on stop
async function handleStop(e) {
const blob = new Blob(recordedChunks,{
type: 'video/webm; codecs=vp9'
});
const buffer = Buffer.from(await blob.arrayBuffer());
const { filePath } = await dialog.showSaveDialog({
buttonLabel: 'Save Video',
defaultPath: `vid -${Date.now()}.webm`
});
console.log(filePath);
writeFile(filePath, buffer, () => console.log('Video Saved Successfully!'));
}
Web Preferences - Index.js
const mainWindow = new BrowserWindow({
width: 800,
height: 600,
webPreferences: {
nodeIntegration: true,
contextIsolation: false,
enableRemoteModule: true,
}
});

Try this in the render.js file, using "electron": "10.2.0":
const { desktopCapturer, remote } = require('electron');
const { writeFile } = require('fs');
const { dialog, Menu } = remote; // dialog is a main-process module; in the renderer it must come from remote
//Buttons
const videoElement = document.querySelector('video');
const startBtn = document.getElementById('startBtn');
const stopBtn = document.getElementById('stopBtn');
const videoSelectBtn = document.getElementById('videoSelectBtn');
videoSelectBtn.onclick = getVideoSources; // assign the function reference; don't invoke it here
//Get all available video sources
async function getVideoSources() {
const inputSources = await desktopCapturer.getSources({
types: ['window', 'screen'],
});
const videoOptionsMenu = Menu.buildFromTemplate(
inputSources.map((source) => {
return {
label: source.name,
click: () => selectSource(source),
};
})
);
videoOptionsMenu.popup();
}
let mediaRecorder; // MediaRecorder instance to capture footage
const recordedChunks = [];
async function selectSource(source) {
videoSelectBtn.innerText = source.name;
const constraints = {
audio: false,
video: {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: source.id,
},
},
};
//Create a stream
const stream = await navigator.mediaDevices.getUserMedia(constraints);
//Preview the source in a video element
videoElement.srcObject = stream;
videoElement.play();
//Create the Media Recorder
const options = { mimeType: 'video/webm; codecs=vp9' };
mediaRecorder = new MediaRecorder(stream, options); // the global MediaRecorder class, not the variable
//Register Event Handlers
mediaRecorder.ondataavailable = handleAvailableData;
mediaRecorder.onstop = handleStop;
}
async function handleAvailableData(e) {
console.log('Video data available');
recordedChunks.push(e.data);
}
//Save video on stop
async function handleStop(e) {
const blob = new Blob(recordedChunks, {
type: 'video/webm; codecs=vp9',
});
const buffer = Buffer.from(await blob.arrayBuffer());
const { filePath } = await dialog.showSaveDialog({
buttonLabel: 'Save Video',
defaultPath: `vid-${Date.now()}.webm`
})
console.log(filePath);
writeFile(filePath, buffer, () => console.log('Saved Successfully'))
}
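One hedged hardening note: 'video/webm; codecs=vp9' is not guaranteed on every Chromium build, and the recording snippets further down this page probe support before picking a mimeType. A minimal sketch of the same pattern applied here:
// Prefer VP9, but fall back to plain WebM if this build doesn't support it
const mimeType = MediaRecorder.isTypeSupported('video/webm; codecs=vp9')
    ? 'video/webm; codecs=vp9'
    : 'video/webm';
mediaRecorder = new MediaRecorder(stream, { mimeType });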

Related

Screen share to the connected peer with WebRTC

I am exploring WebRTC and trying to build a Zoom-like web application. I have successfully shared my screen and can show it on my own device, but I could not show the shared screen to the connected peers. I want to share my screen with everyone who is connected to the same room. I don't know how to do that. Can anyone help me with this issue?
Below is the JavaScript code:
Script.js
const socket = io('/')
const videoGrid = document.getElementById('video-grid')
const myPeer = new Peer(undefined, {
host: '/',
port: '8000'
})
const myVideo = document.createElement('video')
myVideo.muted = true
const peers = {}
let videoStream
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(stream => {
videoStream = stream
addVideoStream(myVideo, stream)
myPeer.on('call', call => {
call.answer(stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
})
socket.on('user-connected', userId => {
connectToNewUser(userId, stream)
})
socket.on('user-disconnected', userId => {
if (peers[userId]) peers[userId].close()
})
})
myPeer.on('open', id => {
socket.emit('join-room', ROOM_ID, id)
})
function connectToNewUser(userId, stream) {
const call = myPeer.call(userId, stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
call.on('close', () => {
video.remove()
})
peers[userId] = call
}
function addVideoStream(video, stream) {
video.srcObject = stream
video.addEventListener('loadedmetadata', () => {
video.play()
})
videoGrid.append(video)
}
//====================================== Front-end styling logics =================================
const audio = document.getElementById('audio')
const audio_mute = document.getElementById('audio-mute')
const video = document.getElementById('video')
const video_mute = document.getElementById('video-mute')
const screen_share = document.getElementById('screen-share')
const record = document.getElementById('record')
const record_stop = document.getElementById('record-stop')
const leave_btn = document.getElementById('leave-btn')
const message_view_box = document.getElementById('message-view-box')
audio.addEventListener('click', function () {
const track = videoStream.getAudioTracks()[0].enabled
console.log(videoStream.getAudioTracks());
if (track) {
videoStream.getAudioTracks()[0].enabled = false
}
else {
videoStream.getAudioTracks()[0].enabled = true
}
audio.style.display = 'none';
audio_mute.style.display = 'inline-block';
})
audio_mute.addEventListener('click', function () {
const track = videoStream.getAudioTracks()[0].enabled
console.log(videoStream.getAudioTracks());
if (track) {
videoStream.getAudioTracks()[0].enabled = false
}
else {
videoStream.getAudioTracks()[0].enabled = true
}
audio_mute.style.display = 'none';
audio.style.display = 'inline-block';
})
video.addEventListener('click', function () {
const track = videoStream.getVideoTracks()[0].enabled
console.log(videoStream.getVideoTracks()[0].enabled);
if (track) {
videoStream.getVideoTracks()[0].enabled = false
}
else {
videoStream.getVideoTracks()[0].enabled = true
}
video.style.display = 'none';
video_mute.style.display = 'inline-block';
})
video_mute.addEventListener('click', function () {
const track = videoStream.getVideoTracks()[0].enabled
console.log(videoStream.getVideoTracks()[0].enabled);
if (track) {
videoStream.getVideoTracks()[0].enabled = false
}
else {
videoStream.getVideoTracks()[0].enabled = true
}
video_mute.style.display = 'none';
video.style.display = 'inline-block';
})
// ============================= Chat box logics ===============================
let chat_box = document.getElementById('chat_box');
let chat_box_input = document.getElementById('chat_box_input');
let send_icon = document.getElementById('send-icon');
chat_box.addEventListener('submit', function (e) {
e.preventDefault()
// console.log(e.target.chat_box_input.value);
if (chat_box_input.value) {
socket.emit('chat message', chat_box_input.value);
chat_box_input.value = '';
}
// e.target.chat_box_input.value = ''
})
socket.on('chat message', function (msg) {
const item = document.createElement('li');
item.textContent = msg;
message_view_box.appendChild(item);
message_view_box.scrollTop = message_view_box.scrollHeight - message_view_box.clientHeight;
});
// =============================================================================
//================================== Screen share logics =======================
const videoElem = document.getElementById('screen')
screen_share.addEventListener('click',async function () {
startCapture();
})
// Options for getDisplayMedia()
const displayMediaOptions = {
video: {
cursor: "always"
},
audio: true
};
async function startCapture() {
try {
videoElem.srcObject = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
// const videoStreamTrack = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
// call.peerConnection.getSenders()[1].replaceTrack(videoStreamTrack)
dumpOptionsInfo();
} catch (err) {
console.error(`Error: ${err}`);
}
}
function dumpOptionsInfo() {
const videoTrack = videoElem.srcObject.getVideoTracks()[0];
}
//==============================================================================
//==============================================================================
record.addEventListener('click', async function () {
record.style.display = 'none';
record_stop.style.display = 'inline-block';
let stream = await navigator.mediaDevices.getDisplayMedia({
video: true,
audio: true
})
//needed for better browser support
const mime = MediaRecorder.isTypeSupported("video/webm; codecs=vp9")
? "video/webm; codecs=vp9"
: "video/webm"
let mediaRecorder = new MediaRecorder(stream, {
mimeType: mime
})
let chunks = []
mediaRecorder.addEventListener('dataavailable', function(e) {
chunks.push(e.data)
})
mediaRecorder.addEventListener('stop', function(){
let blob = new Blob(chunks, {
type: chunks[0].type
})
let url = URL.createObjectURL(blob)
// let video = document.querySelector("video")
// video.src = url
let a = document.createElement('a')
a.href = url
a.download = 'video.webm'
a.click()
})
//we have to start the recorder manually
mediaRecorder.start()
})
record_stop.addEventListener('click', function () {
record_stop.style.display = 'none';
record.style.display = 'inline-block';
mediaRecorder.stop();
})
//==============================================================================
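The commented-out replaceTrack() lines inside startCapture() hint at one way to push the captured screen to peers that are already connected. Below is a minimal sketch of that idea; it reuses the peers map of PeerJS calls and the videoStream camera stream from above, and the sender lookup by track kind is my own assumption:
//====================== Sketch: send the captured screen to connected peers ======================
async function shareScreenWithPeers() {
    const screenStream = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions)
    const screenTrack = screenStream.getVideoTracks()[0]
    // Swap the outgoing camera track for the screen track on every open call
    for (const call of Object.values(peers)) {
        const sender = call.peerConnection.getSenders()
            .find(s => s.track && s.track.kind === 'video')
        if (sender) await sender.replaceTrack(screenTrack)
    }
    // When the user stops sharing, switch back to the camera track
    screenTrack.onended = () => {
        const camTrack = videoStream.getVideoTracks()[0]
        for (const call of Object.values(peers)) {
            const sender = call.peerConnection.getSenders()
                .find(s => s.track && s.track.kind === 'video')
            if (sender) sender.replaceTrack(camTrack)
        }
    }
}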

How do you initialize a WebRTC call using the transceiver API but only enable audio & video later, after signaling has completed?

I am trying to first connect two WebRTC peers. Once the connection is established I want to give the users on both sides the option to enable/disable video and audio. This should happen without triggering the signaling process again.
I do run into an issue though: if I call replaceTrack(audioTrack), the remote peer will not play back audio until I also call replaceTrack(videoTrack).
I am unsure why this happens and cannot find any clue in the documentation. It does play fine once I also attach the video track after 10 seconds; without the video track there is no audio playback. Why?
function createVideoElement() {
const vid = document.createElement("video")
vid.width = 320;
vid.controls = true;
vid.autoplay = true;
const root = document.body;
document.body.appendChild(vid);
return vid;
}
async function RunTestInit() {
console.log("get media access");
const p1_stream_out = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
const p2_stream_out = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
console.log("stream setup");
const p1_stream_in = new MediaStream();
const p2_stream_in = new MediaStream();
const p1_video_in = createVideoElement();
const p2_video_in = createVideoElement();
console.log("peer setup");
const p1 = new RTCPeerConnection();
const p2 = new RTCPeerConnection();
const p1_tca = p1.addTransceiver("audio", {
direction: "sendrecv"
});
const p1_tcv = p1.addTransceiver("video", {
direction: "sendrecv"
});
p1.onicecandidate = (ev) => {
p2.addIceCandidate(ev.candidate);
}
p2.onicecandidate = (ev) => {
p1.addIceCandidate(ev.candidate);
}
p1.onconnectionstatechange = (ev) => {
console.log("p1 state: ", p1.connectionState);
}
p2.onconnectionstatechange = async (ev) => {
console.log("p2 state: ", p2.connectionState);
}
p1.onnegotiationneeded = () => {
//triggers once
console.warn("p1.onnegotiationneeded");
}
p2.onnegotiationneeded = () => {
//should never trigger
console.warn("p2.onnegotiationneeded");
}
p1.ontrack = (ev) => {
console.log("p1.ontrack", ev);
p1_stream_in.addTrack(ev.track);
p1_video_in.srcObject = p1_stream_in;
}
p2.ontrack = (ev) => {
console.log("p2.ontrack", ev);
p2_stream_in.addTrack(ev.track);
p2_video_in.srcObject = p2_stream_in;
}
console.log("signaling");
const offer = await p1.createOffer();
await p1.setLocalDescription(offer);
await p2.setRemoteDescription(offer);
const p2_tca = p2.getTransceivers()[0];
const p2_tcv = p2.getTransceivers()[1];
p2_tca.direction = "sendrecv"
p2_tcv.direction = "sendrecv"
const answer = await p2.createAnswer();
await p2.setLocalDescription(answer);
await p1.setRemoteDescription(answer);
console.log("signaling done");
//send audio from p2 to p1 (direction doesn't matter)
//after this runs nothing will happen and no audio plays
setTimeout(async () => {
await p2_tca.sender.replaceTrack(p2_stream_out.getAudioTracks()[0]);
console.warn("audio playback should start now but nothing happens");
}, 1000);
//audio starts playing once this runs
setTimeout(async () => {
//uncomment this and it works just fine
await p2_tcv.sender.replaceTrack(p2_stream_out.getVideoTracks()[0]);
console.warn("now audio playback starts");
}, 10000);
}
function start() {
setTimeout(async () => {
console.log("Init test case");
await RunTestInit();
}, 1);
}
Same example in a JSFiddle (needs camera and microphone access):
https://jsfiddle.net/vnztcx5p/5/
Once audio works this will cause an echo.
That is a known issue. https://bugs.chromium.org/p/chromium/issues/detail?id=813243 and https://bugs.chromium.org/p/chromium/issues/detail?id=403710 have some background information.
In a nutshell, the video element expects you to send audio and video data, and these need to be synchronized. But you don't send any video data, and the element needs to fire a loadedmetadata and a resize event because that is what the specification says. Hence it will block audio indefinitely.
You can enable/disable audio and video tracks, so you don't have to renegotiate. Note that these tracks have to be added before negotiation starts. You can achieve it with:
mediaStream.getAudioTracks()[0].enabled = false; // or true to enable it.
Or if you want to disable video:
mediaStream.getVideoTracks()[0].enabled = false; // or true to enable it.
Here is the documentation:
getAudioTracks()
getVideoTracks()
I got this working. It looks like it is more a problem with how HTMLVideoElement works than with WebRTC.
If I set
p1_video_in.srcObject = p1_stream_in;
p2_video_in.srcObject = p2_stream_in;
before I add the tracks to the stream, it works.
The complete example looks like this:
function createVideoElement() {
const vid = document.createElement("video")
vid.width = 320;
vid.controls = true;
vid.autoplay = true;
const root = document.body;
document.body.appendChild(vid);
return vid;
}
async function RunTestInit() {
console.log("get media access");
const p1_stream_out = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
const p2_stream_out = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
console.log("stream setup");
const p1_stream_in = new MediaStream();
const p2_stream_in = new MediaStream();
const p1_video_in = createVideoElement();
const p2_video_in = createVideoElement();
p1_video_in.srcObject = p1_stream_in;
p2_video_in.srcObject = p2_stream_in;
console.log("peer setup");
const p1 = new RTCPeerConnection();
const p2 = new RTCPeerConnection();
const p1_tca = p1.addTransceiver("audio", {
direction: "sendrecv"
});
const p1_tcv = p1.addTransceiver("video", {
direction: "sendrecv"
});
p1.onicecandidate = (ev) => {
p2.addIceCandidate(ev.candidate);
}
p2.onicecandidate = (ev) => {
p1.addIceCandidate(ev.candidate);
}
p1.onconnectionstatechange = (ev) => {
console.log("p1 state: ", p1.connectionState);
}
p2.onconnectionstatechange = async (ev) => {
console.log("p2 state: ", p2.connectionState);
}
p1.onnegotiationneeded = () => {
//triggers once
console.warn("p1.onnegotiationneeded");
}
p2.onnegotiationneeded = () => {
//should never trigger
console.warn("p2.onnegotiationneeded");
}
p1.ontrack = (ev) => {
console.log("p1.ontrack", ev);
p1_stream_in.addTrack(ev.track);
}
p2.ontrack = (ev) => {
console.log("p2.ontrack", ev);
p2_stream_in.addTrack(ev.track);
}
console.log("signaling");
const offer = await p1.createOffer();
await p1.setLocalDescription(offer);
await p2.setRemoteDescription(offer);
const p2_tca = p2.getTransceivers()[0];
const p2_tcv = p2.getTransceivers()[1];
p2_tca.direction = "sendrecv"
p2_tcv.direction = "sendrecv"
const answer = await p2.createAnswer();
await p2.setLocalDescription(answer);
await p1.setRemoteDescription(answer);
console.log("signaling done");
//send audio from p2 to p1 (direction doesn't matter)
//after this runs nothing will happen and no audio plays
setTimeout(async () => {
await p2_tca.sender.replaceTrack(p2_stream_out.getAudioTracks()[0]);
console.warn("audio playback should start now but nothing happens");
}, 1000);
//audio starts playing once this runs
setTimeout(async () => {
//uncomment this and it works just fine
await p2_tcv.sender.replaceTrack(p2_stream_out.getVideoTracks()[0]);
console.warn("now audio playback starts");
}, 10000);
}
function start() {
setTimeout(async () => {
console.log("Init test case");
await RunTestInit();
}, 1);
}
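Since both peer connections live in the same page in this loopback test, the received audio also plays through the local speakers and feeds back into the microphone, which matches the echo mentioned above. A hedged workaround for the demo is simply muting one of the preview elements:
p2_video_in.muted = true;
or requesting echo cancellation in the getUserMedia constraints.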

MediaRecorder: Record from Microphones and Play audio from Element (music playback)

I'd like to record my voice together with some MP3 music that I choose via the media element. When I change the element's src, the new music isn't recorded; MediaRecorder only records the first track. I'd like to record all the music coming from the element, whatever its src is.
let constraintObj = {
audio: true,
video: false
};
navigator.mediaDevices
.getUserMedia(constraintObj)
.then((mediaStreamObj) => {
let start = document.getElementById("btnStart");
let stop = document.getElementById("btnStop");
let autostop = document.getElementById("autobtnStop");
let playAudio = document.getElementById("pma");
var playAudioStream = playAudio.captureStream();
let audioContext = new AudioContext();
const acsource = audioContext.createMediaElementSource(playAudio);
acsource && acsource.connect(audioContext.destination);
let dest = audioContext.createMediaStreamDestination();
let audioIn_01 = audioContext.createMediaStreamSource(mediaStreamObj);
let audioIn_02 = audioContext.createMediaStreamSource(playAudioStream);
audioIn_01.connect(dest);
audioIn_02 && audioIn_02.connect(dest);
let mediaRecorder = new MediaRecorder(dest.stream);
let chunks = [];
start.addEventListener("click", () => {
mediaRecorder.start();
console.log(mediaRecorder.state);
});
stop.addEventListener("click", () => {
mediaRecorder.stop();
});
autostop.addEventListener("click", () => {
if (mediaRecorder.state !== "inactive") {
mediaRecorder.stop();
}
});
mediaRecorder.ondataavailable = (ev) => {
chunks.push(ev.data);
if (selectedEpisodes !== null && sameDate <= 0) {
let blobFile = new Blob(chunks, { type: "audio/ogg; codecs=opus" });
chunks = [];
const datas = {
name,
episode_id,
record_datetime: record_datetime,
playtime_sec: playtime_sec,
files: blobFile,
};
dispatch(editEpisodeStatus(epSelected_ID, { status: "finished" }));
dispatch(createPodcast(datas));
}
};
mediaRecorder.onstop = () => {
let blobFile = new Blob(chunks, { type: "audio/ogg; codecs=opus" });
chunks = [];
const datas = {
name,
episode_id,
record_datetime: record_datetime,
playtime_sec: playtime_sec,
files: blobFile
};
dispatch(editEpisodeStatus(epSelected_ID, { status: "finished" }));
dispatch(createPodcast(datas));
};
})
.catch((err) => {
console.log("catch startLive", err.name, err.message);
});
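A hedged guess at the problem in the snippet above: the element is tapped twice, once via captureStream() and once via createMediaElementSource(), and createMediaElementSource() may only be called once per element; once it runs, the element's audio is rerouted into the Web Audio graph. A minimal sketch that routes everything through a single graph instead, assuming the same playAudio element and mediaStreamObj microphone stream as above:
// One Web Audio graph that mixes microphone + element audio
const ctx = new AudioContext();
const mixDest = ctx.createMediaStreamDestination();
const micSource = ctx.createMediaStreamSource(mediaStreamObj); // mic from getUserMedia
const musicSource = ctx.createMediaElementSource(playAudio);   // created exactly once
micSource.connect(mixDest);           // mic -> recording
musicSource.connect(mixDest);         // element audio -> recording
musicSource.connect(ctx.destination); // element audio -> speakers, stays audible
const recorder = new MediaRecorder(mixDest.stream);
// Changing playAudio.src later keeps feeding musicSource, so new tracks are recorded too.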

Uncaught (in promise) TypeError: Cannot read property 'buildFromTemplate' of undefined at HTMLButtonElement.getVideoSources

I followed along with the Fireship Electron tutorial and I get this message saying there was an error, even though the tutorial doesn't:
Uncaught (in promise) TypeError: Cannot read property 'buildFromTemplate' of undefined
at HTMLButtonElement.getVideoSources
const { writeFile } = require("fs");
const dialog = remote;
const Menu = remote;
// Global state
let mediaRecorder; // MediaRecorder instance to capture footage
const recordedChunks = [];
// Buttons
const videoElement = document.querySelector("video");
const startBtn = document.getElementById("startBtn");
startBtn.onclick = (e) => {
mediaRecorder.start();
startBtn.classList.add("is-danger");
startBtn.innerText = "Recording";
};
const stopBtn = document.getElementById("stopBtn");
stopBtn.onclick = (e) => {
mediaRecorder.stop();
startBtn.classList.remove("is-danger");
startBtn.innerText = "Start";
};
const videoSelectBtn = document.getElementById("videoSelectBtn");
videoSelectBtn.onclick = getVideoSources;
// Get the available video sources
async function getVideoSources() {
const inputSources = await desktopCapturer.getSources({
types: ["window", "screen"],
});
const videoOptionsMenu = Menu.buildFromTemplate(
inputSources.map((source) => {
return {
label: source.name,
click: () => selectSource(source),
};
})
);
videoOptionsMenu.popup();
}
// Change the videoSource window to record
async function selectSource(source) {
videoSelectBtn.innerText = source.name;
const constraints = {
audio: false,
video: {
mandatory: {
chromeMediaSource: "desktop",
chromeMediaSourceId: source.id,
},
},
};
// Create a Stream
const stream = await navigator.mediaDevices.getUserMedia(constraints);
// Preview the source in a video element
videoElement.srcObject = stream;
videoElement.play();
// Create the Media Recorder
const options = { mimeType: "video/webm; codecs=vp9" };
mediaRecorder = new MediaRecorder(stream, options);
// Register Event Handlers
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.onstop = handleStop;
// Updates the UI
}
// Captures all recorded chunks
function handleDataAvailable(e) {
console.log("video data available");
recordedChunks.push(e.data);
}
// Saves the video file on stop
async function handleStop(e) {
const blob = new Blob(recordedChunks, {
type: "video/webm; codecs=vp9",
});
const buffer = Buffer.from(await blob.arrayBuffer());
const { filePath } = await dialog.showSaveDialog({
buttonLabel: "Save video",
defaultPath: `vid-${Date.now()}.webm`,
});
if (filePath) {
writeFile(filePath, buffer, () => console.log("video saved successfully!"));
}
}
This code is from the source file in the tutorial's GitHub repo.
Can you help me fix it? Thank you.
const {remote} = require("electron");
const {dialog, Menu} = remote;
not
const dialog = remote;
const Menu = remote;
And I believe you are using a newer version of Electron, so you need to create the BrowserWindow like this:
const mainWindow = new BrowserWindow({
width: 800,
height: 600,
webPreferences: {
nodeIntegration: true,
enableRemoteModule: true
}
})
You are not allowed to use remote in the renderer because enableRemoteModule is false by default since v9.
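As a side note: in Electron 14 and later the built-in remote module was removed entirely, so on current versions the equivalent (assuming the separate @electron/remote package is installed and initialized in the main process) would be:
// Renderer process, Electron >= 14, with @electron/remote set up in main
const { dialog, Menu } = require('@electron/remote');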

Cannot read property 'stop' of undefined error in mediaRecorder.stop();

The fact is that the code worked normally before, like this: when you clicked the start button, access to the webcam and microphone was requested, and once it was granted, video recording started. Now it does not work. This script records a video and transmits it via AJAX.
The place where the error occurs is marked with two ERROR comments.
$(document).ready(function(){
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
// This code is adapted from
// https://rawgit.com/Miguelao/demos/master/mediarecorder.html
'use strict';
/* globals MediaRecorder */
let mediaRecorder;
let recordedBlobs;
// const errorMsgElement = document.querySelector('span#errorMsg');
const recordedVideo = document.querySelector('video#recorded');
const recordButton = document.querySelector('#record');
recordButton.addEventListener('click', () => {
document.getElementById("start").click();
if (recordButton.textContent === 'Start Recording') {
startRecording();
$('#start').css("display","none");
// setTimeout(stopRecording, 50000);
} else {
stopRecording();
recordButton.textContent = 'Start Recording';
playButton.disabled = false;
downloadButton.disabled = false;
}
});
const playButton = document.querySelector('button#play');
playButton.addEventListener('click', () => {
const superBuffer = new Blob(recordedBlobs, {type: 'video/webm'});
recordedVideo.src = null;
recordedVideo.srcObject = null;
recordedVideo.src = window.URL.createObjectURL(superBuffer);
recordedVideo.controls = true;
recordedVideo.play();
});
const downloadButton = document.querySelector('#download');
downloadButton.addEventListener('click', () => {
const blob = new Blob(recordedBlobs, {type: 'video/webm'});
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = 'test.webm';
document.body.appendChild(a);
a.click();
setTimeout(() => {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 100);
location.reload();
});
function handleDataAvailable(event) {
console.log('handleDataAvailable', event);
if (event.data && event.data.size > 0) {
recordedBlobs.push(event.data);
}
}
function startRecording() {
recordedBlobs = [];
let options = {mimeType: 'video/webm;codecs=vp9,opus'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.error(`${options.mimeType} is not supported`);
options = {mimeType: 'video/webm;codecs=vp8,opus'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.error(`${options.mimeType} is not supported`);
options = {mimeType: 'video/webm'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.error(`${options.mimeType} is not supported`);
options = {mimeType: ''};
}
}
}
try {
mediaRecorder = new MediaRecorder(window.stream, options);
} catch (e) {
console.error('Exception while creating MediaRecorder:', e);
errorMsgElement.innerHTML = `Exception while creating MediaRecorder: ${JSON.stringify(e)}`;
return;
}
console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
recordButton.textContent = 'Stop Recording';
playButton.disabled = true;
downloadButton.disabled = true;
mediaRecorder.onstop = (event) => {
console.log('Recorder stopped: ', event);
console.log('Recorded Blobs: ', recordedBlobs);
};
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.start();
console.log('MediaRecorder started', mediaRecorder);
}
function stopRecording() {
//ERROR
mediaRecorder.stop();
//ERROR
//document.getElementById("download").click();
$('#record').css( "display", "none" );
$('#download').attr( "style", '');
}
function handleSuccess(stream) {
recordButton.disabled = false;
console.log('getUserMedia() got stream:', stream);
window.stream = stream;
const gumVideo = document.querySelector('video#gum');
gumVideo.srcObject = stream;
}
async function init(constraints) {
try {
const stream = await navigator.mediaDevices.getUserMedia(constraints);
handleSuccess(stream);
} catch (e) {
console.error('navigator.getUserMedia error:', e);
// errorMsgElement.innerHTML = `navigator.getUserMedia error:${e.toString()}`;
}
}
document.querySelector('#start').addEventListener('click', async () => {
$('#main_img').css('display', 'none');
$('#gum').attr('style', '');
//const hasEchoCancellation = document.querySelector('#echoCancellation').checked;
const constraints = {
//audio: {
//echoCancellation: {exact: hasEchoCancellation}
//},
video: {
width: 1280, height: 720
}
};
console.log('Using media constraints:', constraints);
await init(constraints);
});
});
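A hedged reading of the code above: the #record click handler programmatically clicks #start and then calls startRecording() right away, but init() is asynchronous, so window.stream may still be undefined at that moment; the MediaRecorder constructor then throws and mediaRecorder is never assigned, which would explain why mediaRecorder.stop() later fails with "Cannot read property 'stop' of undefined". A minimal guard sketch for stopRecording():
function stopRecording() {
    // Bail out if no recorder was ever created or it is not running
    if (!mediaRecorder || mediaRecorder.state === 'inactive') {
        console.warn('Nothing to stop: the recorder was never started');
        return;
    }
    mediaRecorder.stop();
    $('#record').css('display', 'none');
    $('#download').attr('style', '');
}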
