Video camera starts then stops in React - JavaScript

It worked fine before, but now it doesn't show the video in the video HTML element.
The camera does start: the recording indicator appears on the Chrome tab and the white light beside the camera turns on, but after about 5 seconds it goes off.
Previously, the recorded file was also downloaded when I stopped the stream with another button.
Below is the code:
await navigator.mediaDevices
  .getUserMedia({
    audio: true,
    video: { width: 1280, height: 720, facingMode: "user" },
  })
  .then(function (stream) {
    remotevideo.current.srcObject = stream;
    remotevideo.current.play();

    var streams = remotevideo.current.captureStream(25);
    var recordedChunks = [];
    var mediaRecorder = null;
    console.log(streams);

    var options = { mimeType: "video/webm; codecs=vp9" };
    mediaRecorder = new MediaRecorder(streams, options);
    mediaRecorder.ondataavailable = handleDataAvailable;
    mediaRecorder.start();

    function handleDataAvailable(event) {
      console.log("data-available");
      if (event.data.size > 0) {
        recordedChunks.push(event.data);
        console.log(recordedChunks);
        download();
      } else {
        // ...
      }
    }

    function download() {
      var blob = new Blob(recordedChunks, {
        type: "video/webm"
      });
      var url = URL.createObjectURL(blob);
      var a = document.createElement("a");
      document.body.appendChild(a);
      a.style = "display: none";
      a.href = url;
      a.download = "test.webm";
      a.click();
      window.URL.revokeObjectURL(url);
    }

    if (!remotevideo.current.srcObject) {
      mediaRecorder.stop();
    }
  })
  .catch(function (err) {
    console.log("An error occurred: " + err);
  });
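A minimal sketch of one commonly suggested restructuring, assuming `remotevideo` is a React ref to the `<video>` element and that a separate stop button calls `stopRecording()` (the function names here are illustrative, not from the original code): record the getUserMedia stream directly instead of calling `captureStream()` on the element, collect chunks in `ondataavailable`, and only build the download in `onstop`, so the recorder is stopped explicitly by the user rather than by the `srcObject` check.

// Sketch only: startRecording/stopRecording and their wiring to buttons are assumptions.
let mediaRecorder = null;
let recordedChunks = [];

async function startRecording() {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: true,
      video: { width: 1280, height: 720, facingMode: "user" },
    });
    remotevideo.current.srcObject = stream;
    await remotevideo.current.play();

    // Record the camera stream directly; captureStream() on the element is not needed.
    mediaRecorder = new MediaRecorder(stream, { mimeType: "video/webm; codecs=vp9" });
    mediaRecorder.ondataavailable = (event) => {
      if (event.data.size > 0) recordedChunks.push(event.data);
    };
    mediaRecorder.onstop = () => {
      // Build the file once, after recording has finished.
      const blob = new Blob(recordedChunks, { type: "video/webm" });
      const url = URL.createObjectURL(blob);
      const a = document.createElement("a");
      a.style.display = "none";
      a.href = url;
      a.download = "test.webm";
      document.body.appendChild(a);
      a.click();
      URL.revokeObjectURL(url);
      a.remove();
    };
    mediaRecorder.start();
  } catch (err) {
    console.log("An error occurred: " + err);
  }
}

// Wired to the stop button: stop the recorder first, then release the camera.
function stopRecording() {
  if (mediaRecorder && mediaRecorder.state !== "inactive") mediaRecorder.stop();
  const stream = remotevideo.current.srcObject;
  if (stream) stream.getTracks().forEach((track) => track.stop());
  remotevideo.current.srcObject = null;
}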

Related

MediaRecorder class not available in Electron app

I was following Fireship's Electron tutorial to build a desktop capturer.
One thing I do know is that there is now a large difference between the Electron version I am using and the one he used.
The only problem I am having is with the instantiation of the MediaRecorder class: the class is not recognized at all.
Is there a way I can fix this?
Render.js - Source Code
// Buttons
const videoElement = document.querySelector('video');

const startBtn = document.getElementById('startBtn');
startBtn.onclick = e => {
  mediaRecorder.start();
  startBtn.classList.add('is-danger');
  startBtn.innerText = 'Recording';
};

const stopBtn = document.getElementById('stopBtn');
stopBtn.onclick = e => {
  mediaRecorder.stop();
  startBtn.classList.remove('is-danger');
  startBtn.innerText = 'Start';
};

const videoSelectBtn = document.getElementById('videoSelectBtn');
videoSelectBtn.onclick = getVideoSources;

const { desktopCapturer, remote } = require('electron');
const { dialog, Menu } = remote;

// Get the available video sources
async function getVideoSources() {
  const inputSources = await desktopCapturer.getSources({
    types: ['window', 'screen']
  });

  const videoOptionsMenu = Menu.buildFromTemplate(
    inputSources.map(source => {
      return {
        label: source.name,
        click: () => selectSource(source)
      };
    })
  );

  videoOptionsMenu.popup();
}

let mediaRecorder; // MediaRecorder instance to capture footage
const recordedChunks = [];

// Change the videoSources window to record
async function selectSource(source) {
  videoSelectBtn.innerText = source.name;

  const constraints = {
    audio: false,
    video: {
      mandatory: {
        chromeMediaSource: 'desktop',
        chromeMediaSourceId: source.id
      }
    }
  };

  // Create a Stream
  const stream = await navigator.mediaDevices.getUserMedia(constraints);

  // Preview the source in a video element
  videoElement.srcObject = stream;
  videoElement.play();

  // Create the Media Recorder
  const options = { mimeType: 'video/webm; codecs=vp9' };
  mediaRecorder = new MediaRecorder(stream, options);

  // Register Event Handlers
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.onStop = handleStop;
}

// Captures all recorded chunks
function handleDataAvailable(e) {
  console.log('video data available');
  recordedChunks.push(e.data);
}

const { writeFile } = require('fs');

// Saves the video file on stop
async function handleStop(e) {
  const blob = new Blob(recordedChunks, {
    type: 'video/webm; codecs=vp9'
  });

  const buffer = Buffer.from(await blob.arrayBuffer());

  const { filePath } = await dialog.showSaveDialog({
    buttonLabel: 'Save Video',
    defaultPath: `vid -${Date.now()}.webm`
  });

  console.log(filePath);

  writeFile(filePath, buffer, () => console.log('Video Saved Successfully!'));
}
Web Preferences - Index.js
const mainWindow = new BrowserWindow({
  width: 800,
  height: 600,
  webPreferences: {
    nodeIntegration: true,
    contextIsolation: false,
    enableRemoteModule: true,
  }
});
Try this in the render.js file, using "electron": "10.2.0":
const { desktopCapturer, remote } = require('electron');
const { writeFile } = require('fs');
const { dialog, Menu } = remote; // dialog must come from remote in the renderer

// Buttons
const videoElement = document.querySelector('video');
const startBtn = document.getElementById('startBtn');
const stopBtn = document.getElementById('stopBtn');
const videoSelectBtn = document.getElementById('videoSelectBtn');
videoSelectBtn.onclick = getVideoSources;

// Get all available video sources
async function getVideoSources() {
  const inputSources = await desktopCapturer.getSources({
    types: ['window', 'screen'],
  });

  const videoOptionsMenu = Menu.buildFromTemplate(
    inputSources.map((source) => {
      return {
        label: source.name,
        click: () => selectSource(source),
      };
    })
  );

  videoOptionsMenu.popup();
}

let mediaRecorder; // MediaRecorder instance to capture footage
const recordedChunks = [];

async function selectSource(source) {
  videoSelectBtn.innerText = source.name;

  const constraints = {
    audio: false,
    video: {
      mandatory: {
        chromeMediaSource: 'desktop',
        chromeMediaSourceId: source.id,
      },
    },
  };

  // Create a stream
  const stream = await navigator.mediaDevices.getUserMedia(constraints);

  // Preview the source in a video element
  videoElement.srcObject = stream;
  videoElement.play();

  // Create the Media Recorder (note the capital M: MediaRecorder is the constructor)
  const options = { mimeType: 'video/webm; codecs=vp9' };
  mediaRecorder = new MediaRecorder(stream, options);

  // Register event handlers (the property name is lowercase "onstop")
  mediaRecorder.ondataavailable = handleAvailableData;
  mediaRecorder.onstop = handleStop;
}

async function handleAvailableData(e) {
  console.log('Video data available');
  recordedChunks.push(e.data);
}

// Save the video on stop
async function handleStop(e) {
  const blob = new Blob(recordedChunks, {
    type: 'video/webm; codecs=vp9',
  });

  const buffer = Buffer.from(await blob.arrayBuffer());

  const { filePath } = await dialog.showSaveDialog({
    buttonLabel: 'Save Video',
    defaultPath: `vid-${Date.now()}.webm`,
  });

  console.log(filePath);

  writeFile(filePath, buffer, () => console.log('Saved Successfully'));
}
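If MediaRecorder still appears to be undefined, a small guard (not part of the original answer, and the fallback list is only an assumption) can confirm whether the class and the chosen mimeType are actually available in the renderer before constructing the recorder:

// Illustrative check only; call this before `new MediaRecorder(stream, options)`.
function pickSupportedMimeType() {
  if (typeof MediaRecorder === 'undefined') {
    console.error('MediaRecorder is not available in this renderer');
    return null;
  }
  const candidates = ['video/webm; codecs=vp9', 'video/webm; codecs=vp8', 'video/webm'];
  return candidates.find((t) => MediaRecorder.isTypeSupported(t)) || '';
}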

Cannot read property 'stop' of undefined error in mediaRecorder.stop();

Previously this code worked normally: when you clicked the start button, access to the webcam and microphone was requested, and after permission was granted, video recording started. Now it does not work. This script records a video and transmits it via AJAX.
The place where the error occurs is marked with two ERROR comments.
$(document).ready(function () {
  /*
   * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
   *
   * Use of this source code is governed by a BSD-style license
   * that can be found in the LICENSE file in the root of the source
   * tree.
   */
  // This code is adapted from
  // https://rawgit.com/Miguelao/demos/master/mediarecorder.html
  'use strict';

  /* globals MediaRecorder */
  let mediaRecorder;
  let recordedBlobs;

  // const errorMsgElement = document.querySelector('span#errorMsg');
  const recordedVideo = document.querySelector('video#recorded');
  const recordButton = document.querySelector('#record');

  recordButton.addEventListener('click', () => {
    document.getElementById("start").click();
    if (recordButton.textContent === 'Start Recording') {
      startRecording();
      $('#start').css("display", "none");
      // setTimeout(stopRecording, 50000);
    } else {
      stopRecording();
      recordButton.textContent = 'Start Recording';
      playButton.disabled = false;
      downloadButton.disabled = false;
    }
  });

  const playButton = document.querySelector('button#play');
  playButton.addEventListener('click', () => {
    const superBuffer = new Blob(recordedBlobs, { type: 'video/webm' });
    recordedVideo.src = null;
    recordedVideo.srcObject = null;
    recordedVideo.src = window.URL.createObjectURL(superBuffer);
    recordedVideo.controls = true;
    recordedVideo.play();
  });

  const downloadButton = document.querySelector('#download');
  downloadButton.addEventListener('click', () => {
    const blob = new Blob(recordedBlobs, { type: 'video/webm' });
    const url = window.URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.style.display = 'none';
    a.href = url;
    a.download = 'test.webm';
    document.body.appendChild(a);
    a.click();
    setTimeout(() => {
      document.body.removeChild(a);
      window.URL.revokeObjectURL(url);
    }, 100);
    location.reload();
  });

  function handleDataAvailable(event) {
    console.log('handleDataAvailable', event);
    if (event.data && event.data.size > 0) {
      recordedBlobs.push(event.data);
    }
  }

  function startRecording() {
    recordedBlobs = [];
    let options = { mimeType: 'video/webm;codecs=vp9,opus' };
    if (!MediaRecorder.isTypeSupported(options.mimeType)) {
      console.error(`${options.mimeType} is not supported`);
      options = { mimeType: 'video/webm;codecs=vp8,opus' };
      if (!MediaRecorder.isTypeSupported(options.mimeType)) {
        console.error(`${options.mimeType} is not supported`);
        options = { mimeType: 'video/webm' };
        if (!MediaRecorder.isTypeSupported(options.mimeType)) {
          console.error(`${options.mimeType} is not supported`);
          options = { mimeType: '' };
        }
      }
    }

    try {
      mediaRecorder = new MediaRecorder(window.stream, options);
    } catch (e) {
      console.error('Exception while creating MediaRecorder:', e);
      errorMsgElement.innerHTML = `Exception while creating MediaRecorder: ${JSON.stringify(e)}`;
      return;
    }

    console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
    recordButton.textContent = 'Stop Recording';
    playButton.disabled = true;
    downloadButton.disabled = true;
    mediaRecorder.onstop = (event) => {
      console.log('Recorder stopped: ', event);
      console.log('Recorded Blobs: ', recordedBlobs);
    };
    mediaRecorder.ondataavailable = handleDataAvailable;
    mediaRecorder.start();
    console.log('MediaRecorder started', mediaRecorder);
  }

  function stopRecording() {
    // ERROR
    mediaRecorder.stop();
    // ERROR
    // document.getElementById("download").click();
    $('#record').css("display", "none");
    $('#download').attr("style", '');
  }

  function handleSuccess(stream) {
    recordButton.disabled = false;
    console.log('getUserMedia() got stream:', stream);
    window.stream = stream;
    const gumVideo = document.querySelector('video#gum');
    gumVideo.srcObject = stream;
  }

  async function init(constraints) {
    try {
      const stream = await navigator.mediaDevices.getUserMedia(constraints);
      handleSuccess(stream);
    } catch (e) {
      console.error('navigator.getUserMedia error:', e);
      // errorMsgElement.innerHTML = `navigator.getUserMedia error:${e.toString()}`;
    }
  }

  document.querySelector('#start').addEventListener('click', async () => {
    $('#main_img').css('display', 'none');
    $('#gum').attr('style', '');
    // const hasEchoCancellation = document.querySelector('#echoCancellation').checked;
    const constraints = {
      // audio: {
      //   echoCancellation: { exact: hasEchoCancellation }
      // },
      video: {
        width: 1280, height: 720
      }
    };
    console.log('Using media constraints:', constraints);
    await init(constraints);
  });
});
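The error means mediaRecorder is still undefined when stopRecording() runs, which typically happens when startRecording() returned early or never executed, for example because window.stream was not set (getUserMedia was denied or had not resolved) and the MediaRecorder constructor threw. A minimal, hedged guard, not from the original code, that surfaces the problem instead of throwing:

// Sketch only: the guard and warning message are additions for illustration.
function stopRecording() {
  if (!mediaRecorder || mediaRecorder.state === 'inactive') {
    console.warn('Recorder was never started; did getUserMedia succeed and set window.stream?');
    return;
  }
  mediaRecorder.stop();
  $('#record').css('display', 'none');
  $('#download').attr('style', '');
}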

Web MediaRecorder API cannot record audio and video simultaneously

I've been trying to record video and audio with the MediaRecorder API but it will only let me record my screen without audio. Do I need to have two separate streams and merge them into one? But why would it be possible to set { audio: true, video: true } in the navigator.mediaDevices.getDisplayMedia() method in this case?
This is my code:
async function startRecording() {
  let mimeType = "video/webm;codecs=vp9";
  try {
    const mediaDevices = navigator.mediaDevices as any;
    const stream = await mediaDevices.getDisplayMedia({
      audio: true,
      video: true,
    });

    const options = {
      mimeType: mimeType,
      bitsPerSecond: 500000,
    };

    let recorder = new MediaRecorder(stream, options);
    const chunks = [];

    recorder.ondataavailable = (e) => {
      if (e.data.size > 0) {
        chunks.push(e.data);
      } else {
        console.log("no data to push");
      }
    };

    recorder.onstop = (e) => {
      const completeBlob = new Blob(chunks, {
        type: chunks[0].type
      });

      stream.getTracks().forEach((track) => {
        track.stop();
        console.log(track);
      });

      setVideoData({
        recorded: true,
        localVideoURL: URL.createObjectURL(completeBlob),
        blob: completeBlob,
      });
    };

    recorder.start();
  } catch (error) {
    console.log(error);
  }
}
Any pointers greatly appreciated.
Most browsers don't support capturing audio with display media. Even in Chrome and Chromium variants, capture support depends on the OS.
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getDisplayMedia#Browser_compatibility
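If microphone audio is sufficient (rather than system audio), one commonly used workaround, sketched here as an assumption about what is wanted, is to capture the microphone separately with getUserMedia and combine the tracks into a single MediaStream before handing it to MediaRecorder. The function name startRecordingWithMicAudio is illustrative only.

// Sketch only: combines screen video with microphone audio (not system audio).
async function startRecordingWithMicAudio() {
  const displayStream = await navigator.mediaDevices.getDisplayMedia({ video: true });
  const micStream = await navigator.mediaDevices.getUserMedia({ audio: true });

  // Build one stream from the screen video track plus the microphone audio track.
  const combined = new MediaStream([
    ...displayStream.getVideoTracks(),
    ...micStream.getAudioTracks(),
  ]);

  const recorder = new MediaRecorder(combined, { mimeType: "video/webm;codecs=vp9,opus" });
  const chunks = [];
  recorder.ondataavailable = (e) => { if (e.data.size > 0) chunks.push(e.data); };
  recorder.onstop = () => {
    const blob = new Blob(chunks, { type: "video/webm" });
    combined.getTracks().forEach((t) => t.stop());
    console.log("Recorded blob:", blob);
  };
  recorder.start();
  return recorder;
}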

Duration of recorded file (WebRTC)

I am implementing a video streaming service with WebRTC and a WebSocket signaling server. It is an Electron app. I want to record some video fragments and get a webm file to download. It works fine if I record a local stream (from navigator.mediaDevices.getUserMedia), but if I try to record a remote stream, the generated video file has a duration of less than 1 second. This problem occurs only in Chrome (and Electron); in Firefox it works fine. If I call console.log(blob) in stopRecording(), it prints something like this:
Blob(1875185) {size: 1875185, type: "video/webm"}
So the blob size is correct, and after downloading, the file size is also correct. What is going wrong in this case?
class WebRTC {
  // ...........

  constructor () {
    navigator.mediaDevices.getUserMedia({
      'audio': true,
      'video': false
    }).then(
      (stream) => {
        this.peerConnection = new RTCPeerConnection(peerConnectionConfig)
        this.peerConnection.addEventListener('addstream', event => this.gotRemoteMediaStream(event))
        this.peerConnection.addEventListener('icecandidate', e => this.handleConnection(e))
        this.peerConnection.addStream(stream)
      }
    ).catch(e => console.log('error', e))
  }

  gotRemoteMediaStream (event) {
    console.log(event)
    const mediaStream = event.stream
    this.video.srcObject = mediaStream
    this.video.play()
    this.remoteStream = mediaStream
    console.log('Remote peer connection received remote stream.')
  }

  startRecording () {
    console.log('RECORD')
    this.recorder = new MediaRecorder(this.remoteStream)
    this.recorder.addEventListener('dataavailable', e => this.onRecordingReady(e))
    this.recorder.start(15)
  }

  onRecordingReady (e) {
    console.log(e)
    this.chunks.push(e.data)
  }

  stopRecording () {
    this.recorder.stop()

    var a = document.createElement('a')
    document.body.appendChild(a)
    a.style = 'display: none'
    a.download = 'test.webm'

    setTimeout(() => {
      console.log(this)
      var blob = new Blob(this.chunks, { type: 'video/webm' })
      console.log(this.chunks)
      console.log(blob)
      let url = URL.createObjectURL(blob)
      a.href = url
      a.click()
      window.URL.revokeObjectURL(url)
    }, 500)
  }
}
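This question has no answer in the thread, but one structural adjustment worth trying, offered here only as a sketch, is to assemble the blob in the recorder's 'stop' event instead of a fixed 500 ms timeout, so all pending dataavailable events have flushed before the download starts. Chrome's MediaRecorder output is also widely reported to lack a duration header in the webm container, so the duration may still need to be patched separately (for example with a tool such as ts-ebml).

// Sketch only: inside the WebRTC class, build the download once the recorder has actually stopped.
stopRecording () {
  this.recorder.addEventListener('stop', () => {
    const blob = new Blob(this.chunks, { type: 'video/webm' })
    const url = URL.createObjectURL(blob)
    const a = document.createElement('a')
    a.style.display = 'none'
    a.href = url
    a.download = 'test.webm'
    document.body.appendChild(a)
    a.click()
    URL.revokeObjectURL(url)
    a.remove()
  })
  this.recorder.stop()
}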

Saving desktopCapturer to video file in Electron

The desktopCapturer API example shows how to write a screen capture stream to a <video> element.
// In the renderer process.
var desktopCapturer = require('electron').desktopCapturer;

desktopCapturer.getSources({ types: ['window', 'screen'] }, function (error, sources) {
  if (error) throw error;
  for (var i = 0; i < sources.length; ++i) {
    if (sources[i].name == "Electron") {
      navigator.webkitGetUserMedia({
        audio: false,
        video: {
          mandatory: {
            chromeMediaSource: 'desktop',
            chromeMediaSourceId: sources[i].id,
            minWidth: 1280,
            maxWidth: 1280,
            minHeight: 720,
            maxHeight: 720
          }
        }
      }, gotStream, getUserMediaError);
      return;
    }
  }
});

function gotStream(stream) {
  document.querySelector('video').src = URL.createObjectURL(stream);
}

function getUserMediaError(e) {
  console.log('getUserMediaError');
}
I tried to replace the gotStream function with the following:
function gotStream(stream) {
  var fs = require('fs');
  fs.writeFileSync('vid.mp4', stream);
}
This creates a text file with [object MediaStream] as the contents.
How can I record this stream and save to a file on disk?
I answered my own question with the help of Demian's link to MediaRecorder as well as other related questions.
Below is an excerpt from magnemite with some minor simplifications, converted from TypeScript to JavaScript ES5 so it is easier for most readers to follow.
var fs = require('fs');
var electron = require('electron');

var SECRET_KEY = 'Magnemite';

var recorder;
var blobs = [];

function startRecording() {
  var title = document.title;
  document.title = SECRET_KEY;

  electron.desktopCapturer.getSources({ types: ['window', 'screen'] }, function (error, sources) {
    if (error) throw error;
    for (let i = 0; i < sources.length; i++) {
      let src = sources[i];
      if (src.name === SECRET_KEY) {
        document.title = title;

        navigator.webkitGetUserMedia({
          audio: false,
          video: {
            mandatory: {
              chromeMediaSource: 'desktop',
              chromeMediaSourceId: src.id,
              minWidth: 800,
              maxWidth: 1280,
              minHeight: 600,
              maxHeight: 720
            }
          }
        }, handleStream, handleUserMediaError);
        return;
      }
    }
  });
}

function handleStream(stream) {
  recorder = new MediaRecorder(stream);
  blobs = [];
  recorder.ondataavailable = function (event) {
    blobs.push(event.data);
  };
  recorder.start();
}

function stopRecording() {
  recorder.stop();
  toArrayBuffer(new Blob(blobs, { type: 'video/webm' }), function (ab) {
    var buffer = toBuffer(ab);
    var file = `./videos/example.webm`;
    fs.writeFile(file, buffer, function (err) {
      if (err) {
        console.error('Failed to save video ' + err);
      } else {
        console.log('Saved video: ' + file);
      }
    });
  });
}

function handleUserMediaError(e) {
  console.error('handleUserMediaError', e);
}

function toArrayBuffer(blob, cb) {
  let fileReader = new FileReader();
  fileReader.onload = function () {
    let arrayBuffer = this.result;
    cb(arrayBuffer);
  };
  fileReader.readAsArrayBuffer(blob);
}

function toBuffer(ab) {
  return Buffer.from(ab);
}

// Record for 7 seconds and save to disk
startRecording();
setTimeout(function () { stopRecording(); }, 7000);
This will record the current Electron window for 7 seconds and save the result to disk.
Take a look at the MediaRecorder API
You should convert that stream to binary chunks.
The desktopCapturer example shows how to get a Blob. You then need to convert the Blob to something accepted by fs.writeFile. The following is short and efficient:
fs.writeFile(filepath, Buffer.from(await blob.arrayBuffer()), ...);
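A slightly fuller sketch of that last idea, assuming you already have the recorded chunks and are running with Node integration enabled in the renderer; the names saveRecording, recordedChunks, and filePath are illustrative only.

// Sketch only: write recorded chunks to disk once the recorder has stopped.
const { writeFile } = require('fs/promises');

async function saveRecording(recordedChunks, filePath) {
  const blob = new Blob(recordedChunks, { type: 'video/webm' });
  const buffer = Buffer.from(await blob.arrayBuffer());
  await writeFile(filePath, buffer);
  console.log('Saved video:', filePath);
}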
