The desktopCapturer API example shows how to stream a screen capture into a <video> element.
// In the renderer process.
var desktopCapturer = require('electron').desktopCapturer;

desktopCapturer.getSources({ types: ['window', 'screen'] }, function(error, sources) {
  if (error) throw error;
  for (var i = 0; i < sources.length; ++i) {
    if (sources[i].name === 'Electron') {
      navigator.webkitGetUserMedia({
        audio: false,
        video: {
          mandatory: {
            chromeMediaSource: 'desktop',
            chromeMediaSourceId: sources[i].id,
            minWidth: 1280,
            maxWidth: 1280,
            minHeight: 720,
            maxHeight: 720
          }
        }
      }, gotStream, getUserMediaError);
      return;
    }
  }
});

function gotStream(stream) {
  document.querySelector('video').src = URL.createObjectURL(stream);
}

function getUserMediaError(e) {
  console.log('getUserMediaError');
}
I tried to replace the gotStream function with the following:
function gotStream(stream) {
  var fs = require('fs');
  fs.writeFileSync('vid.mp4', stream);
}
This creates a text file with [object MediaStream] as the contents: writeFileSync simply stringifies the object, since a MediaStream is a live source rather than a buffer of data.
How can I record this stream and save to a file on disk?
I answered my own question with the help of Demian's link to the MediaRecorder API, as well as other related questions.
Below is an excerpt from magnemite, with some minor simplifications, converted from TypeScript to ES5 JavaScript so it is easier to follow for most readers.
var fs = require('fs');
var electron = require('electron');

var SECRET_KEY = 'Magnemite';

var recorder;
var blobs = [];

function startRecording() {
  var title = document.title;
  document.title = SECRET_KEY;

  electron.desktopCapturer.getSources({ types: ['window', 'screen'] }, function(error, sources) {
    if (error) throw error;
    for (let i = 0; i < sources.length; i++) {
      let src = sources[i];
      if (src.name === SECRET_KEY) {
        document.title = title;
        navigator.webkitGetUserMedia({
          audio: false,
          video: {
            mandatory: {
              chromeMediaSource: 'desktop',
              chromeMediaSourceId: src.id,
              minWidth: 800,
              maxWidth: 1280,
              minHeight: 600,
              maxHeight: 720
            }
          }
        }, handleStream, handleUserMediaError);
        return;
      }
    }
  });
}

function handleStream(stream) {
  recorder = new MediaRecorder(stream);
  blobs = [];
  recorder.ondataavailable = function(event) {
    blobs.push(event.data);
  };
  recorder.start();
}

function stopRecording() {
  recorder.stop();
  toArrayBuffer(new Blob(blobs, { type: 'video/webm' }), function(ab) {
    var buffer = toBuffer(ab);
    var file = `./videos/example.webm`;
    fs.writeFile(file, buffer, function(err) {
      if (err) {
        console.error('Failed to save video ' + err);
      } else {
        console.log('Saved video: ' + file);
      }
    });
  });
}

function handleUserMediaError(e) {
  console.error('handleUserMediaError', e);
}

function toArrayBuffer(blob, cb) {
  let fileReader = new FileReader();
  fileReader.onload = function() {
    let arrayBuffer = this.result;
    cb(arrayBuffer);
  };
  fileReader.readAsArrayBuffer(blob);
}

function toBuffer(ab) {
  return Buffer.from(ab);
}

// Record for 7 seconds and save to disk
startRecording();
setTimeout(function() { stopRecording(); }, 7000);
This will record the current Electron window for 7 seconds and save the result to disk.
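One caveat: fs.writeFile does not create the ./videos directory, so the write fails with ENOENT if it is missing. A small guard before saving (assuming Node 10+ for the recursive option):

var fs = require('fs');
// Create the output directory up front; this is a no-op when it already exists.
fs.mkdirSync('./videos', { recursive: true });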
Take a look at the MediaRecorder API. You need to convert the stream to binary chunks before you can save it.
The desktopCapturer example shows how to get a Blob. You then need to convert the Blob to something accepted by fs.writeFile. The following is short and efficient:
fs.writeFile(filepath, Buffer.from(await blob.arrayBuffer()), ...);
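For context, a minimal end-to-end sketch around that one-liner (the recorder, chunks, and output filename here are illustrative, assuming a Chromium new enough to have Blob.arrayBuffer()):

const fs = require('fs');

const recorder = new MediaRecorder(stream); // stream from getUserMedia/desktopCapturer
const chunks = [];
recorder.ondataavailable = (e) => chunks.push(e.data);
recorder.onstop = async () => {
  const blob = new Blob(chunks, { type: 'video/webm' });
  // Convert the Blob to a Node Buffer that fs.writeFile accepts.
  fs.writeFile('recording.webm', Buffer.from(await blob.arrayBuffer()), (err) => {
    if (err) console.error('Failed to save recording', err);
  });
};
recorder.start();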
I was following Fireship's Electron tutorial to build a desktop capturer.
One thing I know is that there is a large version difference between the Electron release I am using and the one in the tutorial.
The only problem I am having is during the instantiation of the MediaRecorder class: the class is not recognized at all.
Is there a way I can fix it?
Render.js - Source Code
// Buttons
const videoElement = document.querySelector('video');

const startBtn = document.getElementById('startBtn');
startBtn.onclick = e => {
  mediaRecorder.start();
  startBtn.classList.add('is-danger');
  startBtn.innerText = 'Recording';
};

const stopBtn = document.getElementById('stopBtn');
stopBtn.onclick = e => {
  mediaRecorder.stop();
  startBtn.classList.remove('is-danger');
  startBtn.innerText = 'Start';
};

const videoSelectBtn = document.getElementById('videoSelectBtn');
videoSelectBtn.onclick = getVideoSources;

const { desktopCapturer, remote } = require('electron');
const { dialog, Menu } = remote;

// Get the available video sources
async function getVideoSources() {
  const inputSources = await desktopCapturer.getSources({
    types: ['window', 'screen']
  });

  const videoOptionsMenu = Menu.buildFromTemplate(
    inputSources.map(source => {
      return {
        label: source.name,
        click: () => selectSource(source)
      };
    })
  );

  videoOptionsMenu.popup();
}

let mediaRecorder; // MediaRecorder instance to capture footage
const recordedChunks = [];

// Change the videoSources window to record
async function selectSource(source) {
  videoSelectBtn.innerText = source.name;

  const constraints = {
    audio: false,
    video: {
      mandatory: {
        chromeMediaSource: 'desktop',
        chromeMediaSourceId: source.id
      }
    }
  };

  // Create a Stream
  const stream = await navigator.mediaDevices.getUserMedia(constraints);

  // Preview the source in a video element
  videoElement.srcObject = stream;
  videoElement.play();

  // Create the Media Recorder
  const options = { mimeType: 'video/webm; codecs=vp9' };
  mediaRecorder = new MediaRecorder(stream, options);

  // Register Event Handlers
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.onStop = handleStop;
}

// Captures all recorded chunks
function handleDataAvailable(e) {
  console.log('video data available');
  recordedChunks.push(e.data);
}

const { writeFile } = require('fs');

// Saves the video file on stop
async function handleStop(e) {
  const blob = new Blob(recordedChunks, {
    type: 'video/webm; codecs=vp9'
  });

  const buffer = Buffer.from(await blob.arrayBuffer());

  const { filePath } = await dialog.showSaveDialog({
    buttonLabel: 'Save Video',
    defaultPath: `vid-${Date.now()}.webm`
  });

  console.log(filePath);

  writeFile(filePath, buffer, () => console.log('Video Saved Successfully!'));
}
Web Preferences - Index.js
const mainWindow = new BrowserWindow({
  width: 800,
  height: 600,
  webPreferences: {
    nodeIntegration: true,
    contextIsolation: false,
    enableRemoteModule: true,
  }
});
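(If you are on Electron 14 or later, where the built-in remote module was removed, the equivalent setup would use the @electron/remote package instead; a rough sketch, assuming @electron/remote v2+:)

// main process
require('@electron/remote/main').initialize();
// after creating the window:
require('@electron/remote/main').enable(mainWindow.webContents);

// renderer process
const { dialog, Menu } = require('@electron/remote');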
Try this in the render.js file, using "electron": "10.2.0":
const { desktopCapturer, remote } = require('electron');
const { writeFile } = require('fs');
const { Menu, dialog } = remote; // dialog must come from remote in the renderer
// Buttons
const videoElement = document.querySelector('video');
const startBtn = document.getElementById('startBtn');
const stopBtn = document.getElementById('stopBtn');
const videoSelectBtn = document.getElementById('videoSelectBtn');
videoSelectBtn.onclick = getVideoSources; // assign the function, don't invoke it
// (wire up startBtn/stopBtn click handlers as in the question code above)
// Get all available video sources
async function getVideoSources() {
  const inputSources = await desktopCapturer.getSources({
    types: ['window', 'screen'],
  });

  const videoOptionsMenu = Menu.buildFromTemplate(
    inputSources.map((source) => {
      return {
        label: source.name,
        click: () => selectSource(source),
      };
    })
  );

  videoOptionsMenu.popup();
}
let mediaRecorder; // MediaRecorder instance to capture footage
const recordedChunks = [];

async function selectSource(source) {
  videoSelectBtn.innerText = source.name;

  const constraints = {
    audio: false,
    video: {
      mandatory: {
        chromeMediaSource: 'desktop',
        chromeMediaSourceId: source.id,
      },
    },
  };

  // Create a stream
  const stream = await navigator.mediaDevices.getUserMedia(constraints);

  // Preview the source in a video element
  videoElement.srcObject = stream;
  videoElement.play();

  // Create the Media Recorder
  const options = { mimeType: 'video/webm; codecs=vp9' };
  mediaRecorder = new MediaRecorder(stream, options); // note the capital M: the global MediaRecorder class, not this variable

  // Register Event Handlers
  mediaRecorder.ondataavailable = handleAvailableData;
  mediaRecorder.onstop = handleStop;
}
async function handleAvailableData(e) {
  console.log('Video data available');
  recordedChunks.push(e.data);
}

// Save video on stop
async function handleStop(e) {
  const blob = new Blob(recordedChunks, {
    type: 'video/webm; codecs=vp9',
  });

  const buffer = Buffer.from(await blob.arrayBuffer());

  const { filePath } = await dialog.showSaveDialog({
    buttonLabel: 'Save Video',
    defaultPath: `vid-${Date.now()}.webm`
  });

  console.log(filePath);

  writeFile(filePath, buffer, () => console.log('Saved Successfully'));
}
I'm developing a React Native app where I record a canvas and make 5-second video files to upload to the server. Everything works great, except all my webm files have only one frame. Here is my code. Please help me understand what's wrong here. Thanks!
initMediaRecorder = () => {
  const promise = new Promise((resolve) => {
    const stream = this.selfieCanvas.captureStream(10);

    let mediaRecorder = null;
    let options;
    if (MediaRecorder.isTypeSupported('video/webm; codecs=vp9')) {
      options = { mimeType: 'video/webm; codecs=vp9', videoBitsPerSecond: 2500000 };
    } else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
      options = { mimeType: 'video/webm; codecs=vp8', videoBitsPerSecond: 2500000 };
    } else {
      options = 'video/vp8'; // Chrome 47
    }

    try {
      mediaRecorder = new MediaRecorder(stream, options);
    } catch (e0) {
      resolve(null);
    }

    mediaRecorder.ondataavailable = (event) => {
      console.log(`LOG - Data available ${event.data.size}`);
      this.sendToSaveVideo(event.data);
    };

    resolve(mediaRecorder);
  });
  return promise;
}

captureVideo = async (oldMediaRecorder) => {
  this.initMediaRecorder().then((mediaRecorder) => {
    if (oldMediaRecorder !== null && typeof oldMediaRecorder !== 'undefined') {
      // I don't want to stop the previous recorder until I init the next recorder
      oldMediaRecorder.stop();
    }
    if (mediaRecorder !== null) {
      mediaRecorder.start();
    }
    this.captureVideoTimer = setTimeout(() => {
      this.captureVideo(mediaRecorder);
    }, 5000);
  });
}

sendToSaveVideo = async (eventData) => {
  const blobChunk = [];
  blobChunk.push(eventData);
  const video = new Blob(blobChunk, { type: 'video/webm' });
  saveBlobToCloud(video); // save the file to cloud
}
You are not passing a timeslice to start(); this (probably) makes ondataavailable run for every frame.
Also, avoid using ondataavailable like that; onstop exists exactly for this purpose.
If this doesn't work, check whether the canvas is actually changing frames (see the quick sanity check after the corrected code below).
initMediaRecorder = () => {
  const promise = new Promise((resolve) => {
    const stream = this.selfieCanvas.captureStream(10);

    let chunks = [];
    let mediaRecorder = null;
    let options;
    if (MediaRecorder.isTypeSupported('video/webm; codecs=vp9')) {
      options = { mimeType: 'video/webm; codecs=vp9', videoBitsPerSecond: 2500000 };
    } else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
      options = { mimeType: 'video/webm; codecs=vp8', videoBitsPerSecond: 2500000 };
    } else {
      options = 'video/vp8'; // Chrome 47
    }

    try {
      mediaRecorder = new MediaRecorder(stream, options);
    } catch (e0) {
      resolve(null);
    }

    mediaRecorder.ondataavailable = (event) => {
      chunks.push(event.data);
    };
    mediaRecorder.onstop = (event) => {
      this.sendToSaveVideo(chunks);
    };

    resolve(mediaRecorder);
  });
  return promise;
}
captureVideo = async (oldMediaRecorder) => {
  this.initMediaRecorder().then((mediaRecorder) => {
    if (oldMediaRecorder !== null && typeof oldMediaRecorder !== 'undefined') {
      // I don't want to stop the previous recorder until I init the next recorder
      oldMediaRecorder.stop();
    }
    if (mediaRecorder !== null) {
      // make ondataavailable run every second;
      // ondataavailable should not be used as a stop!
      mediaRecorder.start(1000);
    }
    this.captureVideoTimer = setTimeout(() => {
      this.captureVideo(mediaRecorder);
    }, 5000);
  });
}
sendToSaveVideo = async (chunks) => {
  const video = new Blob(chunks, { type: 'video/webm' });
  saveBlobToCloud(video); // save the file to cloud
}
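And here is a quick sanity check that the canvas is actually producing new frames (a hedged sketch; selfieCanvas and the drawing code are stand-ins for your own):

// Draw an incrementing counter so consecutive frames are guaranteed to differ.
// If the recorded file still has a single frame, the problem is in the
// recorder setup rather than the canvas.
const ctx = selfieCanvas.getContext('2d');
let frame = 0;
(function tick() {
  ctx.clearRect(0, 0, 80, 30);
  ctx.fillText('frame ' + frame++, 10, 20);
  requestAnimationFrame(tick);
})();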
Edit
Also, you do not need to create a new MediaRecorder every single time.
Something like this would be better:
const stream = selfieCanvas.captureStream(10);

let chunks = [];
let mediaRecorder = null;
let options;
if (MediaRecorder.isTypeSupported('video/webm; codecs=vp9')) {
  options = { mimeType: 'video/webm; codecs=vp9', videoBitsPerSecond: 2500000 };
} else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
  options = { mimeType: 'video/webm; codecs=vp8', videoBitsPerSecond: 2500000 };
} else {
  options = 'video/vp8'; // Chrome 47
}

try {
  mediaRecorder = new MediaRecorder(stream, options);
} catch (e0) {
  throw e0; // no Promise wrapper here, so surface the failure directly
}

mediaRecorder.ondataavailable = (event) => {
  chunks.push(event.data);
};
mediaRecorder.onstop = (event) => {
  const video = new Blob(chunks, { type: 'video/webm' });
  saveBlobToCloud(video);
  chunks = [];
};

// makes ondataavailable run every second
mediaRecorder.start(1000);

// a video is made for every 5 seconds
setInterval(function() {
  mediaRecorder.stop();
  // ondataavailable should be running more often than stop
  mediaRecorder.start(1000);
}, 5000);
Here are some other useful links:
https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/dataavailable_event
MediaRecorder ondataavailable work successfully once
I've been trying to record video and audio with the MediaRecorder API, but it will only let me record my screen without audio. Do I need to have two separate streams and merge them into one? But then why is it possible to set { audio: true, video: true } in the navigator.mediaDevices.getDisplayMedia() method?
This is my code:
async function startRecording() {
  let mimeType = "video/webm;codecs=vp9";
  try {
    const mediaDevices = navigator.mediaDevices as any;
    const stream = await mediaDevices.getDisplayMedia({
      audio: true,
      video: true,
    });

    const options = {
      mimeType: mimeType,
      bitsPerSecond: 500000,
    };

    let recorder = new MediaRecorder(stream, options);
    const chunks = [];

    recorder.ondataavailable = (e) => {
      if (e.data.size > 0) {
        chunks.push(e.data);
      } else {
        console.log("no data to push");
      }
    };

    recorder.onstop = (e) => {
      const completeBlob = new Blob(chunks, {
        type: chunks[0].type
      });

      stream.getTracks().forEach((track) => {
        track.stop();
        console.log(track);
      });

      setVideoData({
        recorded: true,
        localVideoURL: URL.createObjectURL(completeBlob),
        blob: completeBlob,
      });
    };

    recorder.start();
  } catch (error) {
    console.log(error);
  }
}
Any pointers greatly appreciated.
Most browsers don't support capturing audio with display media. Even in Chrome and Chromium variants, capture support depends on the OS.
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getDisplayMedia#Browser_compatibility
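If you need microphone audio alongside the screen capture, a common workaround (a hedged sketch, not part of the original answer) is to request the mic separately with getUserMedia and merge the tracks into one MediaStream before recording:

async function getScreenWithMic() {
  const screen = await navigator.mediaDevices.getDisplayMedia({ video: true });
  const mic = await navigator.mediaDevices.getUserMedia({ audio: true });
  // Combine the screen's video track with the microphone's audio track.
  return new MediaStream([...screen.getVideoTracks(), ...mic.getAudioTracks()]);
}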
I currently have a function which receives a MediaStream from navigator.getUserMedia(), and it works great. I would like to give users the option to upload an audio file and mimic it going through the same function. Is it possible to upload an audio file, create a MediaStream object from it, and pass it through the function below?
startUserMedia(stream) {
  this.setState({ audio: stream });

  var audioContext = new AudioContext();
  var source = audioContext.createMediaStreamSource(stream);
  var processor = audioContext.createScriptProcessor(8192, 1, 1);
  source.connect(processor);
  processor.connect(audioContext.destination);

  const that = this;
  let audioBuffers = [];
  this.setState({ currentDuration: 0 });

  processor.onaudioprocess = function(e) {
    // Do something with the data, i.e. convert this to WAV
    if (that.state.currentDuration < that.state.chunkDuration) {
      that.setState({
        currentDuration: that.state.currentDuration + e.inputBuffer.duration
      });
      resampler(e.inputBuffer, 16000, function(event) {
        const buffer = event.getAudioBuffer();
        if (that.state.voiceActive) {
          audioBuffers.push(buffer);
        }
      });
    } else {
      if (!that.state.voiceActive) {
        that.mergeAndSendAudio(audioBuffers, audioContext);
        that.setState({ currentDuration: 0 });
        audioBuffers = [];
        audioBuffers.push(e.inputBuffer);
      } else {
        audioBuffers.push(e.inputBuffer);
      }
    }
  };

  var options = {
    onVoiceStart: function() {
      console.log("voice start");
      that.setState({ voiceActive: true });
    },
    onVoiceStop: function() {
      console.log("voice stop");
      that.setState({ voiceActive: false });
    },
    onUpdate: function(val) {
      // console.log('curr val:', val);
    }
  };

  vad(audioContext, stream, options);
}
Found the answer:
handleselectedFile = event => {
  this.setState({
    selectedFile: event.target.files[0],
    loaded: 0
  });

  const objectURL = window.URL.createObjectURL(event.target.files[0]);
  const audio = new Audio(objectURL);
  const stream = audio.captureStream();

  audio.play().then(_ => {
    this.startUserMedia(stream);
  }); // stream now has input
};
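One caveat: HTMLMediaElement.captureStream() is unprefixed in Chromium but prefixed in Firefox, so a defensive version (a small hedged tweak) would be:

const stream = audio.captureStream
  ? audio.captureStream()     // Chromium
  : audio.mozCaptureStream(); // Firefox's prefixed variant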
I have added audio capture to a Chrome extension that screencasts the desktop, but the audio quality is poor. When I record a tab instead, I get good quality video and audio. How can I get good quality audio in a desktop recording? Here is what I am using:
chrome.desktopCapture.chooseDesktopMedia(['screen', 'window', 'audio'], function (streamId) {
  if (streamId) {
    var obj = {
      audio: {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: streamId
        }
      },
      video: {
        optional: [],
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: streamId,
          maxWidth: 2560,
          maxHeight: 1440
        }
      }
    };

    countdownRun(function () {
      window.navigator.mediaDevices.getUserMedia(obj).then(function(stream) {
        $streamVideo = stream;
        $timeStart = Date.now();
        var audio = stream.getAudioTracks()[0];
        var video = stream.getVideoTracks()[0];
        alert(JSON.stringify(audio));
        alert(JSON.stringify(video));
        if (micSound) {
          window.navigator.webkitGetUserMedia({audio: true}, function (s) {
            $streamAudio = s;
            audio = s.getAudioTracks()[0];
            captureUseNacl(audio, video);
          }, function (e) {
            chrome.tabs.create({url: 'mic.html'});
          });
        } else {
          captureUseNacl(audio, video);
          (function () {
            var v = document.createElement('video');
            document.body.appendChild(v);
            v.setAttribute('autoplay', '');
            v.addEventListener('canplay', function () {
              console.log('play video');
            }, false);
            v.src = window.URL.createObjectURL(stream);
            $streamElement = v;
          })();
        }
      }).catch(function(err) {
        alert(err);
        alert(JSON.stringify(err));
      });
    });
  }
});
You can fix this by removing the document.createElement and createObjectURL part; it does nothing but add noise to the recording. If you check the readyState of the audio track, it is 'ended' when it should be 'live'. Also, add maxFrameRate to the video section to keep audio and video synchronized. Here is your code:
chrome.desktopCapture.chooseDesktopMedia(['screen', 'window', 'audio'], function (streamId) {
  if (streamId) {
    var obj = {
      audio: {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: streamId
        }
      },
      video: {
        optional: [],
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: streamId,
          maxWidth: 2560,
          maxHeight: 1440,
          maxFrameRate: 30
        }
      }
    };

    countdownRun(function () {
      navigator.mediaDevices.getUserMedia(obj).then(function(stream) {
        $streamVideo = stream;
        $timeStart = Date.now();
        var audio = stream.getAudioTracks()[0];
        var video = stream.getVideoTracks()[0];
        console.log(stream);
        if (micSound) {
          window.navigator.webkitGetUserMedia({audio: true}, function (s) {
            $streamAudio = s;
            audio = s.getAudioTracks()[0];
            captureUseNacl(audio, video);
          }, function (e) {
            chrome.tabs.create({url: 'mic.html'});
          });
        } else {
          captureUseNacl(audio, video);
        }
      }).catch(function(err) {
        console.log(err);
      });
    });
  }
});