How to add audio to desktop capture in a Chrome extension - JavaScript

I have added audio to my Chrome extension's desktop screen capture, but the audio quality is poor. When I record the screen of a tab, I get good-quality video and audio. How can I get good-quality audio in a desktop recording? Here is what I am using:
chrome.desktopCapture.chooseDesktopMedia(['screen', 'window', 'audio'], function (streamId) {
    if (streamId) {
        var obj = {
            audio: {
                mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: streamId
                }
            },
            video: {
                optional: [],
                mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: streamId,
                    maxWidth: 2560,
                    maxHeight: 1440
                }
            }
        };
        countdownRun(function () {
            window.navigator.mediaDevices.getUserMedia(obj).then(function (stream) {
                $streamVideo = stream;
                $timeStart = Date.now();
                var audio = stream.getAudioTracks()[0];
                var video = stream.getVideoTracks()[0];
                alert(JSON.stringify(audio));
                alert(JSON.stringify(video));
                if (micSound) {
                    window.navigator.webkitGetUserMedia({audio: true}, function (s) {
                        $streamAudio = s;
                        audio = s.getAudioTracks()[0];
                        captureUseNacl(audio, video);
                    }, function (e) {
                        chrome.tabs.create({url: 'mic.html'});
                    });
                } else {
                    captureUseNacl(audio, video);
                    (function () {
                        var v = document.createElement('video');
                        document.body.appendChild(v);
                        v.setAttribute('autoplay', '');
                        v.addEventListener('canplay', function () {
                            console.log('play video');
                        }, false);
                        v.src = window.URL.createObjectURL(stream);
                        $streamElement = v;
                    })();
                }
            }).catch(function (err) {
                alert(err);
                alert(JSON.stringify(err));
            });
        });
    }
});

You can fix this by removing the document.createElement and createObjectURL part; it does nothing but add noise to the recording. If you check the readyState of the audio track, it is 'ended' when it should be 'live'. Also, add maxFrameRate to the video section to keep audio and video in sync. Here is your code:
chrome.desktopCapture.chooseDesktopMedia(['screen', 'window', 'audio'], function (streamId) {
    if (streamId) {
        var obj = {
            audio: {
                mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: streamId
                }
            },
            video: {
                optional: [],
                mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: streamId,
                    maxWidth: 2560,
                    maxHeight: 1440,
                    maxFrameRate: 30
                }
            }
        };
        countdownRun(function () {
            navigator.mediaDevices.getUserMedia(obj).then(function (stream) {
                $streamVideo = stream;
                $timeStart = Date.now();
                var audio = stream.getAudioTracks()[0];
                var video = stream.getVideoTracks()[0];
                console.log(stream);
                if (micSound) {
                    window.navigator.webkitGetUserMedia({audio: true}, function (s) {
                        $streamAudio = s;
                        audio = s.getAudioTracks()[0];
                        captureUseNacl(audio, video);
                    }, function (e) {
                        chrome.tabs.create({url: 'mic.html'});
                    });
                } else {
                    captureUseNacl(audio, video);
                }
            }).catch(function (err) {
                console.log(err);
            });
        });
    }
});
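As a quick sanity check (a minimal sketch, assuming stream is the MediaStream returned by getUserMedia above), you can confirm the desktop audio track is usable before handing it off:
// The desktop audio track should report readyState 'live';
// 'ended' means the track is unusable and the recording will be degraded.
var audioTrack = stream.getAudioTracks()[0];
if (audioTrack && audioTrack.readyState === 'live') {
    console.log('Desktop audio track is live:', audioTrack.label);
} else {
    console.warn('Desktop audio track is missing or already ended');
}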

Related

How to pass a stream from page to page while the user navigates? Chrome extension Manifest V3

I'm developing a manifest v3 Google Chrome extension that aims to take screenshots and record user browsing.
Problem: when I navigate from one page to another while screen sharing, the stream is lost.
Question 1: how can I pass the stream between pages during navigation?
Question 2: how can I store the stream so that it is maintained when the next page loads?
Note:
In the service worker I use chrome.desktopCapture.chooseDesktopMedia to bring up the screen-sharing window; this gives me the stream id, which is then sent to the content script on the page, where I use navigator.mediaDevices.getUserMedia to get the stream, which I then place inside a video element. This video element lets me take screenshots with a canvas.
Looking at some extensions, manifest v2 uses background pages to store the video object and the scripts that create the stream. But now, with the service worker in manifest v3, how could I do it?
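For reference, the canvas screenshot step mentioned in the note typically looks something like this (a minimal sketch; videoEl stands for the hidden video element the content script creates below):
// Draw the current video frame onto a canvas and read it back as a PNG data URL.
function takeScreenshot(videoEl) {
    var canvas = document.createElement('canvas');
    canvas.width = videoEl.videoWidth;
    canvas.height = videoEl.videoHeight;
    canvas.getContext('2d').drawImage(videoEl, 0, 0, canvas.width, canvas.height);
    return canvas.toDataURL('image/png');
}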
My code:
Service worker:
chrome.runtime.onConnect.addListener(function (port) {
    port.onMessage.addListener(function (msg) {
        if (msg.type === 'SS_UI_REQUEST') {
            requestScreenSharing(port, msg);
        }
        if (msg.type === 'SS_UI_TAKESCREENSHOT') {
            console.log(msg.top);
            StartCountDown(port, msg);
        }
    });
});

// const sources = ['screen', 'window', 'tab', 'audio'];
async function requestScreenSharing(port, msg) {
    const sources = ['screen'];
    const tab = port.sender.tab;
    chrome.desktopCapture.chooseDesktopMedia(sources, port.sender.tab, streamId => {
        if (streamId) {
            msg.type = 'SS_DIALOG_SUCCESS';
            msg.streamId = streamId;
            msg.text = "sharing";
        } else {
            msg.type = 'SS_DIALOG_CANCEL';
            msg.text = "cancel";
        }
        getTabId().then((value) => {
            const respuesta = chrome.tabs.connect(value.id, { name: "respuesta" });
            respuesta.postMessage(msg);
        });
    });
}

let seconds = 3;
let myIntervalId; // declared here so the interval can clear itself

// 3-2-1 countdown shown via the badge text and CSS injected into the page.
async function StartCountDown(port, msg) {
    const css1 = 'body::after{ display: flex; height: 100vh; justify-content: center; align-items: center; background: rgb(76,76,76); opacity: 0.7; content: "Capturing screen.. 3"; color: white; font-size: 50px; font-weight: 500; width: 100%; position: absolute; top: ' + msg.top + 'px; left: 0; z-index: 2040;}';
    const css2 = 'body::after{ content: "Capturing screen.. 2";}';
    const css3 = 'body::after{ content: "Capturing screen.. 1";}';
    myIntervalId = setInterval(function () {
        if (seconds > 0) {
            chrome.action.setBadgeBackgroundColor({ color: [255, 0, 0, 255] });
            chrome.action.setBadgeText({ text: seconds.toString() });
            if (seconds == 3) {
                getTabId().then((value) => {
                    chrome.scripting.insertCSS({ target: { tabId: value.id }, css: css1 });
                });
            }
            if (seconds == 2) {
                getTabId().then((value) => {
                    chrome.scripting.insertCSS({ target: { tabId: value.id }, css: css2 });
                });
            }
            if (seconds == 1) {
                getTabId().then((value) => {
                    chrome.scripting.insertCSS({ target: { tabId: value.id }, css: css3 });
                });
            }
            seconds--;
        } else {
            chrome.action.setBadgeText({ text: '' });
            seconds = 3;
            clearInterval(myIntervalId);
            msg.type = 'SS_UI_TAKESCREENSHOT_SUCCESS';
            msg.text = "takingScreenShot";
            getTabId().then((value) => {
                chrome.scripting.removeCSS({ target: { tabId: value.id }, css: css1 });
                chrome.scripting.removeCSS({ target: { tabId: value.id }, css: css2 });
                chrome.scripting.removeCSS({ target: { tabId: value.id }, css: css3 });
                const respuesta = chrome.tabs.connect(value.id, { name: "respuesta" });
                respuesta.postMessage(msg);
            });
        }
    }, 1000);
}

async function getTabId() {
    let queryOptions = { active: true, currentWindow: true };
    let [tab] = await chrome.tabs.query(queryOptions);
    return tab;
}
Content script:
chrome.runtime.onConnect.addListener(function (port) {
    port.onMessage.addListener(function (msg) {
        if (msg.type === 'SS_UI_REQUEST') {
            console.log(msg);
            if (!window.stream) {
                var messenger = chrome.runtime.connect();
                messenger.postMessage(msg);
            } else {
                console.log(msg);
                var messenger = chrome.runtime.connect();
                messenger.postMessage({ type: 'SS_UI_TAKESCREENSHOT', top: window.scrollY.toString() });
            }
        }
        if (msg.type === 'SS_DIALOG_SUCCESS') {
            startScreenStreamFrom(msg.streamId);
            console.log(msg);
            var messenger = chrome.runtime.connect();
            messenger.postMessage({ type: 'SS_UI_TAKESCREENSHOT', top: window.scrollY.toString() });
        }
        if (msg.type === 'SS_UI_CHANGE_CSS') {
            console.log(msg);
        }
        if (msg.type === 'SS_DIALOG_CANCEL') {
            console.log(msg);
        }
        if (msg.type === 'SS_UI_TAKESCREENSHOT_SUCCESS') {
            console.log(msg);
            setTimeout(function () {
                TakeScreenShot();
            }, 300);
        }
        if (msg.type === 'SS_UI_RECORDSCREEN') {
            console.log('Record screen');
            RecordScreen();
        }
    });
});

function startScreenStreamFrom(streamId) {
    console.log('sharing screen');
    navigator.mediaDevices
        .getUserMedia({
            audio: false,
            video: {
                mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: streamId
                }
            }
        })
        .then(stream => {
            var video = document.getElementById("ElementNotExist");
            if (video) {
                video.remove();
            }
            window.stream = stream;
            window.stream.getVideoTracks()[0].onended = function () {
                window.stream = null;
            };
            video = document.createElement('video');
            video.srcObject = stream;
            video.autoplay = true;
            video.setAttribute("id", "ElementNotExist");
            video.style.display = "none";
            document.body.appendChild(video);
        });
}
Index.js (extension script):
document.getElementById("btn-capture").addEventListener("click", function () {
var tab = getTabId();
tab.then((value) => {
chrome.storage.local.set({ 'pestaƱa': value.id });
const port = chrome.tabs.connect(value.id, {
name: "conexion",
});
port.postMessage({ type: 'SS_UI_REQUEST', text: 'start' }, '*');
window.close();
chrome.action.setPopup({popup: "capture.html"});
});
})
async function getTabId() {
let queryOptions = { active: true, currentWindow: true };
let [tab] = await chrome.tabs.query(queryOptions);
return tab;
}
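One pattern worth sketching (not from this thread; it assumes Chrome 109+, where the chrome.offscreen API is available) is to host the capture in an offscreen document owned by the service worker rather than in the page, so the stream survives tab navigations. offscreen.html and the message type below are hypothetical names:
// Service worker: create an offscreen document to own the capture so it
// is not destroyed when the tab navigates.
async function startCaptureInOffscreen(streamId) {
    await chrome.offscreen.createDocument({
        url: 'offscreen.html', // hypothetical extension page
        reasons: ['USER_MEDIA'],
        justification: 'Keep the screen-capture stream alive across navigations'
    });
    // The offscreen page would call getUserMedia with this streamId
    // and hold the resulting MediaStream for screenshots/recording.
    chrome.runtime.sendMessage({ type: 'START_CAPTURE', streamId: streamId });
}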

Video camera starts then stops in React

It worked fine before, but now it doesn't show the video in the video HTML element. It starts the camera (the recording indicator appears on the Chrome tab, then the white light beside the camera comes on), but after 5 seconds it goes off. The recorded file also downloaded when I stopped the stream with another button.
Below is the code:
await navigator.mediaDevices
    .getUserMedia({
        audio: true,
        video: { width: 1280, height: 720, facingMode: "user" },
    })
    .then(function (stream) {
        remotevideo.current.srcObject = stream;
        remotevideo.current.play();
        var streams = remotevideo.current.captureStream(25);
        var recordedChunks = [];
        var mediaRecorder = null;
        console.log(streams);
        var options = { mimeType: "video/webm; codecs=vp9" };
        mediaRecorder = new MediaRecorder(streams, options);
        mediaRecorder.ondataavailable = handleDataAvailable;
        mediaRecorder.start();

        function handleDataAvailable(event) {
            console.log("data-available");
            if (event.data.size > 0) {
                recordedChunks.push(event.data);
                console.log(recordedChunks);
                download();
            } else {
                // ...
            }
        }

        function download() {
            var blob = new Blob(recordedChunks, {
                type: "video/webm"
            });
            var url = URL.createObjectURL(blob);
            var a = document.createElement("a");
            document.body.appendChild(a);
            a.style = "display: none";
            a.href = url;
            a.download = "test.webm";
            a.click();
            window.URL.revokeObjectURL(url);
        }

        if (!remotevideo.current.srcObject) {
            mediaRecorder.stop();
        }
    })
    .catch(function (err) {
        console.log("An error occurred: " + err);
    });

How to record screen+audio+microphone with RecordRTC

I am working on a project where I need the user to be able to record the screen, audio, and microphone. At the moment I can only make it capture the screen and its audio.
First I capture the screen and the audio from it and save the stream to a variable, and then I use that variable to show it in a video component.
invokeGetDisplayMedia(success, error) {
    let displaymediastreamconstraints = {
        video: {
            displaySurface: 'monitor', // monitor, window, application, browser
            logicalSurface: true,
            cursor: 'always' // never, always, motion
        }
    };
    // above constraints are NOT supported YET,
    // that's why we're overriding them
    displaymediastreamconstraints = {
        video: true,
        audio: true
    };
    if (navigator.mediaDevices.getDisplayMedia) {
        navigator.mediaDevices.getDisplayMedia(displaymediastreamconstraints).then(success).catch(error);
    } else {
        navigator.getDisplayMedia(displaymediastreamconstraints).then(success).catch(error);
    }
},
captureScreen(callback) {
    this.invokeGetDisplayMedia((screen) => {
        this.addStreamStopListener(screen, () => {
            //
        });
        callback(screen);
    }, function (error) {
        console.error(error);
        alert('Unable to capture your screen. Please check console logs.\n' + error);
    });
},
startRecording() {
    this.captureScreen(screen => {
        this.audioStream = audio;
        console.log(audio);
        this.video = this.$refs.videoScreen;
        this.video.srcObject = screen;
        this.recorder = RecordRTC(screen, {
            type: 'video'
        });
        this.recorder.startRecording();
        // release screen on stopRecording
        this.recorder.screen = screen;
        this.videoStart = true;
    });
},
I fixed it by adding a function that captures the audio from the microphone:
captureAudio(success, error) {
    let displayuserstreamconstraints = {
        audio: true
    };
    if (navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices.getUserMedia(displayuserstreamconstraints).then(success).catch(error);
    } else {
        navigator.getUserMedia(displayuserstreamconstraints).then(success).catch(error);
    }
},
And by calling it in the startRecording method:
startRecording() {
    this.captureAudio((audio) => {
        this.captureScreen(screen => {
            this.video = this.$refs.videoScreen;
            this.audioStream = audio;
            this.video.srcObject = screen;
            this.recorder = RecordRTC(screen, {
                type: 'video'
            });
            this.recorder.startRecording();
            // release screen on stopRecording
            this.recorder.screen = screen;
            this.videoStart = true;
        });
    });
},
And by updating the stopRecording callback:
stopRecordingCallback() {
    this.video.src = this.video.srcObject = null;
    this.video = this.$refs.videoScreen;
    this.video.src = URL.createObjectURL(this.recorder.getBlob());
    this.recorder.screen.stop();
    // MediaStream.stop() is deprecated; stop the individual tracks instead
    this.audioStream.getTracks().forEach(track => track.stop());
    this.recorder.destroy();
    this.recorder = null;
},
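Note that as written the recorder only receives the screen stream, so the microphone is captured but not mixed into the saved file. If the mic needs to be in the recording too, one option (a sketch, not from the original answer) is to hand RecordRTC a single MediaStream combining both sets of tracks:
// Merge the screen's tracks and the microphone's audio track into one stream.
const mixed = new MediaStream([
    ...screen.getVideoTracks(),
    ...screen.getAudioTracks(), // system/tab audio, when the browser provides it
    ...audio.getAudioTracks()   // microphone
]);
this.recorder = RecordRTC(mixed, { type: 'video' });
Depending on the browser, only the first audio track may actually be recorded; mixing both tracks through an AudioContext is the more robust route.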

Web MediaRecorder API cannot record audio and video simultaneously

I've been trying to record video and audio with the MediaRecorder API, but it will only let me record my screen without audio. Do I need two separate streams that I merge into one? But then why is it possible to set { audio: true, video: true } in the navigator.mediaDevices.getDisplayMedia() call?
This is my code:
async function startRecording() {
    let mimeType = "video/webm;codecs=vp9";
    try {
        const mediaDevices = navigator.mediaDevices as any;
        const stream = await mediaDevices.getDisplayMedia({
            audio: true,
            video: true,
        });
        const options = {
            mimeType: mimeType,
            bitsPerSecond: 500000,
        };
        let recorder = new MediaRecorder(stream, options);
        const chunks = [];
        recorder.ondataavailable = (e) => {
            if (e.data.size > 0) {
                chunks.push(e.data);
            } else {
                console.log("no data to push");
            }
        };
        recorder.onstop = (e) => {
            const completeBlob = new Blob(chunks, {
                type: chunks[0].type
            });
            stream.getTracks().forEach((track) => {
                track.stop();
                console.log(track);
            });
            setVideoData({
                recorded: true,
                localVideoURL: URL.createObjectURL(completeBlob),
                blob: completeBlob,
            });
        };
        recorder.start();
    } catch (error) {
        console.log(error);
    }
}
Any pointers greatly appreciated.
Most browsers don't support capturing audio with display media. Even in Chrome and Chromium variants, capture support depends on the OS.
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getDisplayMedia#Browser_compatibility
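A common workaround (a sketch, not from the original answer) is to request the screen video with getDisplayMedia, request the audio separately with getUserMedia, and hand MediaRecorder a stream combining the two:
// Capture screen video and microphone audio separately, then merge the
// tracks into one stream for MediaRecorder.
async function recordScreenWithMic() {
    const screen = await navigator.mediaDevices.getDisplayMedia({ video: true });
    const mic = await navigator.mediaDevices.getUserMedia({ audio: true });
    const combined = new MediaStream([
        ...screen.getVideoTracks(),
        ...mic.getAudioTracks(),
    ]);
    const recorder = new MediaRecorder(combined, { mimeType: 'video/webm;codecs=vp9' });
    recorder.start();
    return recorder;
}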

Saving desktopCapturer to video file in Electron

The desktopCapturer API example shows how to write a screen capture stream to a <video> element.
// In the renderer process.
var desktopCapturer = require('electron').desktopCapturer;

desktopCapturer.getSources({ types: ['window', 'screen'] }, function (error, sources) {
    if (error) throw error;
    for (var i = 0; i < sources.length; ++i) {
        if (sources[i].name == "Electron") {
            navigator.webkitGetUserMedia({
                audio: false,
                video: {
                    mandatory: {
                        chromeMediaSource: 'desktop',
                        chromeMediaSourceId: sources[i].id,
                        minWidth: 1280,
                        maxWidth: 1280,
                        minHeight: 720,
                        maxHeight: 720
                    }
                }
            }, gotStream, getUserMediaError);
            return;
        }
    }
});

function gotStream(stream) {
    document.querySelector('video').src = URL.createObjectURL(stream);
}

function getUserMediaError(e) {
    console.log('getUserMediaError');
}
I tried to replace the gotStream function with the following:
function gotStream(stream) {
    var fs = require('fs');
    fs.writeFileSync('vid.mp4', stream);
}
This creates a text file with [object MediaStream] as the contents.
How can I record this stream and save it to a file on disk?
I answered my own question with the help of Demian's link to MediaRecorder as well as other related questions.
Below is an excerpt from magnemite with some minor simplifications, converted from TypeScript to JavaScript ES5 so it is easier for most readers to follow.
var fs = require('fs');
var electron = require('electron');

var SECRET_KEY = 'Magnemite';
var recorder;
var blobs = [];

function startRecording() {
    var title = document.title;
    document.title = SECRET_KEY;
    electron.desktopCapturer.getSources({ types: ['window', 'screen'] }, function (error, sources) {
        if (error) throw error;
        for (let i = 0; i < sources.length; i++) {
            let src = sources[i];
            if (src.name === SECRET_KEY) {
                document.title = title;
                navigator.webkitGetUserMedia({
                    audio: false,
                    video: {
                        mandatory: {
                            chromeMediaSource: 'desktop',
                            chromeMediaSourceId: src.id,
                            minWidth: 800,
                            maxWidth: 1280,
                            minHeight: 600,
                            maxHeight: 720
                        }
                    }
                }, handleStream, handleUserMediaError);
                return;
            }
        }
    });
}

function handleStream(stream) {
    recorder = new MediaRecorder(stream);
    blobs = [];
    recorder.ondataavailable = function (event) {
        blobs.push(event.data);
    };
    recorder.start();
}

function stopRecording() {
    recorder.stop();
    toArrayBuffer(new Blob(blobs, { type: 'video/webm' }), function (ab) {
        var buffer = toBuffer(ab);
        var file = './videos/example.webm';
        fs.writeFile(file, buffer, function (err) {
            if (err) {
                console.error('Failed to save video ' + err);
            } else {
                console.log('Saved video: ' + file);
            }
        });
    });
}

function handleUserMediaError(e) {
    console.error('handleUserMediaError', e);
}

function toArrayBuffer(blob, cb) {
    let fileReader = new FileReader();
    fileReader.onload = function () {
        let arrayBuffer = this.result;
        cb(arrayBuffer);
    };
    fileReader.readAsArrayBuffer(blob);
}

function toBuffer(ab) {
    return Buffer.from(ab);
}

// Record for 7 seconds and save to disk
startRecording();
setTimeout(function () { stopRecording(); }, 7000);
This will record the current Electron window for 7 seconds and save it to disk.
Take a look at the MediaRecorder API
You should convert that stream to binary chunks.
The desktopCapturer example shows how to get a Blob. You then need to convert the Blob to something accepted by fs.writeFile. The following is short and efficient:
fs.writeFile(filepath, Buffer.from(await blob.arrayBuffer()), ...);
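Tying that together with MediaRecorder's onstop handler might look like this (a sketch; recorder is assumed to be an existing MediaRecorder and the file name is illustrative):
const fs = require('fs');

// Collect chunks while recording, then write the finished Blob to disk.
const chunks = [];
recorder.ondataavailable = (event) => chunks.push(event.data);
recorder.onstop = async () => {
    const blob = new Blob(chunks, { type: 'video/webm' });
    // Blob#arrayBuffer() resolves to an ArrayBuffer, which Buffer.from accepts.
    fs.writeFile('recording.webm', Buffer.from(await blob.arrayBuffer()), (err) => {
        if (err) console.error('Failed to save video:', err);
    });
};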
