MediaStream: Capture Canvas and Audio Simultaneously - JavaScript

I'm working on a project in which I'd like to:
Load a video in JavaScript and display it on a canvas.
Use filters to alter the appearance of the canvas (and therefore the video).
Use the MediaStream captureStream() method and a MediaRecorder object to record the surface of the canvas and the audio of the original video.
Play the stream of both the canvas and the audio in an HTML video element.
I've been able to display the canvas recording in a video element by tweaking this WebRTC demo code: https://webrtc.github.io/samples/src/content/capture/canvas-record/
That said, I can't figure out how to record the video's audio alongside the canvas. Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
According to the MediaStream API's spec, there should theoretically be some way to accomplish this:
https://w3c.github.io/mediacapture-main/#introduction
"The two main components in the MediaStream API are the MediaStreamTrack and MediaStream interfaces. The MediaStreamTrack object represents media of a single type that originates from one media source in the User Agent, e.g. video produced by a web camera. A MediaStream is used to group several MediaStreamTrack objects into one unit that can be recorded or rendered in a media element."

Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
Yes, you can do it using the MediaStream.addTrack() method, or new MediaStream([track1, track2]).
The OP already knows how to get all of this, but here is a reminder for future readers:
To get a video stream track from a <canvas>, you can call the canvas.captureStream(framerate) method.
To get an audio stream track from a <video> element, you can use the Web Audio API and its createMediaStreamDestination() method. This returns a MediaStreamAudioDestinationNode (dest) whose stream property contains our audio stream. You'll then have to connect a MediaElementAudioSourceNode, created from your <video> element, to this dest.
If you need to add more audio tracks to this stream, connect all of those sources to dest, as in the sketch below.
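For instance, here is a minimal sketch of that wiring (videoEl stands for your already-loaded <video> element):
const audioCtx = new AudioContext();
// dest.stream is a MediaStream holding one audio track
const dest = audioCtx.createMediaStreamDestination();
const sourceNode = audioCtx.createMediaElementSource(videoEl);
sourceNode.connect(dest);                 // feed the stream
sourceNode.connect(audioCtx.destination); // optional: keep it audible
const audioStream = dest.stream;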
Now that we've got two streams, one for the <canvas> video and one for the audio, we can either add the audio track to the canvas stream before we initialize the recorder:
canvasStream.addTrack(audioStream.getAudioTracks()[0]);
const recorder = new MediaRecorder(canvasStream);
or we can create a third MediaStream object from these two tracks:
const [videoTrack] = canvasStream.getVideoTracks();
const [audioTrack] = audioStream.getAudioTracks();
const recordedStream = new MediaStream([videoTrack, audioTrack]);
const recorder = new MediaRecorder(recordedStream);
Here is a complete example:
var btn = document.querySelector("button"),
    canvas,
    cStream,
    aStream,
    vid,
    recorder,
    analyser,
    dataArray,
    bufferLength,
    chunks = [];

function clickHandler() {
  btn.textContent = 'stop recording';
  if (!aStream) {
    initAudioStream();
  }
  cStream = canvas.captureStream(30);
  cStream.addTrack(aStream.getAudioTracks()[0]);
  recorder = new MediaRecorder(cStream);
  recorder.start();
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportStream;
  btn.onclick = stopRecording;
}

function exportStream(e) {
  if (chunks.length) {
    var blob = new Blob(chunks, { type: chunks[0].type });
    var vidURL = URL.createObjectURL(blob);
    var vid = document.createElement('video');
    vid.controls = true;
    vid.src = vidURL;
    vid.onended = function() {
      URL.revokeObjectURL(vidURL);
    };
    document.body.insertBefore(vid, canvas);
  } else {
    document.body.insertBefore(document.createTextNode('no data saved'), canvas);
  }
}

function saveChunks(e) {
  e.data.size && chunks.push(e.data);
}

function stopRecording() {
  vid.pause();
  btn.remove();
  recorder.stop();
}

function initAudioStream() {
  var audioCtx = new AudioContext();
  // create a stream from our AudioContext
  var dest = audioCtx.createMediaStreamDestination();
  aStream = dest.stream;
  // connect our video element's output to the stream
  var sourceNode = audioCtx.createMediaElementSource(vid);
  sourceNode.connect(dest);
  // start the video
  vid.play();
  // just for the fancy canvas drawings
  analyser = audioCtx.createAnalyser();
  sourceNode.connect(analyser);
  analyser.fftSize = 2048;
  bufferLength = analyser.frequencyBinCount;
  dataArray = new Uint8Array(bufferLength);
  analyser.getByteTimeDomainData(dataArray);
  // output to our headphones
  sourceNode.connect(audioCtx.destination);
  startCanvasAnim();
}

function enableButton() {
  vid.oncanplay = null;
  btn.onclick = clickHandler;
  btn.disabled = false;
}

var loadVideo = function() {
  vid = document.createElement('video');
  vid.crossOrigin = 'anonymous';
  vid.oncanplay = enableButton;
  vid.src = 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4';
};

function startCanvasAnim() {
  // adapted from MDN: https://developer.mozilla.org/en/docs/Web/API/AnalyserNode#Examples
  canvas = Object.assign(document.createElement("canvas"), { width: 500, height: 200 });
  document.body.prepend(canvas);
  var canvasCtx = canvas.getContext('2d');
  canvasCtx.fillStyle = 'rgb(200, 200, 200)';
  canvasCtx.lineWidth = 2;
  canvasCtx.strokeStyle = 'rgb(0, 0, 0)';

  var draw = function() {
    requestAnimationFrame(draw);
    analyser.getByteTimeDomainData(dataArray);
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
    canvasCtx.beginPath();
    var sliceWidth = canvas.width * 1.0 / bufferLength;
    var x = 0;
    for (var i = 0; i < bufferLength; i++) {
      var v = dataArray[i] / 128.0;
      var y = v * canvas.height / 2;
      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }
      x += sliceWidth;
    }
    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();
  };
  draw();
}

loadVideo();
button { vertical-align: top }
<button disabled>record</button>

Kaiido's demo is brilliant. For those just looking for the tl;dr code to add an audio stream to their existing canvas stream:
let videoOrAudioElement = /* your audio source element */;
// get the audio track:
let ctx = new AudioContext();
let dest = ctx.createMediaStreamDestination();
let sourceNode = ctx.createMediaElementSource(videoOrAudioElement);
sourceNode.connect(dest);
sourceNode.connect(ctx.destination);
let audioTrack = dest.stream.getAudioTracks()[0];
// add it to your canvas stream:
canvasStream.addTrack(audioTrack);
// use your canvas stream like you would normally:
let recorder = new MediaRecorder(canvasStream);
// ...
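One hedged addition, not part of the original answer: when you're done recording, stop the recorder and release the tracks and the AudioContext so nothing keeps running in the background:
recorder.stop();
canvasStream.getTracks().forEach(track => track.stop());
ctx.close(); // also frees the AudioContext created above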

Related

Javascript captureStream with audio [duplicate]


unable to double the volume of audio

I want to play only the audio of an mp4 video file. So far I have this, and the function works fine without any errors:
function MovieAudio() {
  const video = document.createElement('video');
  document.body.appendChild(video);
  video.id = 'clip';
  const clip = document.getElementById("clip");
  clip.style.visibility = "hidden";
  const source = document.createElement('source');
  source.src = 'myvideo.mp4';
  source.type = 'video/mp4';
  video.appendChild(source);
  video.load();
  clip.volume = 1;
  clip.play();
} // end of MovieAudio function
The problem is that I want to double the volume of the audio, and if I set the volume like this I get an error:
clip.volume = 2;
I found a solution here, but I can't make the code work:
https://stackoverflow.com/a/43794379/10715551
// create an audio context and hook up the video element as the source
var audioCtx = new AudioContext();
var source = audioCtx.createMediaElementSource(clip);
// create a gain node
var gainNode = audioCtx.createGain();
gainNode.gain.value = 2; // double the volume
source.connect(gainNode);
// connect the gain node to an output destination
gainNode.connect(audioCtx.destination);
How can I double the volume of audio with the given code?
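For what it's worth, here is a minimal, untested sketch (not from the original thread) of how that gain-node snippet could be folded into the function above. The key point is that createMediaElementSource() reroutes the element's audio through the graph, so the gain node now controls the output level; note that browsers may also require a user gesture before the AudioContext starts producing sound.
function MovieAudioLoud() {
  const clip = document.createElement('video');
  clip.style.visibility = 'hidden';
  clip.src = 'myvideo.mp4';
  document.body.appendChild(clip);
  const audioCtx = new AudioContext();
  // once connected here, the element no longer plays directly to the speakers
  const source = audioCtx.createMediaElementSource(clip);
  const gainNode = audioCtx.createGain();
  gainNode.gain.value = 2; // double the volume
  source.connect(gainNode);
  gainNode.connect(audioCtx.destination);
  clip.play();
}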

Is there a way to perform video rendering on a worker thread? [duplicate]

Basically, I want to be able to run, in effect, this same code:
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');
const context = canvas.getContext('2d');
const draw = () => {
  context.drawImage(video, 0, 0);
  requestAnimationFrame(draw);
};

video.onplay = () => {
  requestAnimationFrame(draw);
};
only using an offscreen canvas. I can send images over messages to the worker that the offscreen canvas lives in, but not the video, as it's directly tied to an HTMLElement. Is there currently a way to render video data or a MediaStream in an OffscreenCanvas?
You can send frames of a video to an OffscreenCanvas in a Web Worker by modifying your script with the following changes:
const worker = new Worker('my-worker.js');

const video = document.getElementById('video');
const stream = video.captureStream();
const [track] = stream.getVideoTracks();
const imageCapture = new ImageCapture(track);

const canvas = document.getElementById('canvas');
const offscreen = canvas.transferControlToOffscreen();
worker.postMessage({ offscreen }, [offscreen]);

const draw = () => {
  imageCapture.grabFrame().then(imageBitmap => {
    worker.postMessage({ imageBitmap }, [imageBitmap]);
  });
  requestAnimationFrame(draw);
};

video.onplay = () => {
  requestAnimationFrame(draw);
};
my-worker.js
let canvas;
let context;

addEventListener('message', event => {
  if (event.data.offscreen) {
    canvas = event.data.offscreen;
    context = canvas.getContext('2d');
  } else if (event.data.imageBitmap && context) {
    context.drawImage(event.data.imageBitmap, 0, 0);
    // do something with the frame
  }
});
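A hedged side note: ImageCapture is only available in Chromium-based browsers at the time of writing. A similar loop can be built on createImageBitmap(), which accepts a <video> element directly:
const draw = () => {
  createImageBitmap(video).then(imageBitmap => {
    // ImageBitmap is transferable, so this avoids a copy
    worker.postMessage({ imageBitmap }, [imageBitmap]);
  });
  requestAnimationFrame(draw);
};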
References
HTMLMediaElement.prototype.captureStream()
MediaStream.prototype.getVideoTracks()
new ImageCapture()
ImageCapture.prototype.grabFrame()

Get FFT data of an audio file in JavaScript?

In real time, we do this to get the frequency data while the audio is playing:
window.onload = function() {
  audio.load();
  audio.play();
  var context = new AudioContext();
  var source = context.createMediaElementSource(audio);
  var analyser = context.createAnalyser();
  analyser.fftSize = 512;
  source.connect(analyser);
  analyser.connect(context.destination);
  var array = new Uint8Array(analyser.frequencyBinCount);
  function render() {
    requestAnimationFrame(render);
    analyser.getByteFrequencyData(array);
    // process frequency details here
  }
  render();
};
I want to get the frequency of an audio clip 30 times a second, in non-real time. For example:
var subSecond = 1 / 30;
var frequency = getFrequencyAt(64 * subSecond);

function getFrequencyAt(s) {
  // logic to get the frequency here
}
How can I achieve this efficiently?
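One possible approach, sketched here under the assumption that the browser supports OfflineAudioContext.suspend() (Chromium does): decode the file once, render it faster than real time, and pause the offline context every 1/30 s to snapshot the analyser:
async function analyseAt30Fps(arrayBuffer) {
  const decoded = await new AudioContext().decodeAudioData(arrayBuffer);
  const offline = new OfflineAudioContext(
    decoded.numberOfChannels, decoded.length, decoded.sampleRate);
  const source = offline.createBufferSource();
  source.buffer = decoded;
  const analyser = offline.createAnalyser();
  analyser.fftSize = 512;
  source.connect(analyser);
  analyser.connect(offline.destination);
  const bins = new Uint8Array(analyser.frequencyBinCount);
  const frames = []; // one frequency snapshot per 1/30 s
  // schedule all suspend points before rendering starts
  for (let t = 1 / 30; t < decoded.duration; t += 1 / 30) {
    offline.suspend(t).then(() => {
      analyser.getByteFrequencyData(bins);
      frames.push(bins.slice());
      offline.resume();
    });
  }
  source.start(0);
  await offline.startRendering();
  return frames;
}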

Stereo convolution with web audio

I'm trying to convolve a mono impulse with a stereo audio file using the Web Audio API. The problem is that instead of getting true stereo output, I'm getting what looks and sounds like the same track duplicated to both channels. Here's my code:
var context = new AudioContext();
var source = context.createBufferSource();
source.buffer = BUFFERS.user; // stereo file
var splitter = context.createChannelSplitter(2);
var convolverL = context.createConvolver();
convolverL.normalize = false;
convolverL.buffer = BUFFERS.impulse; // mono impulse
var convolverR = context.createConvolver();
convolverR.normalize = false;
convolverR.buffer = BUFFERS.impulse; // same mono impulse
var merger = context.createChannelMerger(2);
var gain = context.createGain();
gain.gain.value = 0.75;
// make connections
source.connect(splitter);
splitter.connect(convolverL, 0);
splitter.connect(convolverR, 1);
convolverL.connect(merger, 0, 0);
convolverR.connect(merger, 0, 1);
merger.connect(gain);
gain.connect(context.destination);
source.start(0);
