I'm working on a project in which I'd like to:
Load a video in JavaScript and display it on the canvas.
Use filters to alter the appearance of the canvas (and therefore the video).
Use the MediaStream captureStream() method and a MediaRecorder object to record the surface of the canvas and the audio of the original video.
Play the stream of both the canvas and the audio in an HTML video element.
I've been able to display the canvas recording in a video element by tweaking this WebRTC demo code: https://webrtc.github.io/samples/src/content/capture/canvas-record/
That said, I can't figure out how to record the video's audio alongside the canvas. Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
According to the MediaStream API's specs there should theoretically be some way to accomplish this:
https://w3c.github.io/mediacapture-main/#introduction
"The two main components in the MediaStream API are the MediaStreamTrack and MediaStream interfaces. The MediaStreamTrack object represents media of a single type that originates from one media source in the User Agent, e.g. video produced by a web camera. A MediaStream is used to group several MediaStreamTrack objects into one unit that can be recorded or rendered in a media element."
Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
Yes, you can do it using the MediaStream.addTrack() method, or new MediaStream([track1, track2]).
The OP already knew how to get all of this, but here is a reminder for future readers:
To get a video stream track from a <canvas>, you can call canvas.captureStream(framerate) method.
To get an audio stream track from a <video> element you can use the Web Audio API and its createMediaStreamDestination method.
This will return a MediaStreamAudioDestinationNode node (dest) containing our audio stream. You'll then have to connect a MediaElementAudioSourceNode created from your <video> element, to this dest.
If you need to add more audio tracks to this stream, you should connect all these sources to dest.
Now that we've got two streams, one for the <canvas> video and one for the audio, we can either add the audio track to the canvas stream before we initialize the recorder:
canvasStream.addTrack(audioStream.getAudioTracks()[0]);
const recorder = new MediaRecorder(canvasStream)
or we can create a third MediaStream object from these two tracks:
const [videoTrack] = canvasStream.getVideoTracks();
const [audioTrack] = audioStream.getAudioTracks();
// BUG FIX: the MediaStream constructor takes an ARRAY of tracks —
// `new MediaStream(videoTrack, audioTrack)` would ignore the second track.
const recordedStream = new MediaStream([videoTrack, audioTrack]);
const recorder = new MediaRecorder(recordedStream);
Here is a complete example:
var
btn = document.querySelector("button"), // single record/stop toggle button
canvas,        // visualizer canvas, created in startCanvasAnim()
cStream,       // canvas.captureStream() video stream being recorded
aStream,       // MediaStreamDestination audio stream from the Web Audio graph
vid,           // source <video> element, created in loadVideo()
recorder,      // MediaRecorder fed with cStream (video + muxed audio track)
analyser,      // AnalyserNode used only for the waveform drawing
dataArray,     // time-domain sample buffer filled from the analyser
bufferLength,  // analyser.frequencyBinCount (number of samples per frame)
chunks = [];   // recorded Blob chunks collected by saveChunks()
function clickHandler() {
  // Flip the button into "stop" mode and wire up the recording pipeline.
  btn.textContent = 'stop recording';

  // Lazily build the Web Audio graph the first time we record.
  if (!aStream) initAudioStream();

  cStream = canvas.captureStream(30);            // 30 fps canvas video track
  cStream.addTrack(aStream.getAudioTracks()[0]); // mux in the audio track

  recorder = new MediaRecorder(cStream);
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportStream;
  recorder.start();

  // The next click stops the recording instead of starting a new one.
  btn.onclick = stopRecording;
}
function exportStream(e) {
  // Recorder "stop" handler: assemble the collected chunks into a Blob
  // and show it in a new <video> element inserted before the canvas.
  if (chunks.length) {
    var blob = new Blob(chunks, { type: chunks[0].type });
    var vidURL = URL.createObjectURL(blob);
    // Renamed from `vid` — the original shadowed the global source video.
    var playback = document.createElement('video');
    playback.controls = true;
    playback.src = vidURL;
    // BUG FIX: the media event is "ended", not "end"; the original
    // `onend` handler never fired, so the object URL was never revoked.
    playback.onended = function() {
      URL.revokeObjectURL(vidURL);
    };
    document.body.insertBefore(playback, canvas);
  } else {
    document.body.insertBefore(document.createTextNode('no data saved'), canvas);
  }
}
function saveChunks(e) {
  // Keep only non-empty dataavailable payloads.
  if (e.data.size > 0) {
    chunks.push(e.data);
  }
}
function stopRecording() {
  // Pause playback, remove the (now useless) button, and finalize the
  // recording; recorder.onstop (exportStream) will render the result.
  vid.pause();
  btn.remove();
  recorder.stop();
}
function initAudioStream() {
  // One-time Web Audio setup: route the <video>'s audio to three sinks —
  // a MediaStreamDestination (recorded), an AnalyserNode (visualized),
  // and the context destination (audible).
  var audioCtx = new AudioContext();
  // create a stream from our AudioContext
  var dest = audioCtx.createMediaStreamDestination();
  aStream = dest.stream; // shared global; clickHandler muxes its audio track
  // connect our video element's output to the stream
  var sourceNode = audioCtx.createMediaElementSource(vid);
  sourceNode.connect(dest)
  // start the video
  vid.play();
  // just for the fancy canvas drawings
  analyser = audioCtx.createAnalyser();
  sourceNode.connect(analyser);
  analyser.fftSize = 2048; // frequencyBinCount = fftSize / 2 = 1024 samples
  bufferLength = analyser.frequencyBinCount;
  dataArray = new Uint8Array(bufferLength);
  analyser.getByteTimeDomainData(dataArray);
  // output to our headphones
  sourceNode.connect(audioCtx.destination)
  startCanvasAnim();
}
function enableButton() {
  // The video can play: detach this one-shot handler and arm the button.
  vid.oncanplay = null;
  btn.onclick = clickHandler;
  btn.disabled = false;
};
var loadVideo = function() {
  // Create the (off-DOM) source <video>; enableButton fires once it can play.
  vid = Object.assign(document.createElement('video'), {
    crossOrigin: 'anonymous', // avoid tainting the canvas / capture stream
    oncanplay: enableButton,
    src: 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4',
  });
};
function startCanvasAnim() {
  // Draws a live oscilloscope-style waveform of the analyser's
  // time-domain data onto a freshly created 500x200 canvas.
  // from MDN https://developer.mozilla.org/en/docs/Web/API/AnalyserNode#Examples
  canvas = Object.assign(document.createElement("canvas"), { width: 500, height: 200});
  document.body.prepend(canvas);
  var canvasCtx = canvas.getContext('2d');
  canvasCtx.fillStyle = 'rgb(200, 200, 200)';
  canvasCtx.lineWidth = 2;
  canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
  var draw = function() {
    var drawVisual = requestAnimationFrame(draw); // schedule the next frame first
    analyser.getByteTimeDomainData(dataArray);
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height); // clear with grey
    canvasCtx.beginPath();
    var sliceWidth = canvas.width * 1.0 / bufferLength; // px per sample
    var x = 0;
    for (var i = 0; i < bufferLength; i++) {
      var v = dataArray[i] / 128.0; // normalize byte (0-255) around 1.0
      var y = v * canvas.height / 2;
      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }
      x += sliceWidth;
    }
    canvasCtx.lineTo(canvas.width, canvas.height / 2); // close at the midline
    canvasCtx.stroke();
  };
  draw();
}
// Kick everything off: create the source video element.
loadVideo();
button { vertical-align: top }
<button disabled>record</button>
Kaiido's demo is brilliant. For those just looking for the tl;dr code to add an audio stream to their existing canvas stream:
// Pick your audio source element (any <audio> or <video> will do).
// BUG FIX: the original read `let videoOrAudioElement = /* comment */;`,
// which is a SyntaxError — the initializer was missing entirely.
let videoOrAudioElement = document.querySelector('video');
// get the audio track:
let ctx = new AudioContext();
let dest = ctx.createMediaStreamDestination();
let sourceNode = ctx.createMediaElementSource(videoOrAudioElement);
sourceNode.connect(dest);
sourceNode.connect(ctx.destination); // keep it audible locally as well
let audioTrack = dest.stream.getAudioTracks()[0];
// add it to your canvas stream:
canvasStream.addTrack(audioTrack);
// use your canvas stream like you would normally:
let recorder = new MediaRecorder(canvasStream);
// ...
Related
Wondering if anyone can help me out here!
I'm currently saving a HTML5 Canvas as an MP4 file via the MediaRecorder API. Now my canvas doesn't contain any audio but I need an audio channel built in as the file with just h.264 and no audio codec isn't compatible with a piece of software I am using.
Is there anyway to force Safari to bake in an audio codec into the stream even though there is no audio being used in the canvas?
Essentially I'm trying to achieve the following: AAC, H.264
Rather than what I have right now: H.264
Here is what I have so far (minus some other details):
// setup media recording
const recordedChunks = [];
const stream = canvas.captureStream(60);
// FIX: declare mediaRecorder instead of leaking an implicit global.
const mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = (e) => recordedChunks.push(e.data);
mediaRecorder.onstop = async (e) => {
  // Build a temporary <a download> link and click it programmatically.
  const download = (fileName, url) => {
    const a = document.createElement("a");
    document.body.appendChild(a);
    a.style = "display: none";
    a.href = url;
    a.download = fileName;
    a.click();
    window.URL.revokeObjectURL(url);
  };
  // download video
  const videoData = new Blob(recordedChunks, { type: "video/mp4" });
  download("1.mp4", URL.createObjectURL(videoData));
};
// start recording
mediaRecorder.start();
// do some canvas related operations
// ...
mediaRecorder.stop();
I guess if there's no work around here I might just resort to adding a silent audio channel to the video via FFMPEG.
UPDATE:
The accepted answer didn't actually work for me so I resorted to adding the audio channel through FFMPEG which worked. Accepted anyway as it does add an audio channel to the outputted file.
Thank you!
I'm not familiar with codecs and such but you can add a silent audio channel to a video stream as follows:
// Capture the canvas at 60 fps, then attach a silent audio track
// produced by a 0 Hz oscillator so the recording gets an audio channel.
const stream = canvas.captureStream(60);
const audioContext = new AudioContext();
const oscillator = audioContext.createOscillator();
oscillator.frequency.value = 0; // silent tone
const streamAudioDestination = audioContext.createMediaStreamDestination();
oscillator.connect(streamAudioDestination);
oscillator.start();
// add audio track
const [silentTrack] = streamAudioDestination.stream.getAudioTracks();
stream.addTrack(silentTrack);
const mediaRecorder = new MediaRecorder(stream);
Note that initialization of AudioContext should happen in response to a user action (e.g. within a click handler). Thank you #Nikola Lukic for noticing this!
If anybody gets the error "The AudioContext was not allowed to start. It must be resumed (or created) after a user gesture on the page.":
Here is an adapted example using @Aziz Yokubjonov's code:
function LOAD() {
  // Record the canvas (plus a silent oscillator audio track, so the output
  // file has an audio channel) for 10 seconds, then offer it as a download.
  const canvas = document.getElementById('canvas');
  const ctx = canvas.getContext('2d');

  // setup media recording
  const recordedChunks = [];
  const stream = canvas.captureStream(60);

  // NOTE: the original also passed `audio: true`, which is a getUserMedia
  // constraint and is ignored by MediaRecorder; only the bitrate hint is kept.
  const options = {
    audioBitsPerSecond: 64000,
  };

  // Silent audio: a 0 Hz oscillator routed into a stream destination.
  const audioContext = new AudioContext();
  const oscillator = audioContext.createOscillator();
  oscillator.frequency.value = 0;
  const streamAudioDestination = audioContext.createMediaStreamDestination();
  oscillator.connect(streamAudioDestination);
  oscillator.start();

  // add audio track
  const [firstAudioTrack] = streamAudioDestination.stream.getAudioTracks();
  stream.addTrack(firstAudioTrack);

  // FIX: declare mediaRecorder locally instead of leaking an implicit global.
  const mediaRecorder = new MediaRecorder(stream, options);
  mediaRecorder.ondataavailable = (e) => recordedChunks.push(e.data);
  mediaRecorder.onstop = (e) => {
    clearInterval(animTimer); // FIX: stop drawing once recording is done
    const download = (fileName, url) => {
      const a = document.createElement("a");
      document.body.appendChild(a);
      a.style = "display: block";
      a.innerHTML = 'LINK';
      a.href = url;
      a.download = fileName;
      a.click();
      window.URL.revokeObjectURL(url);
    };
    // download video
    const videoData = new Blob(recordedChunks, { type: "video/mp4" });
    download("1.mp4", URL.createObjectURL(videoData));
  };

  // start recording
  mediaRecorder.start();

  // do some canvas related operations
  ctx.fillText("TEST TEST", 10, 50, 200, 50);
  let x = 10;
  const animTimer = setInterval(function() {
    x += 20;
    ctx.fillStyle = 'red';
    ctx.fillText("TEST TEST", x, 50, 200, 50);
  }, 1000);

  setTimeout(function() {
    mediaRecorder.stop();
  }, 10000);
}
function attachFunction() {
  // User-gesture gate: AudioContext may only start after a gesture, so
  // recording begins on the first click, then this handler detaches itself.
  LOAD();
  window.removeEventListener("click", attachFunction)
}
window.addEventListener("click", attachFunction)
<canvas id='canvas'></canvas>
Thanks, maybe this will help?
```a.href``` =
url:
````a.download``` =
fileName:
```a.click```(.264):
():
```c.AAC```
```H.264```
I want to play only the audio of a mp4 video file. So far I'm here and this function works fine without any errors:
function MovieAudio() {
  // Play only the audio of an mp4 by appending a hidden <video> element.
  const video = document.createElement('video');
  document.body.appendChild(video);
  video.id = 'clip';
  // FIX: no need to re-fetch the element via getElementById — `video`
  // IS the element we just created and appended.
  video.style.visibility = "hidden";
  const source = document.createElement('source');
  source.src = 'myvideo.mp4';
  source.type = 'video/mp4';
  video.appendChild(source);
  video.load();
  // HTMLMediaElement.volume is clamped to [0, 1]; setting 2 throws.
  // Use a Web Audio GainNode (see the snippet below) to boost beyond 1.
  video.volume = 1;
  video.play();
} // end of MovieAudio function
The problem is I want to double the volume of the audio and if I set the volume like this I get an error:
clip.volume = 2;
I find a solution here but I can't make the code work...
https://stackoverflow.com/a/43794379/10715551
// Boost playback above volume = 1: HTMLMediaElement.volume is clamped to
// [0, 1], so a Web Audio GainNode performs the amplification instead.
// create an audio context and hook up the video element as the source
var audioCtx = new AudioContext();
var source = audioCtx.createMediaElementSource(clip);
// create a gain node
var gainNode = audioCtx.createGain();
gainNode.gain.value = 2; // double the volume
source.connect(gainNode);
// connect the gain node to an output destination
gainNode.connect(audioCtx.destination);
// NOTE(review): once createMediaElementSource() is used, the element's
// audio reaches the speakers only through this graph.
How can I double the volume of audio with the given code?
Basically I want to be able to perform effectively this same code:
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');
const context = canvas.getContext('2d');
const draw = () => {
context.drawImage(video, 0, 0);
requestAnimationFrame(draw);
}
video.onplay = () => {
requestAnimationFrame(draw);
}
only using an offscreen canvas. I can send images over messages to the worker the offscreen canvas is on, but not video as it's directly tied to an HTMLElement. Is there currently a way to somehow still render video data or a MediaStream in an offscreen canvas?
You can send frames of a video to an OffscreenCanvas in a Web Worker by modifying your script with the following changes:
const worker = new Worker('my-worker.js');
const video = document.getElementById('video');

// Capture the <video> as a stream and wrap its video track in an
// ImageCapture so individual frames can be grabbed as ImageBitmaps.
const stream = video.captureStream();
const [track] = stream.getVideoTracks();
const imageCapture = new ImageCapture(track);

// Hand rendering control of the visible canvas over to the worker.
const canvas = document.getElementById('canvas');
const offscreen = canvas.transferControlToOffscreen();
worker.postMessage({ offscreen }, [offscreen]);

// Each animation frame: grab a bitmap and transfer it to the worker.
const draw = () => {
  imageCapture.grabFrame().then((frame) => {
    worker.postMessage({ imageBitmap: frame }, [frame]);
  });
  requestAnimationFrame(draw);
};

video.onplay = () => {
  requestAnimationFrame(draw);
};
my-worker.js
let canvas;
let context;

// Protocol: the first message transfers the OffscreenCanvas; every
// subsequent message carries one ImageBitmap frame to draw.
addEventListener('message', (event) => {
  const { offscreen, imageBitmap } = event.data;
  if (offscreen) {
    canvas = offscreen;
    context = canvas.getContext('2d');
    return;
  }
  if (imageBitmap && context) {
    context.drawImage(imageBitmap, 0, 0);
    // do something with frame
  }
});
References
HTMLMediaElement.prototype.captureStream()
MediaStream.prototype.getVideoTracks()
new ImageCapture()
ImageCapture.prototype.grabFrame()
In real-time we do this to get the frequency of the audio while audio is playing.
// Cleaned-up version of the pseudo-code above: the original had invalid
// casing (`Window.onload{`, `Var`, `uintarray`, `RequestAnimationFrame`)
// and called a non-existent method — analyser.getFrequencyBinCount(array)
// should be analyser.getByteFrequencyData(array).
window.onload = function() {
  audio.load();
  audio.play();
  var context = new AudioContext();
  var source = context.createMediaElementSource(audio);
  var analyser = context.createAnalyser();
  source.connect(analyser);
  analyser.connect(context.destination); // keep the audio audible
  analyser.fftSize = 512;
  var array = new Uint8Array(analyser.frequencyBinCount);
  function render() {
    requestAnimationFrame(render);
    analyser.getByteFrequencyData(array);
    // Process frequency details here
  }
  render();
};
I want to get a frequency of an audio clip 30 times a second in nonreal time.
for example
// Valid-JS version of the sketch: sample the frequency data at a given
// playback time (here 30 samples per second).
var subSecond = 1 / 30;
var frequency = getFrequencyAt(64 * subSecond);
function getFrequencyAt(s) {
  // Logic to get the frequency here
}
How can I achieve this efficiently?
I'm working on a project in which I'd like to:
Load a video in JavaScript and display it on the canvas.
Use filters to alter the appearance of the canvas (and therefore the video).
Use the MediaStream captureStream() method and a MediaRecorder object to record the surface of the canvas and the audio of the original video.
Play the stream of both the canvas and the audio in an HTML video element.
I've been able to display the canvas recording in a video element by tweaking this WebRTC demo code: https://webrtc.github.io/samples/src/content/capture/canvas-record/
That said, I can't figure out how to record the video's audio alongside the canvas. Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
According to the MediaStream API's specs there should theoretically be some way to accomplish this:
https://w3c.github.io/mediacapture-main/#introduction
"The two main components in the MediaStream API are the MediaStreamTrack and MediaStream interfaces. The MediaStreamTrack object represents media of a single type that originates from one media source in the User Agent, e.g. video produced by a web camera. A MediaStream is used to group several MediaStreamTrack objects into one unit that can be recorded or rendered in a media element."
Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
Yes, you can do it using the MediaStream.addTrack() method, or new MediaStream([track1, track2]).
The OP already knew how to get all of this, but here is a reminder for future readers:
To get a video stream track from a <canvas>, you can call canvas.captureStream(framerate) method.
To get an audio stream track from a <video> element you can use the Web Audio API and its createMediaStreamDestination method.
This will return a MediaStreamAudioDestinationNode node (dest) containing our audio stream. You'll then have to connect a MediaElementAudioSourceNode created from your <video> element, to this dest.
If you need to add more audio tracks to this stream, you should connect all these sources to dest.
Now that we've got two streams, one for the <canvas> video and one for the audio, we can either add the audio track to the canvas stream before we initialize the recorder:
canvasStream.addTrack(audioStream.getAudioTracks()[0]);
const recorder = new MediaRecorder(canvasStream)
or we can create a third MediaStream object from these two tracks:
const [videoTrack] = canvasStream.getVideoTracks();
const [audioTrack] = audioStream.getAudioTracks();
// BUG FIX: the MediaStream constructor takes an ARRAY of tracks —
// `new MediaStream(videoTrack, audioTrack)` would ignore the second track.
const recordedStream = new MediaStream([videoTrack, audioTrack]);
const recorder = new MediaRecorder(recordedStream);
Here is a complete example:
var
btn = document.querySelector("button"), // single record/stop toggle button
canvas,        // visualizer canvas, created in startCanvasAnim()
cStream,       // canvas.captureStream() video stream being recorded
aStream,       // MediaStreamDestination audio stream from the Web Audio graph
vid,           // source <video> element, created in loadVideo()
recorder,      // MediaRecorder fed with cStream (video + muxed audio track)
analyser,      // AnalyserNode used only for the waveform drawing
dataArray,     // time-domain sample buffer filled from the analyser
bufferLength,  // analyser.frequencyBinCount (number of samples per frame)
chunks = [];   // recorded Blob chunks collected by saveChunks()
function clickHandler() {
  // Start recording: capture the canvas at 30 fps, mux in the audio track,
  // and re-arm the button so the next click stops instead of starts.
  btn.textContent = 'stop recording';
  if (!aStream) {
    initAudioStream(); // lazy one-time Web Audio setup
  }
  cStream = canvas.captureStream(30);
  cStream.addTrack(aStream.getAudioTracks()[0]);
  recorder = new MediaRecorder(cStream);
  recorder.start();
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportStream;
  btn.onclick = stopRecording;
};
function exportStream(e) {
  // Recorder "stop" handler: assemble the collected chunks into a Blob
  // and show it in a new <video> element inserted before the canvas.
  if (chunks.length) {
    var blob = new Blob(chunks, { type: chunks[0].type });
    var vidURL = URL.createObjectURL(blob);
    // Renamed from `vid` — the original shadowed the global source video.
    var playback = document.createElement('video');
    playback.controls = true;
    playback.src = vidURL;
    // BUG FIX: the media event is "ended", not "end"; the original
    // `onend` handler never fired, so the object URL was never revoked.
    playback.onended = function() {
      URL.revokeObjectURL(vidURL);
    };
    document.body.insertBefore(playback, canvas);
  } else {
    document.body.insertBefore(document.createTextNode('no data saved'), canvas);
  }
}
function saveChunks(e) {
  // Keep only non-empty dataavailable payloads.
  if (e.data.size > 0) {
    chunks.push(e.data);
  }
}
function stopRecording() {
  // Pause playback, remove the (now useless) button, and finalize the
  // recording; recorder.onstop (exportStream) will render the result.
  vid.pause();
  btn.remove();
  recorder.stop();
}
function initAudioStream() {
  // One-time Web Audio setup: route the <video>'s audio to three sinks —
  // a MediaStreamDestination (recorded), an AnalyserNode (visualized),
  // and the context destination (audible).
  var audioCtx = new AudioContext();
  // create a stream from our AudioContext
  var dest = audioCtx.createMediaStreamDestination();
  aStream = dest.stream; // shared global; clickHandler muxes its audio track
  // connect our video element's output to the stream
  var sourceNode = audioCtx.createMediaElementSource(vid);
  sourceNode.connect(dest)
  // start the video
  vid.play();
  // just for the fancy canvas drawings
  analyser = audioCtx.createAnalyser();
  sourceNode.connect(analyser);
  analyser.fftSize = 2048; // frequencyBinCount = fftSize / 2 = 1024 samples
  bufferLength = analyser.frequencyBinCount;
  dataArray = new Uint8Array(bufferLength);
  analyser.getByteTimeDomainData(dataArray);
  // output to our headphones
  sourceNode.connect(audioCtx.destination)
  startCanvasAnim();
}
function enableButton() {
  // The video can play: detach this one-shot handler and arm the button.
  vid.oncanplay = null;
  btn.onclick = clickHandler;
  btn.disabled = false;
};
var loadVideo = function() {
  // Create the (off-DOM) source <video>; enableButton fires once it can play.
  vid = document.createElement('video');
  vid.crossOrigin = 'anonymous'; // avoid tainting the canvas / capture stream
  vid.oncanplay = enableButton;
  vid.src = 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4';
}
function startCanvasAnim() {
  // Draws a live oscilloscope-style waveform of the analyser's
  // time-domain data onto a freshly created 500x200 canvas.
  // from MDN https://developer.mozilla.org/en/docs/Web/API/AnalyserNode#Examples
  canvas = Object.assign(document.createElement("canvas"), { width: 500, height: 200});
  document.body.prepend(canvas);
  var canvasCtx = canvas.getContext('2d');
  canvasCtx.fillStyle = 'rgb(200, 200, 200)';
  canvasCtx.lineWidth = 2;
  canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
  var draw = function() {
    var drawVisual = requestAnimationFrame(draw); // schedule the next frame first
    analyser.getByteTimeDomainData(dataArray);
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height); // clear with grey
    canvasCtx.beginPath();
    var sliceWidth = canvas.width * 1.0 / bufferLength; // px per sample
    var x = 0;
    for (var i = 0; i < bufferLength; i++) {
      var v = dataArray[i] / 128.0; // normalize byte (0-255) around 1.0
      var y = v * canvas.height / 2;
      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }
      x += sliceWidth;
    }
    canvasCtx.lineTo(canvas.width, canvas.height / 2); // close at the midline
    canvasCtx.stroke();
  };
  draw();
}
// Kick everything off: create the source video element.
loadVideo();
button { vertical-align: top }
<button disabled>record</button>
Kaiido's demo is brilliant. For those just looking for the tl;dr code to add an audio stream to their existing canvas stream:
// Pick your audio source element (any <audio> or <video> will do).
// BUG FIX: the original read `let videoOrAudioElement = /* comment */;`,
// which is a SyntaxError — the initializer was missing entirely.
let videoOrAudioElement = document.querySelector('video');
// get the audio track:
let ctx = new AudioContext();
let dest = ctx.createMediaStreamDestination();
let sourceNode = ctx.createMediaElementSource(videoOrAudioElement);
sourceNode.connect(dest);
sourceNode.connect(ctx.destination); // keep it audible locally as well
let audioTrack = dest.stream.getAudioTracks()[0];
// add it to your canvas stream:
canvasStream.addTrack(audioTrack);
// use your canvas stream like you would normally:
let recorder = new MediaRecorder(canvasStream);
// ...