In real time, we do this to get the frequency data of the audio while it is playing:
window.onload = function() {
  audio.load();
  audio.play();
  var context = new AudioContext();
  var source = context.createMediaElementSource(audio);
  var analyser = context.createAnalyser();
  source.connect(analyser);
  analyser.connect(context.destination); // keep the audio audible
  analyser.fftSize = 512;
  var array = new Uint8Array(analyser.frequencyBinCount);
  function render() {
    requestAnimationFrame(render);
    analyser.getByteFrequencyData(array);
    // Process frequency details here
  }
  render();
};
I want to get the frequency data of an audio clip 30 times per second, in non-real time. For example:
var subSecond = 1 / 30;
var frequency = getFrequencyAt(64 * subSecond);

function getFrequencyAt(s) {
  // Logic to get the frequency here
}
How can I achieve this efficiently?
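One way to do this (a sketch, not a drop-in answer): render the clip through an OfflineAudioContext and schedule a suspend() every 1/30 s; while rendering is paused, read the AnalyserNode, then resume(). The URL and function name below are placeholders:

// Sketch: sample the spectrum every 1/30 s without playing the file.
// getFrequenciesOffline and 'clip.mp3' are placeholder names.
async function getFrequenciesOffline(url) {
  const encoded = await (await fetch(url)).arrayBuffer();
  const buffer = await new AudioContext().decodeAudioData(encoded);
  const offline = new OfflineAudioContext(
      buffer.numberOfChannels, buffer.length, buffer.sampleRate);
  const source = offline.createBufferSource();
  source.buffer = buffer;
  const analyser = offline.createAnalyser();
  analyser.fftSize = 512;
  source.connect(analyser);
  analyser.connect(offline.destination);
  const frames = [];
  // All suspend points must be scheduled before startRendering().
  for (let t = 1 / 30; t < buffer.duration; t += 1 / 30) {
    offline.suspend(t).then(() => {
      const bins = new Uint8Array(analyser.frequencyBinCount);
      analyser.getByteFrequencyData(bins); // spectrum at this instant
      frames.push(bins);
      offline.resume();
    });
  }
  source.start(0);
  await offline.startRendering();
  return frames; // frames[i] is the spectrum at (i + 1) / 30 s
}

getFrequenciesOffline('clip.mp3').then(frames => console.log(frames.length));

Since offline rendering runs much faster than real time, this is far more efficient than playing the clip and polling.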
Related
I'm working on a project in which I'd like to:
Load a video js and display it on the canvas.
Use filters to alter the appearance of the canvas (and therefore the video).
Use the MediaStream captureStream() method and a MediaRecorder object to record the surface of the canvas and the audio of the original video.
Play the stream of both the canvas and the audio in an HTML video element.
I've been able to display the canvas recording in a video element by tweaking this WebRTC demo code: https://webrtc.github.io/samples/src/content/capture/canvas-record/
That said, I can't figure out how to record the video's audio alongside the canvas. Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
According to the MediaStream API's specs there should theoretically be some way to accomplish this:
https://w3c.github.io/mediacapture-main/#introduction
"The two main components in the MediaStream API are the MediaStreamTrack and MediaStream interfaces. The MediaStreamTrack object represents media of a single type that originates from one media source in the User Agent, e.g. video produced by a web camera. A MediaStream is used to group several MediaStreamTrack objects into one unit that can be recorded or rendered in a media element."
Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
Yes, you can do it using the MediaStream.addTrack() method, or new MediaStream([track1, track2]).
The OP already knows how to get all of these, but here is a reminder for future readers:
To get a video stream track from a <canvas>, you can call the canvas.captureStream(framerate) method.
To get an audio stream track from a <video> element, you can use the Web Audio API and its createMediaStreamDestination() method.
This returns a MediaStreamAudioDestinationNode (dest) whose stream property contains our audio stream. You'll then have to connect a MediaElementAudioSourceNode, created from your <video> element, to this dest.
If you need to add more audio tracks to this stream, you should connect all these sources to dest.
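In code, the wiring described above looks roughly like this (videoEl stands in for your <video> element; the rest are standard API calls):

// videoEl is your <video> element, canvas your filtered <canvas>.
const audioCtx = new AudioContext();
const dest = audioCtx.createMediaStreamDestination();
audioCtx.createMediaElementSource(videoEl).connect(dest);
const audioStream = dest.stream;               // holds the audio track
const canvasStream = canvas.captureStream(30); // holds the video track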
Now that we've got two streams, one for the <canvas> video and one for the audio, we can either add the audio track to the canvas stream before we initialize the recorder:
canvasStream.addTrack(audioStream.getAudioTracks()[0]);
const recorder = new MediaRecorder(canvasStream)
or we can create a third MediaStream object from these two tracks:
const [videoTrack] = canvasStream.getVideoTracks();
const [audioTrack] = audioStream.getAudioTracks();
const recordedStream = new MediaStream([videoTrack, audioTrack]);
const recorder = new MediaRecorder(recordedStream);
Here is a complete example:
var
btn = document.querySelector("button"),
canvas,
cStream,
aStream,
vid,
recorder,
analyser,
dataArray,
bufferLength,
chunks = [];
function clickHandler() {
btn.textContent = 'stop recording';
if (!aStream) {
initAudioStream();
}
cStream = canvas.captureStream(30);
cStream.addTrack(aStream.getAudioTracks()[0]);
recorder = new MediaRecorder(cStream);
recorder.start();
recorder.ondataavailable = saveChunks;
recorder.onstop = exportStream;
btn.onclick = stopRecording;
}
function exportStream(e) {
if (chunks.length) {
var blob = new Blob(chunks, { type: chunks[0].type });
var vidURL = URL.createObjectURL(blob);
var vid = document.createElement('video');
vid.controls = true;
vid.src = vidURL;
vid.onended = function() {
URL.revokeObjectURL(vidURL);
};
document.body.insertBefore(vid, canvas);
} else {
document.body.insertBefore(document.createTextNode('no data saved'), canvas);
}
}
function saveChunks(e) {
e.data.size && chunks.push(e.data);
}
function stopRecording() {
vid.pause();
btn.remove();
recorder.stop();
}
function initAudioStream() {
var audioCtx = new AudioContext();
// create a stream from our AudioContext
var dest = audioCtx.createMediaStreamDestination();
aStream = dest.stream;
// connect our video element's output to the stream
var sourceNode = audioCtx.createMediaElementSource(vid);
sourceNode.connect(dest)
// start the video
vid.play();
// just for the fancy canvas drawings
analyser = audioCtx.createAnalyser();
sourceNode.connect(analyser);
analyser.fftSize = 2048;
bufferLength = analyser.frequencyBinCount;
dataArray = new Uint8Array(bufferLength);
analyser.getByteTimeDomainData(dataArray);
// output to our headphones
sourceNode.connect(audioCtx.destination)
startCanvasAnim();
}
function enableButton() {
vid.oncanplay = null;
btn.onclick = clickHandler;
btn.disabled = false;
}
var loadVideo = function() {
vid = document.createElement('video');
vid.crossOrigin = 'anonymous';
vid.oncanplay = enableButton;
vid.src = 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4';
}
function startCanvasAnim() {
// from MDN https://developer.mozilla.org/en/docs/Web/API/AnalyserNode#Examples
canvas = Object.assign(document.createElement("canvas"), { width: 500, height: 200});
document.body.prepend(canvas);
var canvasCtx = canvas.getContext('2d');
canvasCtx.fillStyle = 'rgb(200, 200, 200)';
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
var draw = function() {
var drawVisual = requestAnimationFrame(draw);
analyser.getByteTimeDomainData(dataArray);
canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
canvasCtx.beginPath();
var sliceWidth = canvas.width * 1.0 / bufferLength;
var x = 0;
for (var i = 0; i < bufferLength; i++) {
var v = dataArray[i] / 128.0;
var y = v * canvas.height / 2;
if (i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
}
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
};
draw();
}
loadVideo();
button { vertical-align: top }
<button disabled>record</button>
Kaiido's demo is brilliant. For those just looking for the tl;dr code to add an audio stream to their existing canvas stream:
let videoOrAudioElement = /* your audio source element */;
// get the audio track:
let ctx = new AudioContext();
let dest = ctx.createMediaStreamDestination();
let sourceNode = ctx.createMediaElementSource(videoOrAudioElement);
sourceNode.connect(dest);
sourceNode.connect(ctx.destination);
let audioTrack = dest.stream.getAudioTracks()[0];
// add it to your canvas stream:
canvasStream.addTrack(audioTrack);
// use your canvas stream like you would normally:
let recorder = new MediaRecorder(canvasStream);
// ...
I want to get the frequencies of an audio file with JS in non-real time, e.g. before the file is played, and store them in an array.
I used this code:
var context = new AudioContext();
var src = context.createMediaElementSource(source);
var analyser = context.createAnalyser();
var listen = context.createGain();
src.connect(listen);
listen.connect(analyser);
analyser.connect(context.destination);
analyser.fftSize = 2 ** 12;
var frequencyBins = analyser.fftSize / 2;
var bufferLength = analyser.frequencyBinCount;
console.log(bufferLength);
var dataArray = new Float32Array(bufferLength);
// var dataArray = new Uint8Array(bufferLength);
var scale = bufferLength / WIDTH; // WIDTH: the canvas width, defined elsewhere
And then in an animation frame,
dataArray = new Float32Array(2048);
analyser.getFloatFrequencyData(dataArray);
This works great in real time, but I'd like to get the audio data beforehand. How can I do that?
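The same OfflineAudioContext approach sketched earlier on this page works here too; a condensed version, assuming buffer is an AudioBuffer you already obtained from decodeAudioData:

// `buffer` is an AudioBuffer from decodeAudioData (assumption).
const offline = new OfflineAudioContext(
    buffer.numberOfChannels, buffer.length, buffer.sampleRate);
const src = offline.createBufferSource();
src.buffer = buffer;
const analyser = offline.createAnalyser();
analyser.fftSize = 2 ** 12;
src.connect(analyser);
analyser.connect(offline.destination);
const frames = [];
for (let t = 1 / 30; t < buffer.duration; t += 1 / 30) {
  offline.suspend(t).then(() => {
    const frame = new Float32Array(analyser.frequencyBinCount);
    analyser.getFloatFrequencyData(frame); // dB values, as in the live code
    frames.push(frame);
    offline.resume();
  });
}
src.start(0);
offline.startRendering().then(() => console.log(frames.length, 'frames'));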
I want to play only the audio of an mp4 video file. So far I'm here, and this function works fine without any errors:
function MovieAudio() {
const video = document.createElement('video');
document.body.appendChild(video);
video.id = 'clip';
const clip = document.getElementById("clip");
clip.style.visibility = "hidden";
const source = document.createElement('source');
source.src = 'myvideo.mp4';
source.type = 'video/mp4';
video.appendChild(source);
video.load();
clip.volume = 1;
clip.play();
} // end of MovieAudio function
The problem is that I want to double the volume of the audio, but if I set the volume like this I get an error:
clip.volume = 2;
I found a solution here, but I can't make the code work:
https://stackoverflow.com/a/43794379/10715551
// create an audio context and hook up the video element as the source
var audioCtx = new AudioContext();
var source = audioCtx.createMediaElementSource(clip);
// create a gain node
var gainNode = audioCtx.createGain();
gainNode.gain.value = 2; // double the volume
source.connect(gainNode);
// connect the gain node to an output destination
gainNode.connect(audioCtx.destination);
How can I double the volume of audio with the given code?
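A minimal sketch combining the two snippets above; 'myvideo.mp4' is the asker's file, and the AudioContext is created inside the function so it can run from a click handler (browsers require a user gesture before audio may start):

function movieAudio() {
  const clip = document.createElement('video');
  clip.src = 'myvideo.mp4';
  clip.style.visibility = 'hidden';
  document.body.appendChild(clip);

  // Route the element's audio through a gain node at 2x volume.
  // Note: values above 1 can clip loud material.
  const audioCtx = new AudioContext();
  const source = audioCtx.createMediaElementSource(clip);
  const gainNode = audioCtx.createGain();
  gainNode.gain.value = 2; // double the volume
  source.connect(gainNode);
  gainNode.connect(audioCtx.destination);

  clip.play();
}

// e.g. document.querySelector('button').onclick = movieAudio;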
I'm having a bit of trouble building an audio frequency visualization. I am using the HTML5 audio tag:
<audio id="music" src="https://cdn.glitch.com/02dcea11-9bd2-4462-ac38-eeb6a5ad9530%2F331_full_beautiful-minds_0171_preview.mp3?1522829295082" crossorigin="use-URL-credentials" controls="true"></audio>
(song is from www.premiumbeat.com)
used with AudioContext and Analyser as shown below:
const audio = document.getElementById('music');
audio.load();
audio.play();
const ctx = new AudioContext();
const audioSrc = ctx.createMediaElementSource(audio);
const analyser = ctx.createAnalyser();
audioSrc.connect(analyser);
analyser.connect(ctx.destination);
analyser.fftSize = 256;
const bufferLength = analyser.frequencyBinCount;
const frequencyData = new Uint8Array(bufferLength);
analyser.getByteFrequencyData(frequencyData);
setTimeout(() => {
analyser.getByteFrequencyData(frequencyData);
console.log(frequencyData);
}, 5000);
Output:
Even though the song is playing when I call getByteFrequencyData(), the output is a 128-item array full of zeros.
Expected behaviour:
After 5 seconds, the console should output a 128-item array of current frequency data. (I do it this way because requestAnimationFrame would lag the window; I tried it anyway and the result is the same.)
Any idea what I'm doing wrong? (I'm using Firefox Quantum 59.0.2.)
Try it here: JSFiddle example
Thank you!
Following my experiments with the Web Audio API, I modified your script to use getByteTimeDomainData instead of getByteFrequencyData.
https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/getByteTimeDomainData
I'm using an interval instead of a timeout for the demo.
const audio = document.getElementById('music');
audio.load();
audio.play();
const ctx = new AudioContext();
const audioSrc = ctx.createMediaElementSource(audio);
const analyser = ctx.createAnalyser();
audioSrc.connect(analyser);
analyser.connect(ctx.destination);
analyser.fftSize = 256;
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
setInterval(() => {
analyser.getByteTimeDomainData(dataArray);
console.log(dataArray);
}, 1000);
<audio id="music" src="https://cdn.glitch.com/02dcea11-9bd2-4462-ac38-eeb6a5ad9530%2F331_full_beautiful-minds_0171_preview.mp3?1522829295082" crossorigin="use-URL-credentials" controls="true"></audio>