The code below works, but the video has to be downloaded in its entirety before playback starts. Is there any way to load it partially?
const video = document.querySelector('video');
const assetURL = '/e_dashinit.mp4';
const mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';

const mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);

function sourceOpen(_) {
  const mediaSource = this;
  const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
  fetchAB(assetURL, function (buf) {
    sourceBuffer.addEventListener('updateend', function (_) {
      mediaSource.endOfStream();
      video.play();
    });
    sourceBuffer.appendBuffer(buf);
  });
}

function fetchAB(url, cb) {
  const xhr = new XMLHttpRequest();
  xhr.open('get', url);
  xhr.responseType = 'arraybuffer';
  xhr.onload = function () {
    cb(xhr.response);
  };
  xhr.send();
}
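One approach, sketched here under the assumption that the server honors HTTP Range requests and that the MP4 is fragmented (as the dashinit name suggests): fetch the asset in fixed-size byte ranges and append each chunk to the SourceBuffer as it arrives, so playback can begin before the download finishes.

// Sketch: progressive loading via HTTP Range requests (assumes server support).
const CHUNK_SIZE = 256 * 1024; // 256 KiB per request; tune to taste

async function streamInChunks(url, sourceBuffer, mediaSource) {
  let offset = 0;
  while (true) {
    const res = await fetch(url, {
      headers: { Range: 'bytes=' + offset + '-' + (offset + CHUNK_SIZE - 1) },
    });
    if (!res.ok) break; // e.g. 416 once we are past the end of the file
    const chunk = await res.arrayBuffer();

    sourceBuffer.appendBuffer(chunk);
    // Wait until the SourceBuffer has ingested the chunk before appending more.
    await new Promise((resolve) =>
      sourceBuffer.addEventListener('updateend', resolve, { once: true })
    );

    offset += chunk.byteLength;
    // 206 = partial content; a 200 means the server sent the whole file at once.
    if (res.status !== 206 || chunk.byteLength < CHUNK_SIZE) break;
  }
  mediaSource.endOfStream();
}

// Inside sourceOpen, instead of fetchAB:
// streamInChunks(assetURL, sourceBuffer, mediaSource);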
I have an animated canvas that I want to convert into an mp4. I am using MediaRecorder to capture the canvas stream and then converting the resulting Blob. I learned that MediaRecorder does not allow recording in mp4, so I am forced to capture the canvas as webm. Here is what I have tried:
<canvas id="canvas"></canvas>
var recordedChunks = [];
var time = 0;
var canvas = document.getElementById("canvas");
return new Promise(function (res, rej) {
var stream = canvas.captureStream(60);
mediaRecorder = new MediaRecorder(stream, {
mimeType: "video/webm; codecs=vp9"
});
mediaRecorder.start(time);
mediaRecorder.ondataavailable = function (e) {
recordedChunks.push(event.data);
if (mediaRecorder.state === 'recording') {
mediaRecorder.stop();
}
}
mediaRecorder.onstop = function (event) {
var blob = new Blob(recordedChunks, {
"type": "video/webm"
});
var url = URL.createObjectURL(blob);
res(url);
var xhr = new XMLHttpRequest;
xhr.responseType = 'blob';
xhr.onload = function() {
var recoveredBlob = xhr.response;
var reader = new FileReader;
reader.onload = function() {
var blobAsDataUrl = reader.result;
document.getElementById("my-video").setAttribute("src", blobAsDataUrl);
};
reader.readAsDataURL(recoveredBlob);
};
xhr.open('GET', url);
xhr.send();
}
});
Any solution is highly appreciated.
Quick demo of transcoding using ffmpeg.wasm:
const { createFFmpeg } = FFmpeg;
const ffmpeg = createFFmpeg({
  log: true
});

const transcode = async (webcamData) => {
  const message = document.getElementById('message');
  const name = 'record.webm';
  await ffmpeg.load();
  message.innerHTML = 'Start transcoding';
  await ffmpeg.write(name, webcamData);
  await ffmpeg.transcode(name, 'output.mp4');
  message.innerHTML = 'Complete transcoding';
  const data = ffmpeg.read('output.mp4');
  const video = document.getElementById('output-video');
  video.src = URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' }));
  dl.href = video.src;
  dl.innerHTML = "download mp4";
};

fn().then(async ({ url, blob }) => {
  transcode(new Uint8Array(await blob.arrayBuffer()));
});

function fn() {
  var recordedChunks = [];
  var time = 0;
  var canvas = document.getElementById("canvas");
  return new Promise(function (res, rej) {
    var stream = canvas.captureStream(60);
    mediaRecorder = new MediaRecorder(stream, {
      mimeType: "video/webm; codecs=vp9"
    });
    mediaRecorder.start(time);
    mediaRecorder.ondataavailable = function (e) {
      recordedChunks.push(e.data);
      // for demo, removed stop() call to capture more than one frame
    };
    mediaRecorder.onstop = function (event) {
      var blob = new Blob(recordedChunks, {
        "type": "video/webm"
      });
      var url = URL.createObjectURL(blob);
      res({ url, blob }); // resolve both blob and url in an object
      myVideo.src = url;
      // removed data url conversion for brevity
    };
    // for demo, draw random lines and then stop recording
    var i = 0,
      tid = setInterval(() => {
        if (i++ > 20) { // draw 20 lines
          clearInterval(tid);
          mediaRecorder.stop();
        }
        let canvas = document.querySelector("canvas");
        let cx = canvas.getContext("2d");
        cx.beginPath();
        cx.strokeStyle = 'green';
        cx.moveTo(Math.random() * 100, Math.random() * 100);
        cx.lineTo(Math.random() * 100, Math.random() * 100);
        cx.stroke();
      }, 200);
  });
}
<script src="https://unpkg.com/@ffmpeg/ffmpeg@0.8.1/dist/ffmpeg.min.js"></script>
<canvas id="canvas" style="height:100px;width:100px"></canvas>
<video id="myVideo" controls="controls"></video>
<video id="output-video" controls="controls"></video>
<a id="dl" href="" download="download.mp4"></a>
<div id="message"></div>
Hi, I'm trying to make a drum kit using the AudioContext API. My issue is that after I use it exactly 50 times, it stops working. The only fix I have found is to close the previous AudioContext, but that produces a "clicky" sound because the audio stops abruptly.
Any ideas on what to do?
Here is what I'm working with to keep it from stopping after 50 uses:
let i = 0;
let audioContext;
let volumeControl;

// Credit to MDN for AudioContext and StereoPannerNode tutorial.
function playSound(src, volume, pitch, stereo) {
  if (audioContext != null) {
    //volumeControl.gain.value = 0;
    audioContext.close();
  }
  console.log(i++);
  audioContext = new AudioContext();
  const stereoControl = new StereoPannerNode(audioContext);
  volumeControl = audioContext.createGain();
  volumeControl.gain.value = volume;
  stereoControl.pan.value = stereo;
  const source = audioContext.createBufferSource();
  const request = new XMLHttpRequest();
  request.open('GET', src, true);
  request.responseType = 'arraybuffer';
  request.onload = function () {
    const audioData = request.response;
    audioContext.decodeAudioData(audioData, function (buffer) {
      source.buffer = buffer;
      source.playbackRate.value = pitch;
      source.connect(volumeControl).connect(stereoControl).connect(audioContext.destination);
    });
  };
  request.send();
  source.play = source.start;
  source.play();
}
Don't create an audio context for each sound, but create a single one for your page and add nodes to it. Something like this...
const audioContext = new AudioContext();

function playSound(src, volume, pitch, stereo) {
  const stereoControl = audioContext.createStereoPanner();
  const volumeControl = audioContext.createGain();
  volumeControl.gain.value = volume;
  stereoControl.pan.value = stereo;
  const source = audioContext.createBufferSource();
  const request = new XMLHttpRequest();
  request.open("GET", src, true);
  request.responseType = "arraybuffer";
  request.onload = function () {
    const audioData = request.response;
    audioContext.decodeAudioData(audioData, function (buffer) {
      source.buffer = buffer;
      source.playbackRate.value = pitch;
      source
        .connect(volumeControl)
        .connect(stereoControl)
        .connect(audioContext.destination);
      source.start();
    });
  };
  request.send();
}
In addition, for a drum kit you'll want to either preload all samples, or at the very least cache the decoded audio buffers rather than making a request for them every time:
const cache = {};
const audioContext = new AudioContext();

function loadSound(src) {
  if (cache[src]) {
    // Already cached
    return Promise.resolve(cache[src]);
  }
  return new Promise(resolve => {
    const request = new XMLHttpRequest();
    request.open("GET", src, true);
    request.responseType = "arraybuffer";
    request.onload = function () {
      const audioData = request.response;
      audioContext.decodeAudioData(audioData, function (buffer) {
        cache[src] = buffer;
        resolve(buffer);
      });
    };
    request.send();
  });
}

function playSound(src, volume, pitch, stereo) {
  loadSound(src).then(buffer => {
    const stereoControl = audioContext.createStereoPanner();
    const volumeControl = audioContext.createGain();
    volumeControl.gain.value = volume;
    stereoControl.pan.value = stereo;
    const source = audioContext.createBufferSource();
    source.buffer = buffer;
    source.playbackRate.value = pitch;
    source
      .connect(volumeControl)
      .connect(stereoControl)
      .connect(audioContext.destination);
    source.start();
  });
}
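To preload, you can warm the cache up front. A minimal sketch, assuming a hypothetical list of sample file names:

// Hypothetical sample paths; substitute your own files.
const samples = ["kick.wav", "snare.wav", "hihat.wav"];

// Decode everything at startup so the first hit plays without fetch/decode latency.
Promise.all(samples.map(loadSound)).then(() => {
  console.log("all samples decoded and cached");
});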
I know there's an option loop=true on an AudioBufferSourceNode. But I just want to repeat the wav a certain number of times. For example, how might I repeat it twice?
var url = 'main.wav';
var context = new AudioContext();
var source = context.createBufferSource();
source.connect(context.destination);

var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = function () {
  context.decodeAudioData(request.response, function (response) {
    source.buffer = response;
    source.start(0);
    source.start(source.buffer.duration); // doesn't work: "cannot call start more than once."
    //source.loop = true; // this loops an infinite number of times, not good
  }, function () { console.error('The request failed.'); });
};
request.send();
I also tried to create a second buffer:
var source2 = context.createBufferSource();
// inside callback
source2.buffer = response; // same response so I don't need to load this separately
source2.start(source.buffer.duration); // doesn't work
But that didn't work either. Any ideas?
Your approach using a second buffer source is almost correct. You just need to use an absolute start time, not a relative one. To obtain the absolute time, sum the audio context's current time and the duration of the sound:
// Play first sound now
var source1 = context.createBufferSource();
source1.buffer = response;
source1.start(context.currentTime); // This is the same as source1.start();
// Play second sound with a delay
var source2 = context.createBufferSource();
source2.buffer = response;
source2.start(context.currentTime + response.duration);
This approach provides gapless playback (if your files are gapless).
You can wrap this into a simple function:
function play(context, buffer, delay = 0) {
  var source = context.createBufferSource();
  source.buffer = buffer;
  source.start(context.currentTime + delay);
  return source;
}
...
// Play the sound twice, one after another
play(context, response, 0);
play(context, response, response.duration);
You can set the AudioBufferSourceNode's loop to true and call
source.stop( ctx.currentTime + audioBuffer.duration * repeatCount )
(async () => {
  const url = 'https://dl.dropboxusercontent.com/s/1cdwpm3gca9mlo0/kick.mp3';
  const buf = await fetch(url).then((r) => r.arrayBuffer());
  const ctx = new AudioContext();
  const audioBuffer = await ctx.decodeAudioData(buf);
  btn.onclick = (e) => {
    const source = ctx.createBufferSource();
    source.buffer = audioBuffer;
    source.loop = true;
    source.connect(ctx.destination);
    source.start(0);
    source.stop(ctx.currentTime + audioBuffer.duration * inp.valueAsNumber);
  };
  btn.disabled = false;
})().catch(console.error);
<label> repeat<input id="inp" type="number" min="1" max="20" value="5"></label>
<button id="btn" disabled>start</button>
Another alternative is to use the 3-arg version of start(). Something like:
let nReps = 2; /* Number of times to repeat the source */
let s = new AudioBufferSourceNode(context, {buffer: buffer, loop: true});
s.start(startTime, 0, buffer.duration * nReps);
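As a minimal sketch of how this fits together (assuming, as above, that context is an existing AudioContext and buffer a decoded AudioBuffer):

// start(when, offset, duration) with loop = true plays from `offset`
// for `duration` seconds, looping as needed: back-to-back repeats, sample-accurate.
function playRepeated(context, buffer, nReps) {
  const s = new AudioBufferSourceNode(context, { buffer, loop: true });
  s.connect(context.destination);
  s.start(context.currentTime, 0, buffer.duration * nReps);
  return s;
}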
One solution would be to enable the loop behavior of your source and then schedule a callback that stop()s the source's playback after enough time has passed for the sound file to play the desired number of times:
const url = 'main.wav';
const context = new AudioContext();
const source = context.createBufferSource();

// Number of playback cycles
const playbackCount = 5;

source.connect(context.destination);

const request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = () => {
  context.decodeAudioData(request.response, (response) => {
    source.buffer = response;
    // Schedule sound to stop at a future time, after which the sound
    // should have played "playbackCount" number of times
    setTimeout(() => {
      source.stop();
    }, response.duration * playbackCount * 1000);
    // Enable loop behavior
    source.loop = true;
    source.start(0);
  }, () => {
    console.error('The request failed.');
  });
};
request.send();
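Note that setTimeout is only approximately timed, so with many repeats the cut-off can drift relative to the audio clock. If that matters, the stop can be scheduled on the audio clock instead, replacing the setTimeout block above with:

// Inside the decodeAudioData callback, after source.start(0):
source.stop(context.currentTime + response.duration * playbackCount);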
I have written some code that streams video over a WebSocket to a SourceBuffer, and it works in Chrome and Edge.
However, when I run this in Firefox, the video never plays back; just a spinning wheel animation is displayed. When I check the <video> statistics, it reads HAVE_METADATA as the ready state and NETWORK_LOADING as the network state.
The code looks as follows:
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8"/>
</head>
<body>
  <video controls></video>
  <script>
    var mime = 'video/mp4; codecs="avc1.4D401E,mp4a.40.2"';
    var address = 'ws://localhost:54132';

    /* Media Source */
    var source = new MediaSource();
    var video = document.querySelector('video');
    video.src = URL.createObjectURL(source);
    source.addEventListener('sourceopen', sourceOpen);

    /* Buffer */
    var buffer;
    var socket;
    var queue = [];
    var offset = -1;
    var timescale;

    // When the media source opens:
    function sourceOpen() {
      buffer = source.addSourceBuffer(mime);
      buffer.addEventListener('updateend', processQueue);
      socket = new WebSocket(address);
      socket.binaryType = 'arraybuffer';
      socket.onmessage = onMessage;
    }

    // When more data is received.
    function onMessage(event) {
      queue.push(event.data);
      processQueue();
    }

    // Process queue if possible.
    function processQueue() {
      if ((queue.length == 0) || (buffer.updating)) {
        return;
      }
      var data = queue.shift();
      if (offset === -1) {
        var parsed = parseMP4(data);
        if (parsed.hasOwnProperty('moov')) {
          timescale = parsed.moov.mvhd.timescale;
        } else if (parsed.hasOwnProperty('moof')) {
          offset = 0 - (parsed.moof.traf[0].tfdt.baseMediaDecodeTime / timescale - 0.4);
          buffer.timestampOffset = offset;
        }
      }
      // console.log('appending ' + data.byteLength + ' bytes');
      buffer.appendBuffer(data);
    }

    // Parse out the offset.
    function parseMP4(data) {
      // SNIP for brevity
    }
  </script>
</body>
</html>
Could not reproduce the <video> element not playing at Firefox 47.
Merged the approach from "Mocking Websocket Message Events" for creating mock WebSocket events with the bufferAll.html demo from "Let's Make a Netflix: An Intro to Streaming Media on the Web" for the MediaSource usage pattern.
Included a <progress> element and a progress event handler to notify the user of the media loading status.
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8"/>
</head>
<body>
  <progress min="0" value="0"></progress><br><label></label><br>
  <video controls></video>
  <script>
    // http://nickdesaulniers.github.io/netfix/demo/bufferAll.html
    // http://jsfiddle.net/adamboduch/JVfkt/
    // The global web socket.
    var sock, sourceBuffer;
    sock = new WebSocket("ws://mock");
    sock.onerror = function (e) {
      console.log("sock error", e);
    };
    // This is unchanging production code that doesn't know
    // we're mocking the web socket.
    sock.onmessage = function (e) {
      console.log("socket message", e.data);
      sourceBuffer.appendBuffer(e.data);
    };

    var video = document.querySelector("video");
    var progress = document.querySelector("progress");
    var label = document.querySelector("label");
    var assetURL = "http://nickdesaulniers.github.io/netfix/"
      + "demo/frag_bunny.mp4";
    // Need to be specific for Blink regarding codecs
    // ./mp4info frag_bunny.mp4 | grep Codec
    var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';

    if ("MediaSource" in window
        && MediaSource.isTypeSupported(mimeCodec)) {
      var mediaSource = new MediaSource();
      //console.log(mediaSource.readyState); // closed
      video.src = URL.createObjectURL(mediaSource);
      mediaSource.addEventListener("sourceopen", sourceOpen);
    } else {
      console.error("Unsupported MIME type or codec: ", mimeCodec);
    }

    video.addEventListener("canplay", function () {
      alert("video canplay");
    });

    function sourceOpen(_) {
      //console.log(this.readyState); // open
      var mediaSource = this;
      sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
      fetchAB(assetURL, function (buf) {
        sourceBuffer.addEventListener("updateend", function (event) {
          console.log("sourceBuffer updateend event;"
            + "mediaSource.readyState:"
            , mediaSource.readyState);
          // mediaSource.endOfStream();
          // video.play();
          // console.log(mediaSource.readyState); // ended
        });
      });
    }

    // mock `WebSocket` message
    function fetchAB(url, cb) {
      var xhr = new XMLHttpRequest();
      xhr.open("get", url);
      var file = url.split("/").pop();
      xhr.responseType = "arraybuffer";
      xhr.onload = function () {
        // mock `WebSocket` message
        sock.dispatchEvent(new MessageEvent("message", {
          data: xhr.response
        }));
        console.log("video sent to sock", sock);
        cb();
      };
      xhr.onprogress = function (e) {
        progress.max = e.total;
        progress.value = e.loaded;
        label.innerHTML = "loading " + file + " ...<br>"
          + e.loaded + " of "
          + e.total + " bytes loaded";
      };
      xhr.send();
    }
  </script>
</body>
</html>
plnkr http://plnkr.co/edit/RCIqDXTB2BL3lec9bhfz
I'm trying to get a MediaStream object into a Blob, but I'm having some problems.
This is the part of my script for the UserMedia component:
var mediastream = undefined;
var blob = undefined;
var video = document.getElementById('test');
This is the script where I'm running my test:
var mediastream = undefined;
var blobUrl = undefined;
var video = document.getElementById('test');

function successCallback(stream) {
  mediastream = stream;
  blobUrl = URL.createObjectURL(mediastream);
  var xhr = new XMLHttpRequest();
  xhr.open('GET', blobUrl, true);
  xhr.responseType = 'arraybuffer';
  xhr.withCredentials = true;
  xhr.onreadystatechange = function () {
    console.log('xhr.readyState=' + xhr.readyState);
    if (xhr.readyState !== 4) {
      return;
    } else {
      var myBlob = this.response;
    }
  };
  xhr.send();
}

function errorCallback(stream) {
  console.log('error');
}
Setting the blobUrl as the video.src, I don't have any problem:
video.src = blobUrl; <- WORKS
But if I request the blobUrl (like this one: blob:http%3A//localhost%3A8080/b1ffa5b7-c0cc-4acf-8890-f5d0f08de9cb) I obtain an HTTP status 404.
For sure there is something wrong on my implementation.
Any suggestions?
Thank you!
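For what it's worth, a blob: URL created from a live MediaStream is not backed by bytes that XHR can download, which is consistent with the 404. A sketch of one way to actually get a Blob out of the stream, using MediaRecorder (the webm mime type and the recording duration are assumptions):

// Sketch: record the live MediaStream into a Blob with MediaRecorder.
function recordStream(stream, durationMs) {
  return new Promise(function (resolve) {
    var chunks = [];
    var recorder = new MediaRecorder(stream, { mimeType: 'video/webm' });
    recorder.ondataavailable = function (e) { chunks.push(e.data); };
    recorder.onstop = function () { resolve(new Blob(chunks, { type: 'video/webm' })); };
    recorder.start();
    setTimeout(function () { recorder.stop(); }, durationMs);
  });
}

// Usage inside successCallback, e.g. record five seconds:
// recordStream(mediastream, 5000).then(function (blob) {
//   video.src = URL.createObjectURL(blob);
// });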