Can you Trim an MP4 Video using only Javascript and HTML? [duplicate] - javascript

Closed. This question needs to be more focused. It is not currently accepting answers.
Want to improve this question? Update the question so it focuses on one problem only by editing this post.
Closed 6 years ago.
Improve this question
I am trying to trim the length of a video within the browser, from either the beginning or the end. What I currently have is a MediaStream which is being recorded using the MediaRecorder API, which I use to produce a Blob at the end of the recording with type set to 'video/mp4'. This works great and I am able to play the video back, however I would like a way to trim the video either at the start or end.
Upon further investigation of the MediaStream API I came across the SourceBuffer object, that you can obtain from a MediaStreamTrack and use it to remove a time slice, which is exactly what I want. However I am unsure of how to obtain the MediaStreamTrack from the video (blob) since video.srcObject property returns null.

I am trying to trim the length of a video within the browser, from
either the beginning or the end.
You can use Array.prototype.slice() to remove segments — in chunks of one second (1000 ms) of data, or another time range — from the end of an array containing the Blobs pushed to it at each MediaRecorder dataavailable event, where MediaRecorder.start() is called with the parameter 1000 to set each recorded Blob to 1000 ms of recorded data.
The approach utilizes a modified version of https://github.com/samdutton/simpl/tree/gh-pages/mediarecorder . Added <input type="number"> elements to remove chunks of 1 s from the recorded video, for both playback and download, by using .slice()
html
<!-- Demo UI: live getUserMedia preview (#gum), playback of the trimmed
     recording (#recorded), and two number inputs selecting how many 1 s
     chunks to play back / download. -->
<video id="gum" autoplay muted controls></video>
<video id="recorded" autoplay controls></video>
<div>
<button id="record">Start Recording</button><label for="record"></label><br>
<span>Seconds of recorded video to play (min 1):</span><input min="1" type="number" disabled />
<button id="play" disabled>Play</button>
<span>Seconds of recorded video to download (min 1):</span><input min="1" type="number" disabled /><button id="download" disabled>Download</button>
</div>
javascript
'use strict';
/* globals MediaRecorder */
// This code is adapted from
// https://rawgit.com/Miguelao/demos/master/mediarecorder.html
// (FIX: a duplicated copy of this four-line header was removed.)
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
var mediaRecorder;
var recordedBlobs; // one Blob per 1000ms timeslice; filled by handleDataAvailable
var sourceBuffer;
var gumVideo = document.querySelector('video#gum');
var recordedVideo = document.querySelector('video#recorded');
// The two <input type=number> elements: [0] controls playback, [1] download.
var input = document.querySelectorAll("input[type=number]");
recordedVideo.ontimeupdate = function(e) {
console.log("recorded video currentTime:", e.target.currentTime)
}
gumVideo.onprogress = function(e) {
// console.log("getUserMedia video currentTime:", e.target.currentTime)
}
var recordButton = document.querySelector('button#record');
var playButton = document.querySelector('button#play');
var downloadButton = document.querySelector('button#download');
recordButton.onclick = toggleRecording;
playButton.onclick = play;
downloadButton.onclick = download;
// currentTime of the gUM video captured at each recorded chunk, for logging.
var currentTimes = [];
recordButton.nextElementSibling.innerHTML = "recorded video "
+ currentTimes.length
+ "s";
// window.isSecureContext could be used for Chrome
var isSecureOrigin = location.protocol === 'https:' ||
location.host === 'localhost';
if (!isSecureOrigin) {
alert('getUserMedia() must be run from a secure origin: HTTPS or localhost.' +
'\n\nChanging protocol to HTTPS');
location.protocol = 'HTTPS';
}
// Use old-style gUM to avoid requirement to enable the
// Enable experimental Web Platform features flag in Chrome 49
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
var constraints = {
audio: true,
video: true
};
navigator.getUserMedia(constraints, successCallback, errorCallback);
// getUserMedia success callback: stash the stream globally and show the
// live camera feed in the #gum <video>.
function successCallback(stream) {
console.log('getUserMedia() got stream: ', stream);
window.stream = stream;
// FIX: URL.createObjectURL(MediaStream) has been removed from modern
// browsers; prefer srcObject and keep the old paths as legacy fallbacks.
if ('srcObject' in gumVideo) {
gumVideo.srcObject = stream;
} else if (window.URL) {
gumVideo.src = window.URL.createObjectURL(stream);
} else {
gumVideo.src = stream;
}
}
// Log getUserMedia failures; the demo performs no further error handling.
function errorCallback(err) {
console.log('navigator.getUserMedia error: ', err);
}
// navigator.mediaDevices.getUserMedia(constraints)
// .then(function(stream) {
// console.log('getUserMedia() got stream: ', stream);
// window.stream = stream; // make available to browser console
// if (window.URL) {
// gumVideo.src = window.URL.createObjectURL(stream);
// } else {
// gumVideo.src = stream;
// }
// }).catch(function(error) {
// console.log('navigator.getUserMedia error: ', error);
// });
// 'sourceopen' handler for the module-level MediaSource: attaches a
// vp8/webm SourceBuffer and stores it in the shared `sourceBuffer` variable.
function handleSourceOpen(event) {
console.log('MediaSource opened');
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
console.log('Source buffer: ', sourceBuffer);
}
// MediaRecorder 'dataavailable' handler: one Blob arrives per 1000ms
// timeslice. Record the gUM video's currentTime alongside each chunk and
// refresh the "recorded video Ns" label next to the record button.
function handleDataAvailable(event) {
if (!event.data || event.data.size === 0) {
return;
}
currentTimes.push(gumVideo.currentTime);
recordedBlobs.push(event.data);
recordButton.nextElementSibling.innerHTML = "recorded video "
+ recordedBlobs.length
+ "s";
}
// MediaRecorder 'onstop' handler: log the stop event and the per-chunk
// currentTime values captured while recording.
function handleStop(event) {
console.log('Recorder stopped: ', event);
console.log("recorded times from getUserMedia video:", currentTimes);
}
// Record-button handler: the button's label doubles as the state flag.
function toggleRecording() {
var isIdle = recordButton.textContent === 'Start Recording';
if (isIdle) {
startRecording();
return;
}
stopRecording();
recordButton.textContent = 'Start Recording';
playButton.disabled = false;
downloadButton.disabled = false;
}
// The nested try blocks will be simplified when Chrome 47 moves to Stable
// Create a MediaRecorder for window.stream, falling back through
// progressively simpler option objects for older browsers, then start
// recording in 1000ms timeslices (one Blob per second).
function startRecording() {
var options = {
mimeType: 'video/webm',
bitsPerSecond: 100000
};
recordedBlobs = [];
currentTimes = [];
// Re-lock the chunk-count inputs until this recording finishes.
for (var i = 0; i < input.length; i++) {
input[i].setAttribute("max", 1);
input[i].setAttribute("disabled", "disabled");
}
playButton.disabled = true;
downloadButton.disabled = true;
try {
mediaRecorder = new MediaRecorder(window.stream, options);
} catch (e0) {
console.log('Unable to create MediaRecorder with options Object: ', e0);
try {
// FIX: the codecs parameter is separated from the container with ';'
// ('video/webm,codecs=vp9' is not a valid MIME type string).
options = {
mimeType: 'video/webm;codecs=vp9',
bitsPerSecond: 100000
};
mediaRecorder = new MediaRecorder(window.stream, options);
} catch (e1) {
console.log('Unable to create MediaRecorder with options Object: ', e1);
try {
options = 'video/vp8'; // Chrome 47
mediaRecorder = new MediaRecorder(window.stream, options);
} catch (e2) {
alert('MediaRecorder is not supported by this browser.\n\n' +
'Try Firefox 29 or later, or Chrome 47 or later,'
+ ' with Enable experimental Web Platform features enabled '
+ ' from chrome://flags.');
console.error('Exception while creating MediaRecorder:', e2);
return;
}
}
}
console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
recordButton.textContent = 'Stop Recording';
playButton.disabled = true;
downloadButton.disabled = true;
mediaRecorder.onstop = handleStop;
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.start(1000); // collect 1000ms of data
console.log('MediaRecorder started', mediaRecorder);
}
// Stop the recorder and re-enable the numeric inputs, capping each at the
// number of recorded 1s chunks.
function stopRecording() {
mediaRecorder.stop();
Array.prototype.forEach.call(input, function(el) {
el.setAttribute("max", recordedBlobs.length);
el.removeAttribute("disabled");
});
console.log('Recorded Blobs: ', recordedBlobs);
recordedVideo.controls = true;
}
// Play back the first `input[0].value` seconds of the recording.
// FIX: the original comment claimed chunks were sliced "from end of
// recorded video", but slice(0, n) takes the FIRST n chunks — i.e. it
// trims the END of the video. The input value is also coerced explicitly.
function play() {
var seconds = Number(input[0].value);
console.log(`playing ${input[0].value}s of getUserMedia video`
+ `recorded by MediaRecorder from time ranges`
, currentTimes.slice(0, seconds));
var file = recordedBlobs.slice(0, seconds);
var superBuffer = new Blob(file, {
type: 'video/webm'
});
recordedVideo.src = window.URL.createObjectURL(superBuffer);
}
// Download the first `input[1].value` seconds of the recording as
// test.webm. FIX: as with play(), slice(0, n) keeps the FIRST n one-second
// chunks (trimming the end), not chunks "from end" as originally commented.
function download() {
var seconds = Number(input[1].value);
var file = recordedBlobs.slice(0, seconds);
var blob = new Blob(file, {
type: 'video/webm'
});
var url = window.URL.createObjectURL(blob);
var a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = 'test.webm';
document.body.appendChild(a);
a.click();
// Defer cleanup so the click has time to start the download.
setTimeout(function() {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 100);
}
plnkr https://plnkr.co/edit/LxuV5jMX0RZtDxOxT1qa?p=preview

Related

Subsequent mediarecorder webcam recordings are corrupted when uploaded

I am trying to record 3 movies after each other, with different instructions showing to the user. The code seems to work fine: the instructions change, and the variables I am logging check out fine (chunks recorded, saving recording).
After running the script, I have 3 webm files, all roughly the same size. However, only one is playable. The other two are corrupted. I have been tinkering a lot in the code, but everything seems to be fine.
Would love if someone could give a hand!
Initialization:
// 720p-minimum camera + mic request; the stream is shown in the <video>
// and kept in cameraStream for the MediaRecorder.
// NOTE(review): the constraint key should be "facingMode" (camelCase);
// "facingmode" is unrecognized and silently ignored — TODO confirm intent.
var constraints = { audio: true,video: {width: {min: 1280}, height: {min: 720}, facingmode: 'user'}};
navigator.mediaDevices.getUserMedia(constraints).then((stream) => { video.srcObject = stream; cameraStream = stream; startRecording(); });
StartRecording function:
// Record one of three 5-second clips; the UI text (Dutch) tells the user
// where to look. Stage 1 = straight ahead, 2 = left shoulder, 3 = right.
function startRecording()
{
if(stage == 1)
{
console.log('Started stage '+stage);
// "Kijk recht in de camera" = "Look straight into the camera"
lineOne.innerHTML = "Kijk recht in de camera";
line.animate(1);
fName = 'base';
}
if(stage == 2)
{
// "Kijk over uw linkerschouder" = "Look over your left shoulder"
lineOne.innerHTML = "Kijk over uw linkerschouder";
line.set(0);
line.animate(1);
fName = 'left';
}
if(stage == 3)
{
// "Kijk over uw rechterschouder" = "Look over your right shoulder"
lineOne.innerHTML = "Kijk over uw rechterschouder";
line.set(0);
line.animate(1);
fName = 'right';
}
// Fresh chunk buffer and a fresh MediaRecorder on the same camera stream
// for every clip; chunks arrive every 500 ms.
blobs_recorded = [];
mr = new MediaRecorder(cameraStream);
mr.addEventListener('dataavailable', function(e) {
blobs_recorded.push(e.data);
});
mr.start(500);
// Stop (and upload) after 5 seconds.
setTimeout(function(){stopRecording(fName);},5000);
}
Stop recording / save file / start next movie:
// Stop the recorder, build a .webm File from the collected chunks, upload
// it synchronously, then advance to the next stage (max 3 stages).
function stopRecording(name)
{
recording = null;
// NOTE(review): mr.stop() is asynchronous — a final 'dataavailable' event
// can still fire AFTER this point, so the File built below may be missing
// the trailing chunk(s). Building the file inside mr's onstop handler
// would be safer; worth confirming as a cause of the corrupt files.
mr.stop();
console.log("stopped recording");
console.log(blobs_recorded.length);
recording = new File(blobs_recorded, name+'.webm', { type: 'video/webm' });
var xhr = new XMLHttpRequest();
const upload = new FormData();
upload.append('file',recording);
upload.append('fileName',name);
upload.append('code',code);
upload.append('renterId',renterId);
// Third argument false = synchronous XHR: blocks the main thread until
// the upload completes.
xhr.open('POST','upload.php',false);
xhr.send(upload);
stage = stage+1;
lineOne.innerHTML = "";
if(stage < 4)
{
startRecording();
}
}
After running the script, I got a base.webm / left.webm and right.webm, but only base.webm is playable, the other two are corrupted.

WebRTC & Peer JS Calling Issues

In my function connect(id): I start a call using var call = this.peer.call(id, this.mediastream); then I use
call.on('stream', function(stream)
{
// `stream` is the MediaStream of the remote peer.
// Here you'd add it to an HTML video/canvas element.
peerVideo.srcObject = stream;
console.log("Call stream has started! CLinet end!!!!!" + stream);
output.innerHTML = "Streaming audio from other client...";
});
To get the audio from the peer. This call.on stream is never executed so that means the call is not returning anything.
On the peer side of things they can hear the caller just fine. I'm not sure what is wrong!
Here is the full code:
// DOM references for the local/remote <video> elements, status output and
// peer-id form, plus shared state populated later by init() and callbacks.
const video = document.getElementById("video");
const peerVideo = document.getElementById("peervideo");
const output = document.getElementById("output");
const peerText = document.getElementById("peerid");
const peerButton = document.getElementById("peersubmit");
var promise = null;
var mediastream = null;
var peer = null;
var myId = null;
// Bootstrap: request the mic stream and create the PeerJS peer.
// NOTE(review): declared async but contains no await; the 2-second timeout
// is a race-prone stand-in for awaiting initMediaDevices() / the peer's
// 'open' event.
async function init()
{
//init the media devices
initMediaDevices();
//init peer
initPeer();
//Add timeout to ensure the peer.on method had time to get id from server
setTimeout(() => { console.log("My ID is: " + this.myId); output.innerHTML = "My ID: " + formatString(this.myId)}, 2000);
}
// Request an audio-only stream and remember it via setStream().
// NOTE(review): getUserMedia is asynchronous — the try/catch only covers
// the synchronous call, and the promise has no .catch(), so a rejection
// (e.g. permission denied) is NOT handled by the catch block below.
// "Audio was established!" is also shown before the promise resolves.
function initMediaDevices()
{
//Setup stream for usermedia
try
{
this.promise = navigator.mediaDevices.getUserMedia({audio: true})
promise.then((stream) =>
{
setStream(stream);
//video.srcObject = stream;
console.log(stream);
})
output.innerHTML = "Audio was established!";
}
catch(err)
{
console.log(err)
output.innerHTML = "Audio Failed!"
}
}
// Create the PeerJS peer, record our server-assigned id, and answer
// incoming calls with the local mic stream.
function initPeer()
{
this.peer = new Peer();
this.peer.on('open', function(id)
{
setID(id)
});
peer.on('call', function(call)
{
// Answer the call, providing our mediaStream
// NOTE(review): inside this non-arrow callback `this` is NOT the
// enclosing scope, so this.mediastream is likely undefined here —
// answering with no stream. TODO confirm (use the `mediastream` var
// or an arrow function instead).
call.answer(this.mediastream);
console.log(call + "-------------------- got here peer call");
output.innerHTML = "Connected to Peer";
call.on('stream', function(stream)
{
// `stream` is the MediaStream of the remote peer.
// Here you'd add it to an HTML video/canvas element.
peerVideo.srcObject = stream;
console.log("Call stream has started! Peer end");
output.innerHTML = "Streaming audio from other client...";
});
});
}
// Store the id assigned by the PeerJS server.
// NOTE(review): in a non-strict classic script, `this` inside a plain
// function call is the global object, so this effectively sets window.myId.
function setID(id)
{
this.myId = id;
}
// Remember the local microphone stream for placing/answering calls.
// NOTE(review): as with setID, `this` here is the global object in a
// classic script, so this sets window.mediastream.
function setStream(stream)
{
this.mediastream = stream;
console.log("Media Stream Set! " + this.mediastream);
}
// Place a call to the peer with the given id, sending our mic stream.
// NOTE(review): if getUserMedia has not resolved yet, this.mediastream is
// still null here, which would explain the remote 'stream' event never
// firing on this side — TODO confirm by awaiting the stream first.
function connect(id)
{
var call = this.peer.call(id, this.mediastream);
call.on('stream', function(stream)
{
// `stream` is the MediaStream of the remote peer.
// Here you'd add it to an HTML video/canvas element.
peerVideo.srcObject = stream;
console.log("Call stream has started! CLinet end!!!!!" + stream);
output.innerHTML = "Streaming audio from other client...";
});
console.log(call + "----------------------" + this.mediastream + " This is the person that connected");
}
// Kick everything off, then wire the submit button to dial the peer id
// typed into the text field.
init();
//Event listeners
peerButton.addEventListener("click", () =>
{
let id = peerText.value;
console.log("Button Pressed!")
connect(id);
});
//unrelated
// Wrap every non-numeric UTF-16 unit of `string` in a red <font> tag,
// leaving digits untouched. Note: isNaN('') and isNaN(' ') are false, so
// spaces pass through unstyled, matching the original behavior.
function formatString(string)
{
return string
.split('')
.map(function(ch) {
return isNaN(ch) ? ch.fontcolor("red") : ch;
})
.join('');
}
And the html
<!-- Local and remote video elements, a status line, and the peer-id form
     that triggers connect() on submit. -->
<video id="video" autoplay>Video Stream no available</video>
<video id="peervideo" autoplay>Video Stream no available</video>
<h3 id="output"></h3>
<input type="text" id="peerid">
<input type="submit" value="Submit" id="peersubmit">
I have same issue.
When you attach a debugger and set breakpoints around
var call = this.peer.call(id, this.mediastream);
call.on('stream', function(s
you will receive the stream. It sounds like you need to add a setTimeout to work around this behaviour — I know it works... but it's bad practice.
Add timeout to connect function it will work
your connect(id) is triggered before the new user completes navigator.getUserMedia();
Actually it's because you executed call.on before navigator.mediaDevices.getUserMedia had resolved,
so try using await initMediaDevices() to make sure initPeer()
executes afterwards.

Browser's audio record or microphone's permission on button click rather than on page load

I have used the MediaStream Recording API for recording audio using the microphone. The recording works well, but if the user doesn't allow the browser to record and then clicks on the record button, it does not show any error.
What I want: if the user denies microphone access and still clicks the record button, the browser should ask for audio recording permission again.
I have tried several options with the following code but unfortunately, it does not work, Can anyone help me out with this issue?
The same question or similar question asked on reprompt for permissions with getUserMedia() after initial denial but still, there is no resolution for that issue, hope for the best appropriate answer which resolve the reprompt permission popup once initialized the page.
<body>
<div class="wrapper">
<section class="main-controls">
<canvas class="visualizer" height="60px" style="display:none"></canvas>
<div id="buttons" style="margin-bottom:20px;">
<button class="record">Record</button>
<button class="stop">Stop</button>
</div>
</section>
<section class="sound-clips"></section>
</div>
<script>
// Grab the recorder UI and the container that receives finished clips.
const record = document.querySelector('.record');
const stop = document.querySelector('.stop');
const soundClips = document.querySelector('.sound-clips');
const mainSection = document.querySelector('.main-controls');
stop.disabled = true;
let audioCtx;
if (navigator.mediaDevices.getUserMedia) {
const constraints = {
audio: true
};
let chunks = [];
// Runs once the user has granted microphone access at page load; the
// single MediaRecorder created here is reused for every clip.
let onSuccess = function(stream) {
const mediaRecorder = new MediaRecorder(stream);
visualize(stream);
record.onclick = function() {
//ask for browser's audio record or microphone permission here
// NOTE(review): this condition only checks that the getUserMedia API
// EXISTS — it does not call it, so it never re-prompts for permission.
// That is why a click after a denied permission shows no error.
if (navigator.mediaDevices.getUserMedia) {
mediaRecorder.start();
console.log("recorder started");
record.style.background = "red";
stop.disabled = false;
record.disabled = true;
}
}
stop.onclick = function() {
mediaRecorder.stop();
console.log(mediaRecorder.state);
console.log("recorder stopped");
record.style.background = "";
record.style.color = "";
stop.disabled = true;
record.disabled = false;
}
// Build an <audio> clip with a renamable label from the recorded chunks.
mediaRecorder.onstop = function(e) {
const clipContainer = document.createElement('article');
const clipLabel = document.createElement('p');
const audio = document.createElement('audio');
clipContainer.classList.add('clip');
audio.setAttribute('controls', '');
clipContainer.appendChild(audio);
clipContainer.appendChild(clipLabel);
soundClips.appendChild(clipContainer);
audio.controls = true;
const blob = new Blob(chunks, {
'type': 'audio/ogg; codecs=opus'
});
chunks = [];
const audioURL = window.URL.createObjectURL(blob);
audio.src = audioURL;
console.log("recorder stopped");
clipLabel.onclick = function() {
const existingName = clipLabel.textContent;
const newClipName = prompt('Enter a new name for your sound clip?');
if (newClipName === null) {
clipLabel.textContent = existingName;
} else {
clipLabel.textContent = newClipName;
}
}
}
mediaRecorder.ondataavailable = function(e) {
chunks.push(e.data);
}
}
let onError = function(err) {
console.log('The following error occured: ' + err);
}
// The permission prompt happens exactly once, here, at page load.
navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
} else {
console.log('getUserMedia not supported on your browser!');
}
// Feed the stream into an AnalyserNode (visualization plumbing only; the
// analyser output is not drawn anywhere in this excerpt).
function visualize(stream) {
if (!audioCtx) {
audioCtx = new AudioContext();
}
const source = audioCtx.createMediaStreamSource(stream);
const analyser = audioCtx.createAnalyser();
analyser.fftSize = 2048;
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
source.connect(analyser);
}
</script>
</body>

mediarecorder.requestdata returns nothing

I am using the MediaRecorder APIs in my Chromebook extension application.
The problem is that the size of the data returned by mediarecorder.requestdata is zero. The capture stream is fine and mediaRecorder.ondataavailable is called, but the data size is zero always.
This is my configuration:
// MediaRecorder configuration used by the asker's Chromebook extension.
var mimeType = 'video/webm;codecs=h264,pcm';
var bitsPerSecond;
var options = {
mimeType: mimeType,
// NOTE(review): ignoreMuteMedia and the video size keys below are not
// standard MediaRecorder options — they are presumably ignored by the
// constructor; verify against the target Chrome OS build.
ignoreMuteMedia:true,
video:{
maxWidth:1280,
maxHeight:720
}
};
haveAudio = false;
sampleRate = 48000;
frmcnt = 0;
//if (quality === 'low') { options.bitsPerSecond = 4000000; }
var mRecorder = new MediaRecorder(stream, options);
There is an issue here.
Try this:
// Log each Blob delivered by the recorder so its size can be inspected.
mediaRecorder.addEventListener('dataavailable', function(e) {
console.log(e.data);
});

Unable to stream video over a websocket to Firefox

I have written some code to stream video over a websocket to a SourceBuffer, which works in Chrome and Edge.
However, when I run this in Firefox, the video never plays back, just a spinning wheel animation is displayed. When I check the <video> statistics, It reads HAVE_METADATA as the ready state and NETWORK_LOADING as the network state.
The code looks follows:
<!-- Page that appends fragmented MP4 received over a WebSocket to a
     MediaSource SourceBuffer, queueing chunks between updateend events. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
</head>
<body>
<video controls></video>
<script>
var mime = 'video/mp4; codecs="avc1.4D401E,mp4a.40.2"';
var address = 'ws://localhost:54132'
/* Media Source */
var source = new MediaSource();
var video = document.querySelector('video');
video.src = URL.createObjectURL(source);
source.addEventListener('sourceopen', sourceOpen);
/* Buffer */
var buffer;
var socket;
var queue = [];
var offset = -1;
var timescale;
// When the media source opens:
function sourceOpen() {
buffer = source.addSourceBuffer(mime);
buffer.addEventListener('updateend', processQueue);
socket = new WebSocket(address);
socket.binaryType = 'arraybuffer';
socket.onmessage = onMessage;
}
// When more data is received.
function onMessage(event) {
queue.push(event.data);
processQueue();
}
// Process queue if possible.
function processQueue() {
if ((queue.length == 0) || (buffer.updating)) {
return;
}
var data = queue.shift();
if (offset === -1) {
var parsed = parseMP4(data);
if (parsed.hasOwnProperty('moov')) {
timescale = parsed.moov.mvhd.timescale;
} else if (parsed.hasOwnProperty('moof')) {
// NOTE(review): `this.timescale` here does not refer to the
// module-level `timescale` variable set above — inside this plain
// function `this` is not the script scope, so the division is by
// undefined (NaN offset). It most likely should read `timescale`.
offset = 0 - (parsed.moof.traf[0].tfdt.baseMediaDecodeTime / this.timescale - 0.4);
buffer.timestampOffset = offset;
}
}
// console.log('appending ' + data.byteLength + ' bytes');
buffer.appendBuffer(data);
}
// Parse out the offset.
// NOTE: body snipped by the asker, so this snippet is not runnable as-is.
function parseMP4(data) {
// SNIP for brevity
}
</script>
</body>
</html>
Could not reproduce <video> element not playing at firefox 47.
Merged approaches at Mocking Websocket Message Events to create mock WebSocket events; bufferAll.html demo at Let's Make a Netflix
An Intro to Streaming Media on the Web for MediaSource usage pattern.
Included <progress> and progress event to notify user of media loading status.
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
</head>
<body>
<progress min="0" value="0"></progress><br><label></label><br>
<video controls></video>
<script>
// http://nickdesaulniers.github.io/netfix/demo/bufferAll.html
// http://jsfiddle.net/adamboduch/JVfkt/
// The global web socket.
var sock, sourceBuffer;
sock = new WebSocket( "ws://mock" );
sock.onerror = function(e) {
console.log("sock error", e)
}
// This is unchanging production code that doesn"t know
// we"re mocking the web socket.
// The fetched MP4 bytes reach the SourceBuffer only through this handler,
// via the synthetic MessageEvent dispatched in fetchAB below.
sock.onmessage = function( e ) {
console.log("socket message", e.data);
sourceBuffer.appendBuffer(e.data);
};
var video = document.querySelector("video");
var progress = document.querySelector("progress");
var label = document.querySelector("label");
var assetURL = "http://nickdesaulniers.github.io/netfix/"
+ "demo/frag_bunny.mp4";
// Need to be specific for Blink regarding codecs
// ./mp4info frag_bunny.mp4 | grep Codec
var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
if ("MediaSource" in window
&& MediaSource.isTypeSupported(mimeCodec)) {
var mediaSource = new MediaSource;
//console.log(mediaSource.readyState); // closed
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener("sourceopen", sourceOpen);
} else {
console.error("Unsupported MIME type or codec: ", mimeCodec);
}
video.addEventListener("canplay", function() {
alert("video canplay")
})
function sourceOpen (_) {
//console.log(this.readyState); // open
var mediaSource = this;
sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
// NOTE(review): the callback's `buf` parameter is always undefined — cb()
// is invoked with no arguments in fetchAB; the data itself arrives via
// the mocked socket 'message' event instead.
fetchAB(assetURL, function (buf) {
sourceBuffer.addEventListener("updateend", function (event) {
console.log("sourceBuffer updateend event;"
+ "mediaSource.readyState:"
, mediaSource.readyState);
// mediaSource.endOfStream();
// video.play();
// console.log(mediaSource.readyState); // ended
});
});
};
// mock `WebSocket` message
function fetchAB (url, cb) {
var xhr = new XMLHttpRequest;
xhr.open("get", url);
var file = url.split("/").pop();
xhr.responseType = "arraybuffer";
xhr.onload = function () {
// mock `WebSocket` message
sock.dispatchEvent( new MessageEvent( "message", {
data: xhr.response
}));
console.log("video sent to sock", sock);
cb();
};
// Drive the <progress> element and label while the asset downloads.
xhr.onprogress = function(e) {
progress.max = e.total;
progress.value = e.loaded;
label.innerHTML = "loading " + file + " ...<br>"
+ e.loaded + " of "
+ e.total + " bytes loaded";
}
xhr.send();
};
</script>
</body>
</html>
plnkr http://plnkr.co/edit/RCIqDXTB2BL3lec9bhfz
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
</head>
<body>
<progress min="0" value="0"></progress><br><label></label><br>
<video controls></video>
<script>
// NOTE(review): this snippet is an exact duplicate of the demo script that
// appears earlier in the page; the answer was apparently pasted twice.
// http://nickdesaulniers.github.io/netfix/demo/bufferAll.html
// http://jsfiddle.net/adamboduch/JVfkt/
// The global web socket.
var sock, sourceBuffer;
sock = new WebSocket( "ws://mock" );
sock.onerror = function(e) {
console.log("sock error", e)
}
// This is unchanging production code that doesn"t know
// we"re mocking the web socket.
sock.onmessage = function( e ) {
console.log("socket message", e.data);
sourceBuffer.appendBuffer(e.data);
};
var video = document.querySelector("video");
var progress = document.querySelector("progress");
var label = document.querySelector("label");
var assetURL = "http://nickdesaulniers.github.io/netfix/"
+ "demo/frag_bunny.mp4";
// Need to be specific for Blink regarding codecs
// ./mp4info frag_bunny.mp4 | grep Codec
var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
if ("MediaSource" in window
&& MediaSource.isTypeSupported(mimeCodec)) {
var mediaSource = new MediaSource;
//console.log(mediaSource.readyState); // closed
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener("sourceopen", sourceOpen);
} else {
console.error("Unsupported MIME type or codec: ", mimeCodec);
}
video.addEventListener("canplay", function() {
alert("video canplay")
})
function sourceOpen (_) {
//console.log(this.readyState); // open
var mediaSource = this;
sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
fetchAB(assetURL, function (buf) {
sourceBuffer.addEventListener("updateend", function (event) {
console.log("sourceBuffer updateend event;"
+ "mediaSource.readyState:"
, mediaSource.readyState);
// mediaSource.endOfStream();
// video.play();
// console.log(mediaSource.readyState); // ended
});
});
};
// mock `WebSocket` message
function fetchAB (url, cb) {
var xhr = new XMLHttpRequest;
xhr.open("get", url);
var file = url.split("/").pop();
xhr.responseType = "arraybuffer";
xhr.onload = function () {
// mock `WebSocket` message
sock.dispatchEvent( new MessageEvent( "message", {
data: xhr.response
}));
console.log("video sent to sock", sock);
cb();
};
xhr.onprogress = function(e) {
progress.max = e.total;
progress.value = e.loaded;
label.innerHTML = "loading " + file + " ...<br>"
+ e.loaded + " of "
+ e.total + " bytes loaded";
}
xhr.send();
};
</script>
</body>
</html>

Categories