I am using the MediaRecorder APIs in my Chromebook extension application.
The problem is that the size of the data returned by mediaRecorder.requestData() is zero. The capture stream is fine and mediaRecorder.ondataavailable is called, but the data size is always zero.
This is my configuration:
// Requested container/codec combination.
// NOTE(review): zero-size chunks from requestData()/dataavailable usually mean
// the recorder could not actually encode with this mimeType - check
// MediaRecorder.isTypeSupported('video/webm;codecs=h264,pcm') on the target
// Chrome OS build first.
var mimeType = 'video/webm;codecs=h264,pcm';
var bitsPerSecond; // declared but never assigned nor passed to the recorder
var options = {
mimeType: mimeType,
// NOTE(review): "ignoreMuteMedia" is not a member of the standard
// MediaRecorderOptions dictionary - unknown members are silently ignored.
ignoreMuteMedia:true,
// NOTE(review): width/height are getUserMedia track constraints, not
// MediaRecorder options; they have no effect here.
video:{
maxWidth:1280,
maxHeight:720
}
};
haveAudio = false;
sampleRate = 48000; // implicit globals - presumably used elsewhere; verify
frmcnt = 0;
//if (quality === 'low') { options.bitsPerSecond = 4000000; }
// "stream" is the capture stream obtained elsewhere.
var mRecorder = new MediaRecorder(stream, options);
There is an issue here.
Try this:
// Inspect each chunk as it arrives; e.data.size shows how much was captured.
mediaRecorder.addEventListener('dataavailable', (e) => {
  console.log(e.data);
});
Related
Closed. This question needs to be more focused. It is not currently accepting answers.
Want to improve this question? Update the question so it focuses on one problem only by editing this post.
Closed 6 years ago.
Improve this question
I am trying to trim the length of a video within the browser, from either the beginning or the end. What I currently have is a MediaStream which is being recorded using the MediaRecorder API, which I use to produce a Blob at the end of the recording with type set to 'video/mp4'. This works great and I am able to play the video back, however I would like a way to trim the video either at the start or end.
Upon further investigation of the MediaStream API I came across the SourceBuffer object, that you can obtain from a MediaStreamTrack and use it to remove a time slice, which is exactly what I want. However I am unsure of how to obtain the MediaStreamTrack from the video (blob) since video.srcObject property returns null.
I am trying to trim the length of a video within the browser, from
either the beginning or the end.
You can use Array.prototype.slice() to remove segments from the end of an array of recorded blobs, where each blob holds one second (1000 ms) of data — or any other time range. The blobs are pushed into the array in the MediaRecorder dataavailable event, and MediaRecorder.start() is called with the parameter 1000 so that each recorded Blob contains 1000 ms of data.
Approach utilizes modified version of https://github.com/samdutton/simpl/tree/gh-pages/mediarecorder . Added <input type="number"> elements to set remove chunks of 1s from recorded video for both playback and download by using .slice()
html
<!-- Live getUserMedia preview (muted to avoid audio feedback) and playback element -->
<video id="gum" autoplay muted controls></video>
<video id="recorded" autoplay controls></video>
<div>
<button id="record">Start Recording</button><label for="record"></label><br>
<!-- The number inputs are enabled after recording stops; their max is set to the
     number of recorded 1s chunks (see stopRecording in the script) -->
<span>Seconds of recorded video to play (min 1):</span><input min="1" type="number" disabled />
<button id="play" disabled>Play</button>
<span>Seconds of recorded video to download (min 1):</span><input min="1" type="number" disabled /><button id="download" disabled>Download</button>
</div>
javascript
'use strict';
/* globals MediaRecorder */
// This code is adapted from
// https://rawgit.com/Miguelao/demos/master/mediarecorder.html
// FIX: the four header lines above were pasted twice verbatim; the duplicate
// 'use strict' directive and repeated comments have been removed.
// MediaSource that could stream recorded chunks into the playback element via
// a SourceBuffer (see handleSourceOpen).
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
// Module-level recording state, (re)initialized in startRecording().
var mediaRecorder;
var recordedBlobs;
var sourceBuffer;
// Live getUserMedia preview and recorded-playback <video> elements.
var gumVideo = document.querySelector('video#gum');
var recordedVideo = document.querySelector('video#recorded');
// Both "seconds to play" and "seconds to download" number inputs.
var input = document.querySelectorAll("input[type=number]");
// Debug: trace playback position of the recorded video.
recordedVideo.ontimeupdate = function(e) {
console.log("recorded video currentTime:", e.target.currentTime)
}
gumVideo.onprogress = function(e) {
// console.log("getUserMedia video currentTime:", e.target.currentTime)
}
var recordButton = document.querySelector('button#record');
var playButton = document.querySelector('button#play');
var downloadButton = document.querySelector('button#download');
recordButton.onclick = toggleRecording;
playButton.onclick = play;
downloadButton.onclick = download;
// One entry per recorded 1s chunk: the preview's currentTime when the chunk
// arrived (filled in handleDataAvailable).
var currentTimes = [];
// Status label next to the record button; starts at "recorded video 0s".
recordButton.nextElementSibling.innerHTML = "recorded video "
+ currentTimes.length
+ "s";
// window.isSecureContext could be used for Chrome
// getUserMedia requires a secure origin; redirect plain-HTTP pages to HTTPS.
var isSecureOrigin = location.protocol === 'https:' ||
location.host === 'localhost';
if (!isSecureOrigin) {
alert('getUserMedia() must be run from a secure origin: HTTPS or localhost.' +
'\n\nChanging protocol to HTTPS');
// NOTE(review): 'https:' is the conventional value to assign here - confirm
// that 'HTTPS' is normalized by the browser's URL setter.
location.protocol = 'HTTPS';
}
// Use old-style gUM to avoid requirement to enable the
// Enable experimental Web Platform features flag in Chrome 49
// NOTE(review): callback-style navigator.getUserMedia is deprecated; the
// promise-based navigator.mediaDevices.getUserMedia (commented out below)
// is the modern API.
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
// Request microphone and camera with default settings.
var constraints = {
audio: true,
video: true
};
navigator.getUserMedia(constraints, successCallback, errorCallback);
// getUserMedia success handler: expose the stream globally (for the recorder)
// and attach it to the live preview element.
function successCallback(stream) {
  console.log('getUserMedia() got stream: ', stream);
  window.stream = stream;
  // FIX: prefer srcObject. URL.createObjectURL(MediaStream) was deprecated and
  // later removed (it throws a TypeError in modern Chrome); the old branches
  // are kept only as fallbacks for legacy browsers.
  if ('srcObject' in gumVideo) {
    gumVideo.srcObject = stream;
  } else if (window.URL) {
    gumVideo.src = window.URL.createObjectURL(stream);
  } else {
    gumVideo.src = stream;
  }
}
// Surface getUserMedia failures (permission denied, missing device, ...).
function errorCallback(err) {
  console.log('navigator.getUserMedia error: ', err);
}
// navigator.mediaDevices.getUserMedia(constraints)
// .then(function(stream) {
// console.log('getUserMedia() got stream: ', stream);
// window.stream = stream; // make available to browser console
// if (window.URL) {
// gumVideo.src = window.URL.createObjectURL(stream);
// } else {
// gumVideo.src = stream;
// }
// }).catch(function(error) {
// console.log('navigator.getUserMedia error: ', error);
// });
// 'sourceopen' handler for mediaSource: create the vp8-in-webm SourceBuffer
// that recorded chunks could be appended to.
function handleSourceOpen(event) {
console.log('MediaSource opened');
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
console.log('Source buffer: ', sourceBuffer);
}
// dataavailable handler: each Blob holds ~1s of recording (start(1000)).
// Stores the chunk plus the preview's currentTime, and updates the status label.
function handleDataAvailable(event) {
  if (event.data && event.data.size > 0) {
    currentTimes.push(gumVideo.currentTime);
    recordedBlobs.push(event.data);
    // FIX: the label is plain text - use textContent instead of innerHTML so
    // the string is never parsed as HTML.
    recordButton.nextElementSibling.textContent = "recorded video "
      + recordedBlobs.length
      + "s";
  }
}
// MediaRecorder 'stop' handler: log the per-chunk capture timestamps gathered
// in handleDataAvailable, for debugging.
function handleStop(event) {
console.log('Recorder stopped: ', event);
console.log("recorded times from getUserMedia video:", currentTimes);
}
// Record button handler; the button label doubles as the state flag.
function toggleRecording() {
  var isIdle = recordButton.textContent === 'Start Recording';
  if (isIdle) {
    startRecording();
    return;
  }
  stopRecording();
  recordButton.textContent = 'Start Recording';
  playButton.disabled = false;
  downloadButton.disabled = false;
}
// The nested try blocks will be simplified when Chrome 47 moves to Stable
// Reset recording state, lock the slice inputs, and create a MediaRecorder,
// falling back through progressively more specific codec choices.
function startRecording() {
  var options = {
    mimeType: 'video/webm',
    bitsPerSecond: 100000
  };
  recordedBlobs = [];
  currentTimes = [];
  // Inputs stay disabled while recording; max is raised in stopRecording().
  for (var i = 0; i < input.length; i++) {
    input[i].setAttribute("max", 1);
    input[i].setAttribute("disabled", "disabled");
  }
  playButton.disabled = true;
  downloadButton.disabled = true;
  try {
    mediaRecorder = new MediaRecorder(window.stream, options);
  } catch (e0) {
    console.log('Unable to create MediaRecorder with options Object: ', e0);
    try {
      // FIX: codecs are separated from the container with ';', not ','.
      // 'video/webm,codecs=vp9' is an invalid MIME string and is rejected.
      options = {
        mimeType: 'video/webm;codecs=vp9',
        bitsPerSecond: 100000
      };
      mediaRecorder = new MediaRecorder(window.stream, options);
    } catch (e1) {
      console.log('Unable to create MediaRecorder with options Object: ', e1);
      try {
        // FIX: the options argument must be a dictionary with a mimeType
        // member - the original passed the bare string 'video/vp8'.
        options = { mimeType: 'video/webm;codecs=vp8' }; // Chrome 47
        mediaRecorder = new MediaRecorder(window.stream, options);
      } catch (e2) {
        alert('MediaRecorder is not supported by this browser.\n\n' +
          'Try Firefox 29 or later, or Chrome 47 or later,'
          + ' with Enable experimental Web Platform features enabled '
          + ' from chrome://flags.');
        console.error('Exception while creating MediaRecorder:', e2);
        return;
      }
    }
  }
  console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
  recordButton.textContent = 'Stop Recording';
  playButton.disabled = true;
  downloadButton.disabled = true;
  mediaRecorder.onstop = handleStop;
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.start(1000); // collect 1000ms of data
  console.log('MediaRecorder started', mediaRecorder);
}
// Stop the recorder, then re-enable the slice inputs capped at the number of
// recorded 1s chunks.
function stopRecording() {
  mediaRecorder.stop();
  var chunkCount = recordedBlobs.length;
  for (var idx = 0; idx < input.length; idx++) {
    input[idx].setAttribute("max", chunkCount);
    input[idx].removeAttribute("disabled");
  }
  recordedVideo.controls = true;
  console.log('Recorded Blobs: ', recordedBlobs);
}
// Play back only the first input[0].value seconds' worth of 1s chunks
// (i.e. trim the remainder from the end).
function play() {
  var seconds = input[0].value;
  console.log(`playing ${input[0].value}s of getUserMedia video`
    + `recorded by MediaRecorder from time ranges`
    , currentTimes.slice(0, seconds));
  var chunks = recordedBlobs.slice(0, seconds);
  var superBuffer = new Blob(chunks, {
    type: 'video/webm'
  });
  recordedVideo.src = window.URL.createObjectURL(superBuffer);
}
// Download the first input[1].value seconds' worth of recorded chunks as a
// single webm file, via a temporary hidden anchor.
function download() {
  var chunks = recordedBlobs.slice(0, input[1].value);
  var blob = new Blob(chunks, { type: 'video/webm' });
  var url = window.URL.createObjectURL(blob);
  var anchor = document.createElement('a');
  anchor.style.display = 'none';
  anchor.href = url;
  anchor.download = 'test.webm';
  document.body.appendChild(anchor);
  anchor.click();
  // Clean up the anchor and the object URL once the click has been handled.
  setTimeout(function() {
    document.body.removeChild(anchor);
    window.URL.revokeObjectURL(url);
  }, 100);
}
plnkr https://plnkr.co/edit/LxuV5jMX0RZtDxOxT1qa?p=preview
I am trying to record 3 movies after each other, with different instructions shown to the user. The code seems to work fine: the instructions change, and the variables I am logging check out fine (chunks recorded, saving recording).
After running the script, I have 3 webm files, all roughly the same size. However, only one is playable. The other two are corrupted. Been tinkering alot in the code, but everything seems to be fine.
Would love if someone could give a hand!
Initialization:
// FIX: the constraint key is camelCase "facingMode" - "facingmode" is silently
// ignored, so the front camera was never actually being requested.
var constraints = { audio: true, video: { width: { min: 1280 }, height: { min: 720 }, facingMode: 'user' } };
// FIX: added a .catch so permission/device errors are not silently swallowed.
navigator.mediaDevices.getUserMedia(constraints).then((stream) => { video.srcObject = stream; cameraStream = stream; startRecording(); }).catch((err) => console.error('getUserMedia failed:', err));
StartRecording function:
// Show the per-stage instruction, pick the output file name, and record a
// fresh 5-second clip from the shared camera stream.
function startRecording() {
  if (stage == 1) {
    console.log('Started stage ' + stage);
    lineOne.innerHTML = "Kijk recht in de camera";
    line.animate(1);
    fName = 'base';
  } else if (stage == 2) {
    lineOne.innerHTML = "Kijk over uw linkerschouder";
    line.set(0);
    line.animate(1);
    fName = 'left';
  } else if (stage == 3) {
    lineOne.innerHTML = "Kijk over uw rechterschouder";
    line.set(0);
    line.animate(1);
    fName = 'right';
  }
  // Fresh chunk buffer and recorder for this clip; a chunk every 500ms.
  blobs_recorded = [];
  mr = new MediaRecorder(cameraStream);
  mr.addEventListener('dataavailable', (e) => blobs_recorded.push(e.data));
  mr.start(500);
  // Each clip records for 5 seconds before being stopped and saved.
  setTimeout(function() { stopRecording(fName); }, 5000);
}
Stop recording / save file / start next movie:
// Stop the current clip, build the webm File, upload it, and kick off the
// next stage (if any).
function stopRecording(name)
{
  recording = null;
  // BUG FIX: MediaRecorder.stop() queues one final dataavailable event with
  // the last chunk. Building the File synchronously right after stop() races
  // that event and can produce truncated/corrupt webm files (the likely reason
  // left.webm / right.webm were unplayable). Assemble and upload only once the
  // recorder has fully stopped.
  mr.addEventListener('stop', function () {
    console.log("stopped recording");
    console.log(blobs_recorded.length);
    recording = new File(blobs_recorded, name + '.webm', { type: 'video/webm' });
    var xhr = new XMLHttpRequest();
    const upload = new FormData();
    upload.append('file', recording);
    upload.append('fileName', name);
    upload.append('code', code);
    upload.append('renterId', renterId);
    // NOTE(review): synchronous XHR (third argument false) blocks the UI and
    // is deprecated - consider an async fetch() before starting the next stage.
    xhr.open('POST', 'upload.php', false);
    xhr.send(upload);
    stage = stage + 1;
    lineOne.innerHTML = "";
    if (stage < 4) {
      startRecording();
    }
  });
  mr.stop();
}
After running the script, I got a base.webm / left.webm and right.webm, but only base.webm is playable, the other two are corrupted.
I stored some JPEG files (exactly 350 — the same files, same size; total: 336.14 MB) as Blobs in IndexedDB. It took around 1 second to complete the transaction. Then I read all the data from IndexedDB into an array and stored it to IndexedDB again. But this time it takes around 15 seconds. I observed this as consistent behavior. Is anything wrong here? I used performance.now() to get the time difference.
Files: 350,
Size of each: 937 KB,
Browser: Chrome and Chromium Edge
//Open
// Open the image database, creating the object store on first use/upgrade.
var dbOpen = indexedDB.open(INDEXED_DB_NAME, INDEXED_DB_VERSION);
dbOpen.addEventListener('upgradeneeded', function (e) {
  console.log("onupgradeneeded");
  // Records are keyed by an auto-incrementing integer.
  var store = e.target.result.createObjectStore(
    IMAGE_DATA_STORE, { autoIncrement: true });
});
dbOpen.addEventListener('success', function (e) {
  image_data_db = dbOpen.result;
  console.log("indexed DB opened");
});
//Initial Write
// Wrap each selected file in a fresh Blob and queue it for the first write.
var inputFiles = document.getElementById('inputFiles');
for (var idx = 0; idx < inputFiles.files.length; idx++) {
  var selected = inputFiles.files[idx];
  fileblobs.push(new Blob([selected], { type: selected.type }));
}
StoreIdb(fileblobs); // < First write
//StoreIdb()
// Time a single readwrite transaction that adds every queued blob.
t0 = performance.now(); // FIX: was "performace.now()" - a ReferenceError
var trx = image_data_db.transaction(IMAGE_DATA_STORE, 'readwrite');
var imagestore = trx.objectStore(IMAGE_DATA_STORE);
// FIX: "i" and "request" were implicit globals; declare them.
for (var i = 0; i < fileblobs.length; i++) {
  var request = imagestore.add(fileblobs[i]);
  request.onsuccess = function (e) {
    console.log('added');
  };
  request.onerror = function (e) {
    console.error("Request Error", this.error);
  };
}
trx.onabort = function (e) {
  console.error("Exception:", this.error, this.error.name);
};
// The elapsed time is only meaningful once the transaction commits.
trx.oncomplete = function (e) {
  console.log('completed');
  t1 = performance.now();
  timetaken = t1 - t0;
}
//Read
// Stream every record out of the store with a cursor, collecting the blobs.
var objectStore = image_data_db.transaction(IMAGE_DATA_STORE).objectStore(IMAGE_DATA_STORE);
objectStore.openCursor().onsuccess = function (e) {
var cursor = e.target.result;
if (cursor) {
// NOTE(review): this assumes each stored value is an object with a .blob
// property, but the write path above add()s the Blob itself - verify the
// record shape (cursor.value vs cursor.value.blob).
blobArray.push(cursor.value.blob);
cursor.continue();
}
else
{
// cursor exhausted: all records have been read
}
}
// blobArray will be used for second time << Second Write
I figured it out. The first time, it was storing the File instance as the blob.
I've changed the File instance blob to an ArrayBuffer-backed Blob to ensure the data type is the same in both cases. Now it takes the same time.
// Convert each File into an ArrayBuffer-backed Blob so the stored data type
// matches on both writes; invoke callback once every file has been converted.
for (var k = 0; k < inputFiles.files.length; k++) {
  let file = inputFiles.files[k];
  file.arrayBuffer().then((arrayBuffer) => {
    blobs.push(new Blob([new Uint8Array(arrayBuffer)], { type: file.type }));
    // All conversions resolved out of order; fire once the count matches.
    if (blobs.length == inputFiles.files.length) {
      callback(blobs);
    }
  });
}
MediaRecorder's ondataavailable works successfully only once.
I need to get blob, get it base64, send to my server, decode this base64 to audio blob.
This is very strange.
For example, output:
blob1
blob2
blob3
blob4
blob5
blob6
blob7
blob8
blob9
....
I can hear just blob1; the other blobs are "disabled".
Try it!
This code record audio:
// Record microphone audio, emitting a base64 data-URI to cb every 500ms.
// NOTE: each chunk after the first is NOT a standalone audio file - only the
// first chunk carries the container header, which is why only the first
// decoded blob is playable on its own.
window.startRecord = function(cb) {
  navigator.mediaDevices.getUserMedia({ audio: true, video: false }).then(function(stream) {
    // FIX: dropped the unused "var int", the irrelevant videoBitsPerSecond
    // (audio-only recording), and the stray backslash in the mimeType string
    // ('audio/webm\;codecs=opus' already evaluated to 'audio/webm;codecs=opus').
    var options = {
      audioBitsPerSecond: 128000,
      mimeType: 'audio/webm;codecs=opus'
    };
    // Fall back to ogg/opus where webm/opus is unsupported.
    if (!MediaRecorder.isTypeSupported(options['mimeType'])) options['mimeType'] = "audio/ogg; codecs=opus";
    window.voice = new MediaRecorder(stream, options);
    voice.start(500);
    voice.ondataavailable = function(data) {
      // Convert the chunk to a base64 data-URI and hand it to the callback.
      var reader = new FileReader();
      var blob = data.data;
      reader.readAsDataURL(blob);
      reader.onloadend = function () {
        var result = reader.result;
        cb(result);
      };
    };
    voice.onstop = function() {
      console.log('stop audio call');
    };
  }).catch(function(err) {
    // FIX: surface permission/device errors instead of leaving the promise
    // rejection unhandled.
    console.error('getUserMedia failed:', err);
  });
}
// Decode the base64 payload of a data: URI into a Uint8Array of raw bytes.
window.convertDataURIToBinary = function(dataURI) {
  var BASE64_MARKER = ';base64,';
  var base64Index = dataURI.indexOf(BASE64_MARKER) + BASE64_MARKER.length;
  var base64 = dataURI.substring(base64Index);
  var raw = window.atob(base64);
  var rawLength = raw.length;
  // A Uint8Array allocates its own buffer; wrapping an explicit ArrayBuffer
  // was redundant.
  var array = new Uint8Array(rawLength);
  // FIX: "i" was an implicit global; declare it.
  for (var i = 0; i < rawLength; i++) {
    array[i] = raw.charCodeAt(i);
  }
  return array;
}
<body>
<button onclick="startRecord(function(r){
var binary= convertDataURIToBinary(r);
var blob=new window.Blob([binary], {type : 'audio/webm'});
var blobUrl = window.URL.createObjectURL(blob);
console.log('URL : ' + blobUrl);
document.getElementById('data').append(blobUrl + `
|
`);
})">Exec</button>
<div id="data">
</div>
<!-- FIX: the original opened a second <body> tag here before </body>;
     a document has exactly one body element. -->
</body>
I am not sure what is the problem you try to highlight, but:
The dataavailable event's data property contains only a chunk of the whole data that has been recorded.
For instance, only the first chunk will contain the metadata needed for the final recorded media.
It is then expected that you will merge all these chunks together at the time you will export them.
And this should be done only once, at the MediaRecorder.stop event.
const chunks = []; // store all the chunks in an array
recorder.ondataavailable = e => chunks.push(e.data);
// merge the chunks in a single Blob here, once, when recording has stopped
recorder.onstop = e => export_media(new Blob(chunks)); // FIX: was "recoder.onstop"
I have a function that mainly download images in a blob object, and it's working fine on chrome, FF, iOS 7+, but not on iOS 6...
// Fetch an image as a Blob via XHR; resolves with the result of storing it
// (or with undefined on a non-200 response) and bumps the chart progress
// counter either way, so a failed image never stalls the batch.
downloadImage: function( url ) {
var that = this;
return new Ember.RSVP.Promise(function( resolve, reject ) {
var xhr = new XMLHttpRequest();
xhr.open('GET', url);
xhr.onreadystatechange = function() {
// "this" inside this handler is the XHR itself, not the component ("that").
if (this.readyState === this.DONE) {
that.chart.incrementProgress();
if (this.status === 200) {
// NOTE(review): responseType is set to 'blob' below, but per the question
// iOS 6 appears to ignore it and deliver a string - verify on target devices.
var blob = this.response;
resolve( that.imageStore.writeImage( that, url, blob ) );
}
else {
// Non-200: resolve (not reject) so one failure doesn't abort the batch.
resolve();
}
}
};
xhr.responseType = 'blob';
xhr.send();
});
}
In iOS 6, in the console debugger, when I inspect my blob object, it seems to be a string with very strange characters in it. I'm not sure if that is normal or whether my request doesn't work properly on this version of iOS.
After that I need to convert it into a base64, so I use FileReader for that like this :
// Read the blob as a base64 data-URI and persist it: update the existing
// 'image' record for this url if one exists, otherwise create a new record.
this.writeImage = function( controller, url, blob ) {
var that = this;
return new Ember.RSVP.Promise(function( resolve ) {
var reader = new window.FileReader();
reader.readAsDataURL(blob);
// NOTE(review): per the question, onloadend never fires on iOS 6 because the
// "blob" is actually a string there (readyState stays 0) - verify input type.
reader.onloadend = function() {
var base64 = reader.result;
var object = { id: url, key: url, base64: base64 };
//controller.store.update('image', object).save();
controller.store.findQuery('image', { key: url })
.then(function( result ) {
// Record exists: write the new base64 payload directly.
// NOTE(review): record._data is private Ember Data API - fragile.
var record = result.content[0];
record._data.base64 = base64;
record.save().then( resolve );
})
.catch(function() {
// No record found for this key: create one.
controller.store.createRecord('image', object).save().then( resolve );
});
};
});
};
Don't pay attention to the Promise thing and other arguments, but the blob is the same as the one in the downloadImage function.
And for a mysterious reason, reader.onloadend is never triggered because the readyState in reader always stays at 0.
Should I do something particular for iOS6 or my code is wrong ?
[edit] : It's like the onloadend callback is not triggered ??
[edit2] : After further investigation, it seems that the response from the ajax request is a string instead of a blob... And my responseType is set as "" as well ?
I have found a workaround for now, I convert my binaryString into a blob like this :
// Convert a binary string (one charCode per byte) into a Blob of the given
// content type.
// FIX/simplification: the hand-rolled 1 KB chunking was unnecessary - a single
// Uint8Array pass produces an identical Blob. Char codes are truncated to
// 8 bits exactly as before (Uint8Array stores the value modulo 256).
function binaryStringToBlob( byteCharacters, contentType ) {
  var length = byteCharacters.length;
  var bytes = new Uint8Array(length);
  for (var i = 0; i < length; i++) {
    bytes[i] = byteCharacters.charCodeAt(i);
  }
  return new Blob([bytes], { type: contentType });
}
You just need to get the content-type and here you go !