I need to validate a video's resolution: if it is above 1280x720, I need to block the upload before the user submits it. How can I do this in the browser with JavaScript?
You can use the mp4box npm library.
import MP4Box from 'mp4box'

const mp4boxfile = MP4Box.createFile();
const reader = new FileReader();

// Decode a base64 string into the ArrayBuffer that mp4box expects.
function _base64ToArrayBuffer(base64) {
  var binary_string = window.atob(base64);
  var len = binary_string.length;
  var bytes = new Uint8Array(len);
  for (var i = 0; i < len; i++) {
    bytes[i] = binary_string.charCodeAt(i);
  }
  return bytes.buffer;
}

reader.onload = function () {
  mp4boxfile.onReady = function (info) {
    console.log('Video info: ', info); // resolution is available on info.videoTracks[0].video.width and .height
  };
  // Strip the "data:video/mp4;base64," prefix. Slicing at the comma is safer
  // than a hard-coded offset, since the prefix length varies with the MIME type.
  const base64 = reader.result.slice(reader.result.indexOf(',') + 1);
  const arrayBuffer = _base64ToArrayBuffer(base64);
  arrayBuffer.fileStart = 0; // mp4box needs to know the buffer's offset within the file
  mp4boxfile.appendBuffer(arrayBuffer);
};
reader.readAsDataURL(videoFile); // videoFile comes from the onChange event of an <input type="file"> element
Codec details and the video duration are also available on the info object.
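If the base64 round trip is not needed for anything else, here is a shorter sketch of the same check (assuming the browser supports Blob.prototype.arrayBuffer, and reusing the MP4Box import above):

const mp4boxfile = MP4Box.createFile();
mp4boxfile.onReady = function (info) {
  const { width, height } = info.videoTracks[0].video;
  if (width > 1280 || height > 720) {
    // block the upload here
  }
};

videoFile.arrayBuffer().then(function (buffer) {
  buffer.fileStart = 0; // mp4box requires the buffer's offset within the file
  mp4boxfile.appendBuffer(buffer);
});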
Related
I currently have the following code. My goal is to remove the extra FileReader loop. Is it possible to maybe chain readAsArrayBuffer and readAsDataURL, or load them both?
var images = [];
var files = dt.files.length;

// First pass: read each file as an ArrayBuffer to check its MIME type.
for (i = 0; i < files; i++) {
  var reader = new FileReader();
  reader.fileId = i;
  reader.onload = function () {
    images[this.fileId] = { is_valid: true }; // set true/false based on the MIME check
  };
  reader.readAsArrayBuffer(dt.files[i]);
}

// Second pass: read each file again as a data URL to display it.
for (i = 0; i < files; i++) {
  var reader = new FileReader();
  reader.fileId = i;
  reader.onload = function () {
    // load image
    if (images[this.fileId].is_valid) {
      // do this
    } else {
      // do this
    }
  };
  reader.readAsDataURL(dt.files[i]);
}
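One possible way to avoid the second pass, sketched under the assumption that an ArrayBuffer is enough for both jobs: read each file once, validate the MIME type from the bytes, and build the preview from the same buffer via a Blob and URL.createObjectURL. isValidImage is a hypothetical helper standing in for your MIME check.

function isValidImage(buffer) {
  var bytes = new Uint8Array(buffer);
  return bytes[0] === 0xFF && bytes[1] === 0xD8; // e.g. JPEG magic bytes
}

for (var i = 0; i < dt.files.length; i++) {
  (function (file) {
    var reader = new FileReader();
    reader.onload = function () {
      if (isValidImage(reader.result)) {
        // use this object URL as the <img> src instead of a data URL
        var url = URL.createObjectURL(new Blob([reader.result], { type: file.type }));
      } else {
        // handle the invalid file
      }
    };
    reader.readAsArrayBuffer(file);
  })(dt.files[i]);
}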
I stored some JPEG files (exactly 350 of them, all the same file and size; 336.14 MB in total) as Blobs in IndexedDB. The transaction took around 1 second to complete. Then I read all the data from IndexedDB into an array and stored it to IndexedDB again. This second time it takes around 15 seconds, and I have observed this as consistent behavior. Is anything wrong here? I used performance.now() to measure the time difference.
Files: 350,
Size of each: 937 KB,
Browser: Chrome and Chromium Edge
//Open
var dbOpen = indexedDB.open(INDEXED_DB_NAME, INDEXED_DB_VERSION);
dbOpen.onupgradeneeded = function (e) {
  console.log("onupgradeneeded");
  var store = e.currentTarget.result.createObjectStore(
    IMAGE_DATA_STORE, { autoIncrement: true });
};
dbOpen.onsuccess = function (e) {
  image_data_db = dbOpen.result;
  console.log("indexed DB opened");
};
//Initial Write
var inputFiles = document.getElementById('inputFiles');
var fileblobs = [];
for (var i = 0; i < inputFiles.files.length; i++) {
  let file = inputFiles.files[i];
  var b = new Blob([file], { type: file.type });
  fileblobs.push(b);
}
StoreIdb(fileblobs); // < First write
//StoreIdb()
t0 = performance.now();
var trx = image_data_db.transaction(IMAGE_DATA_STORE, 'readwrite');
var imagestore = trx.objectStore(IMAGE_DATA_STORE);
for (i = 0; i < fileblobs.length; i++) {
  request = imagestore.add(fileblobs[i]);
  request.onsuccess = function (e) {
    console.log('added');
  };
  request.onerror = function (e) {
    console.error("Request Error", this.error);
  };
}
trx.onabort = function (e) {
  console.error("Exception:", this.error, this.error.name);
};
trx.oncomplete = function (e) {
  console.log('completed');
  t1 = performance.now();
  timetaken = t1 - t0;
};
//Read
var blobArray = [];
var objectStore = image_data_db.transaction(IMAGE_DATA_STORE).objectStore(IMAGE_DATA_STORE);
objectStore.openCursor().onsuccess = function (e) {
  var cursor = e.target.result;
  if (cursor) {
    blobArray.push(cursor.value); // the blob itself was stored as the record value
    cursor.continue();
  } else {
    // completed
  }
};
// blobArray will be used for second time << Second Write
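As an aside, if the store holds nothing but the blobs themselves, getAll() (available in the Chrome and Chromium Edge versions in question) can replace the cursor loop; a minimal sketch:

var tx = image_data_db.transaction(IMAGE_DATA_STORE);
tx.objectStore(IMAGE_DATA_STORE).getAll().onsuccess = function (e) {
  var blobArray = e.target.result; // all record values in one array
};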
I figured it out. The first time, it was storing File-backed Blobs. I changed the File-backed Blob to an ArrayBuffer-backed one, just to ensure the data type is the same in both cases. Now both writes take the same time.
var blobs = [];
for (var i = 0; i < inputFiles.files.length; i++) {
  let file = inputFiles.files[i];
  file.arrayBuffer().then((arrayBuffer) => {
    let blob = new Blob([new Uint8Array(arrayBuffer)], { type: file.type });
    blobs.push(blob);
    if (blobs.length == inputFiles.files.length) {
      callback(blobs); // e.g. StoreIdb from above
    }
  });
}
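The length check inside the callback works, but Promise.all expresses the same "wait for every file" logic more directly; a sketch under the same assumptions (inputFiles is the file input from above, StoreIdb the write function):

Promise.all(
  Array.from(inputFiles.files).map((file) =>
    file.arrayBuffer().then(
      (buf) => new Blob([new Uint8Array(buf)], { type: file.type })
    )
  )
).then((blobs) => StoreIdb(blobs));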
MediaRecorder's ondataavailable works successfully only once.
I need to get a blob, convert it to base64, send it to my server, and decode that base64 back into an audio blob.
This is very strange.
For example, output:
blob1
blob2
blob3
blob4
blob5
blob6
blob7
blob8
blob9
....
I can hear only blob1; the other blobs are "disabled".
Try it!
This code records the audio:
window.startRecord = function (cb) {
  navigator.mediaDevices.getUserMedia({ audio: true, video: false }).then(function (stream) {
    var options = {
      audioBitsPerSecond: 128000,
      videoBitsPerSecond: 2500000,
      mimeType: 'audio/webm;codecs=opus'
    };
    if (!MediaRecorder.isTypeSupported(options['mimeType'])) options['mimeType'] = "audio/ogg; codecs=opus";
    window.voice = new MediaRecorder(stream, options);
    voice.start(500); // emit a dataavailable event every 500 ms
    voice.ondataavailable = function (data) {
      var reader = new FileReader();
      var blob = data.data;
      reader.readAsDataURL(blob);
      reader.onloadend = function () {
        var result = reader.result;
        cb(result);
      };
    };
    voice.onstop = function () {
      console.log('stop audio call');
    };
  });
};
window.convertDataURIToBinary = function (dataURI) {
  var BASE64_MARKER = ';base64,';
  var base64Index = dataURI.indexOf(BASE64_MARKER) + BASE64_MARKER.length;
  var base64 = dataURI.substring(base64Index);
  var raw = window.atob(base64);
  var rawLength = raw.length;
  var array = new Uint8Array(new ArrayBuffer(rawLength));
  for (var i = 0; i < rawLength; i++) {
    array[i] = raw.charCodeAt(i);
  }
  return array;
};
<body>
  <button onclick="startRecord(function (r) {
    var binary = convertDataURIToBinary(r);
    var blob = new window.Blob([binary], { type: 'audio/webm' });
    var blobUrl = window.URL.createObjectURL(blob);
    console.log('URL : ' + blobUrl);
    document.getElementById('data').append(blobUrl + ' | ');
  })">Exec</button>
  <div id="data"></div>
</body>
I am not sure what problem you are trying to highlight, but:
The dataavailable event's data property contains only a chunk of the whole data that has been recorded.
For instance, only the first chunk will contain the metadata needed for the final recorded media.
You are therefore expected to merge all these chunks together at the time you export them.
And this should be done only once, at the MediaRecorder's stop event.
const chunks = []; // store all the chunks in an array
recorder.ondataavailable = e => chunks.push(e.data);

// merge the chunks into a single Blob here, only once, in the stop handler
recorder.onstop = e => export_media(new Blob(chunks));
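Putting it together with the recorder from the question, a sketch of the whole flow; the five-second timeout and the object-URL export are just for illustration:

navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => {
  const recorder = new MediaRecorder(stream);
  const chunks = [];
  recorder.ondataavailable = e => chunks.push(e.data);
  recorder.onstop = () => {
    // merge every chunk into one Blob; only this merged Blob is independently playable
    const blob = new Blob(chunks, { type: recorder.mimeType });
    console.log('playable URL:', URL.createObjectURL(blob));
  };
  recorder.start(500);                     // 500 ms chunks are still fine
  setTimeout(() => recorder.stop(), 5000); // stop after 5 s for the demo
});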
I am trying to send an audio file (or chunk) via socket.io.
I tried:
// server
socket.on('client-get-audio', function (data) {
  //data.fname - name of requested file
  var buffer = fs.readFileSync(data.fname, null);
  // try 2 - read arraybuffer
  //var buffer = fs.readFileSync(data.fname, null).buffer;
  buffer = new Uint8Array(buffer);
  socket.emit('audio-data', {count: 1, buff: buffer});
});
I cannot decode the data:
// client
// ctx - AudioContext
// source - buffer source
socket.on('audio-data', function (data) {
  ctx.decodeAudioData(data.buff, decoded => {
    source.buffer = decoded;
    source.start(0);
  });
});
I will not describe all my attempts; there were a lot of them.
update
working variant
// server
socket.on('client-get-audio', function (data) {
  //data.fname - name of requested file
  var buffer = fs.readFileSync(data.fname, null);
  socket.emit('audio-data', {count: 1, buff: buffer});
});

// client
function toArrayBuffer(buf) {
  var ab = new ArrayBuffer(buf.length);
  var view = new Uint8Array(ab);
  for (var i = 0; i < buf.length; ++i) {
    view[i] = buf[i];
  }
  return ab;
}

// ctx - AudioContext
// source - buffer source
socket.on('audio-data', function (data) {
  // rebuild an ArrayBuffer from the received byte array before decoding
  ctx.decodeAudioData(toArrayBuffer(data.buff.data), decoded => {
    source.buffer = decoded;
    source.start(0);
  });
});
Maybe someone can suggest a more correct variant?
Try to build an API on top of the socket. It's more convenient, I think.
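For what it's worth, here is a sketch of a variant without the manual byte-copy loop, assuming a Socket.IO version that transmits Node Buffers as binary (the browser side then receives an ArrayBuffer directly):

// server: slice out exactly the file's bytes as a standalone ArrayBuffer
// (a Buffer's view may sit inside a larger shared memory pool)
socket.on('client-get-audio', function (data) {
  var buffer = fs.readFileSync(data.fname);
  var ab = buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
  socket.emit('audio-data', { count: 1, buff: ab });
});

// client: the payload is already an ArrayBuffer, so decode it directly
socket.on('audio-data', function (data) {
  ctx.decodeAudioData(data.buff, function (decoded) {
    source.buffer = decoded;
    source.start(0);
  });
});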
So I've got a video file and the path to it (e.g. /outerfolder/innerfolder/video.mp4). I now want to Base64-encode this video in JS so that I can store it in a database. If you could help me with encoding the video file, I would be very thankful.
Thanks in advance.
You can encode the file in a few steps, starting by reading it into an ArrayBuffer:
[update]
//<input type=file id="encondeMP4">
var encodeMP4 = document.getElementById('encondeMP4');
You now add an event listener for when the file input changes:
window.onload = function () {
  // add event listeners
  encodeMP4.addEventListener('change', someFunction);
};
You need a function to handle the call from the event listener:
function someFunction() {
  encode(arrayBufferToString);
}

function encode(callback) {
  var file = encodeMP4.files[0];
  var reader = new FileReader();
  reader.onload = function (e) {
    var contents = e.target.result;  // the file contents as an ArrayBuffer
    var array = callback(contents);  // converted to a binary string by the callback
  };
  reader.readAsArrayBuffer(file);
}
In var array you now have the MP4 encoded as a binary string. Since array is a local variable of the previous function, you'll need to adapt this code to your needs; maybe use a global container, e.g. YourEncodedMP4 = array.
function arrayBufferToString(buffer) {
  var binary = '';
  var bytes = new Uint8Array(buffer);
  var len = bytes.byteLength;
  for (var i = 0; i < len; i++) {
    binary += String.fromCharCode(bytes[i]);
  }
  return binary;
}
You are now ready to convert the binary-encoded MP4 string to a Base64 one.
It is up to you where the contents of var array are stored, but remember that this call is asynchronous.
Now you can use the following function with the container "YourEncodedMP4":
function stringToArrayBuffer(YourEncodedMP4) {
  var arrBuff = new ArrayBuffer(YourEncodedMP4.length);
  var writer = new Uint8Array(arrBuff);
  for (var i = 0, len = YourEncodedMP4.length; i < len; i++) {
    writer[i] = YourEncodedMP4.charCodeAt(i);
  }
  return writer;
}
You now have a function that returns a byte array, which you can then use like this:
var base64String = btoa(String.fromCharCode.apply(null, new Uint8Array(stringToArrayBuffer(YourEncodedMP4))));
You'll end up with a Base64 string.
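Two caveats about the code above: String.fromCharCode.apply can exceed the call stack's argument limit on large files, and the string round trip is not strictly necessary. If all you need is the Base64 payload, a simpler sketch is to let FileReader produce a data URL directly (fileToBase64 is a hypothetical helper name):

function fileToBase64(file, cb) {
  var reader = new FileReader();
  reader.onload = function () {
    // reader.result looks like "data:video/mp4;base64,AAAA..."; keep only the payload
    cb(reader.result.slice(reader.result.indexOf(',') + 1));
  };
  reader.readAsDataURL(file);
}

fileToBase64(encodeMP4.files[0], function (base64String) {
  // store base64String in the database
});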