How to define document in a JavaScript blob?

I want to create an audio element from inside the blob code, but I can't get it to work.
const blobCode = () => {
  return `
    var sound = document.createElement('audio');
    sound.id = 'audio-player';
    sound.controls = 'controls';
    sound.src = 'media/Blue Browne.mp3';
    sound.type = 'audio/mpeg';
    document.body.appendChild(sound);

    class WhiteNoiseProcessor extends AudioWorkletProcessor {
      process (inputs, outputs, parameters) {
        const output = outputs[0]
        output.forEach(channel => {
          for (let i = 0; i < channel.length; i++) {
            channel[i] = Math.random() * 2 - 1
          }
        })
        return true
      }
    }

    registerProcessor('white-noise-processor', WhiteNoiseProcessor);`
}

const audioContext = new OfflineAudioContext(1, 128, 300000);
var blob_url = new Blob([blobCode()], {
  type: "text/javascript"
});
var blob_url_create = URL.createObjectURL(blob_url);
await audioContext.audioWorklet.addModule(blob_url_create).then(async () => {
  var wa = new AudioWorkletNode(audioContext, "white-noise-processor");
});
If I set the Blob type to text/javascript, I get the error Uncaught ReferenceError: document is not defined:
var blob_url = new Blob([blobCode()], {type: "text/javascript"});
If I set the Blob type to text/html, I get the error Uncaught DOMException: The user aborted a request:
var blob_url = new Blob([blobCode()], {type: "text/html"});
How do I resolve this error?
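The document reference fails because addModule() evaluates the blob inside the AudioWorkletGlobalScope, which has no DOM at all. A minimal sketch of one way around it: keep only the processor class in the blob and do the DOM work on the main thread. The switch to a realtime AudioContext is my assumption, made so the node can be connected to a destination and heard; the original OfflineAudioContext would work the same way for addModule.

// Sketch only: the worklet blob contains nothing but the processor,
// because AudioWorkletGlobalScope has no `document`.
const workletCode = () => `
  class WhiteNoiseProcessor extends AudioWorkletProcessor {
    process (inputs, outputs, parameters) {
      const output = outputs[0];
      output.forEach(channel => {
        for (let i = 0; i < channel.length; i++) {
          channel[i] = Math.random() * 2 - 1;
        }
      });
      return true;
    }
  }
  registerProcessor('white-noise-processor', WhiteNoiseProcessor);`;

// DOM work stays on the main thread, outside the blob.
const sound = document.createElement('audio');
sound.id = 'audio-player';
sound.controls = 'controls';
sound.src = 'media/Blue Browne.mp3';
sound.type = 'audio/mpeg';
document.body.appendChild(sound);

const audioContext = new AudioContext(); // may need resume() after a user gesture
const blob = new Blob([workletCode()], { type: 'text/javascript' });
await audioContext.audioWorklet.addModule(URL.createObjectURL(blob));
const noiseNode = new AudioWorkletNode(audioContext, 'white-noise-processor');
noiseNode.connect(audioContext.destination);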

Related

Cannot read properties of undefined (reading 'getUserMedia') in AWS EC2

I have deployed a Django app to AWS EC2 in order to record audio from a speaker.
Here is my code for recording the audio stream:
let log = console.log.bind(console),
    id = val => document.getElementById(val),
    ul = id('ul'),
    gUMbtn = id('gUMbtn'),
    start = id('start'),
    stop = id('stop'),
    stream,
    recorder,
    counter = 1,
    chunks,
    media;

gUMbtn.onclick = e => {
  let mv = id('mediaVideo'),
      mediaOptions = {
        audio: {
          tag: 'audio',
          type: 'audio/wav',
          ext: '.wav',
          gUM: {audio: true}
        }
      };
  media = mediaOptions.audio;
  navigator.mediaDevices.getUserMedia(media.gUM).then(_stream => {
    stream = _stream;
    id('gUMArea').style.display = 'none';
    id('btns').style.display = 'inherit';
    start.removeAttribute('disabled');
    recorder = new MediaRecorder(stream);
    recorder.ondataavailable = e => {
      chunks.push(e.data);
      if (recorder.state == 'inactive') {
        makeLink();
      }
    };
    log('got media successfully');
  }).catch(log);
}

start.onclick = e => {
  start.disabled = true;
  stop.removeAttribute('disabled');
  chunks = [];
  recorder.start();
}

stop.onclick = e => {
  stop.disabled = true;
  recorder.stop();
  start.removeAttribute('disabled');
}
function makeLink(){
  let blob = new Blob(chunks, {type: media.type})
    , url = URL.createObjectURL(blob)
    , li = document.createElement('li')
    , mt = document.createElement(media.tag)
    , hf = document.createElement('a')
    ;
  mt.controls = true;
  mt.src = url;
  hf.href = url;
  hf.download = `${counter}${media.ext}`;
  hf.innerHTML = `download ${hf.download}`;
  li.appendChild(mt);
  li.appendChild(hf);
  ul.innerHTML = "";
  ul.appendChild(li);
}
When I run that code on the server, it says "Uncaught TypeError: Cannot read properties of undefined (reading 'getUserMedia')".
If someone has experienced this before, please give me a hand!
When I hosted the app over HTTPS, it ran fine.
navigator.mediaDevices is undefined over plain HTTP, because getUserMedia is only exposed in secure contexts (HTTPS or localhost).
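A minimal sketch of a guard that makes the failure mode explicit rather than throwing, assuming you simply want to disable recording on insecure origins:

// Sketch only: fail loudly on insecure (plain HTTP) origins,
// where navigator.mediaDevices is not exposed at all.
if (!window.isSecureContext || !navigator.mediaDevices) {
  console.error('getUserMedia is only available in secure contexts: ' +
                'serve the page over HTTPS (or test on localhost).');
  gUMbtn.disabled = true; // `gUMbtn` is the record button from the snippet above
}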

JavaScript: fetch an image and create base64

I'm using CloudFlare service workers and I want to fetch an image and then generate a base64 representation of it.
So something like this:
const res = await fetch('https://cdn.cnn.com/cnnnext/dam/assets/211010073527-tyson-fury-exlarge-169.jpg')
const blob = await res.blob();
console.log(blob)
console.log(btoa(blob))
This of course doesn't work (btoa stringifies the Blob to "[object Blob]" instead of encoding its bytes); any ideas how to resolve this?
Complete Cloudflare worker script using btoa:
addEventListener('fetch', event => {
  event.respondWith(handleRequest(event.request))
})

const imageUrl =
  'https://upload.wikimedia.org/wikipedia/commons/thumb/7/72/' +
  'Cat_playing_with_a_lizard.jpg/1200px-Cat_playing_with_a_lizard.jpg';

function base64Encode (buf) {
  let string = '';
  (new Uint8Array(buf)).forEach(
    (byte) => { string += String.fromCharCode(byte) }
  )
  return btoa(string)
}

function base64Decode (string) {
  string = atob(string);
  const
    length = string.length,
    buf = new ArrayBuffer(length),
    bufView = new Uint8Array(buf);
  for (var i = 0; i < length; i++) { bufView[i] = string.charCodeAt(i) }
  return buf
}

async function handleRequest(request) {
  const
    upstreamResponse = await fetch(imageUrl),
    text = base64Encode(await upstreamResponse.arrayBuffer()),
    bin = base64Decode(text);
  return new Response(bin, {status: 200, headers: {
    'Content-type': upstreamResponse.headers.get('Content-type')
  }})
}
You can refer to this discussion as well.
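The script above encodes and then immediately decodes just to prove the round trip works. If what you actually want to return is the base64 text itself, for example as a data URL, a minimal sketch of that variant, reusing the same imageUrl and base64Encode helper, might look like this:

// Sketch only: return the fetched image as a data: URL string instead of raw bytes.
async function handleRequest(request) {
  const upstreamResponse = await fetch(imageUrl);
  const contentType = upstreamResponse.headers.get('Content-type') || 'image/jpeg';
  const base64 = base64Encode(await upstreamResponse.arrayBuffer());
  return new Response(`data:${contentType};base64,${base64}`, {
    status: 200,
    headers: { 'Content-type': 'text/plain' }
  });
}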

Why is writing the same data to IndexedDB a second time consistently slower?

I stored some JPEG files (exactly 350, same files, same size, 336.14 MB in total) as Blobs in IndexedDB. It took around 1 second to complete the transaction. Then I read all the data from IndexedDB into an array and stored it to IndexedDB again, but this time it took around 15 seconds. I observed this as consistent behavior. Is anything wrong here? I used performance.now() to get the time difference.
Files: 350,
Size of each: 937 KB,
Browser: Chrome and Chromium Edge
// Open
var dbOpen = indexedDB.open(INDEXED_DB_NAME, INDEXED_DB_VERSION);
dbOpen.onupgradeneeded = function (e) {
  console.log("onupgradeneeded");
  var store = e.currentTarget.result.createObjectStore(
    IMAGE_DATA_STORE, { autoIncrement: true });
};
dbOpen.onsuccess = function (e) {
  image_data_db = dbOpen.result;
  console.log("indexed DB opened");
};
// Initial write
var inputFiles = document.getElementById('inputFiles');
for (var i = 0; i < inputFiles.files.length; i++) {
  let file = inputFiles.files[i];
  var b = new Blob([file], { type: file.type });
  fileblobs.push(b);
}
StoreIdb(fileblobs); // < First write
// StoreIdb()
t0 = performance.now();
var trx = image_data_db.transaction(IMAGE_DATA_STORE, 'readwrite');
var imagestore = trx.objectStore(IMAGE_DATA_STORE);
for (i = 0; i < fileblobs.length; i++) {
  request = imagestore.add(fileblobs[i]);
  request.onsuccess = function (e) {
    console.log('added');
  };
  request.onerror = function (e) {
    console.error("Request Error", this.error);
  };
}
trx.onabort = function (e) {
  console.error("Exception:", this.error, this.error.name);
};
trx.oncomplete = function (e) {
  console.log('completed');
  t1 = performance.now();
  timetaken = t1 - t0;
}
// Read
var objectStore = image_data_db.transaction(IMAGE_DATA_STORE).objectStore(IMAGE_DATA_STORE);
objectStore.openCursor().onsuccess = function (e) {
  var cursor = e.target.result;
  if (cursor) {
    blobArray.push(cursor.value.blob);
    cursor.continue();
  } else {
    // completed
  }
}
// blobArray will be used for second time << Second Write
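The second write itself is not shown above. A minimal sketch of what it presumably looks like, assuming blobArray holds the values read back by the cursor and the hypothetical helper name storeAgain is mine:

// Sketch only: the "second write" re-adds the blobs read back from IndexedDB.
function storeAgain(blobArray) {
  var t0 = performance.now();
  var trx = image_data_db.transaction(IMAGE_DATA_STORE, 'readwrite');
  var imagestore = trx.objectStore(IMAGE_DATA_STORE);
  for (var i = 0; i < blobArray.length; i++) {
    imagestore.add(blobArray[i]);
  }
  trx.oncomplete = function () {
    console.log('second write took', performance.now() - t0, 'ms');
  };
}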
I figured it out. The first time, it was storing the File instance as the Blob.
I changed the File-backed Blob to one built from an ArrayBuffer, just to ensure the data type is similar in both cases. Now it takes the same time.
for (var i = 0; i < inputFiles.files.length; i++) {
  let file = inputFiles.files[i];
  file.arrayBuffer().then((arrayBuffer) => {
    let blob = new Blob([new Uint8Array(arrayBuffer)], {type: file.type});
    blobs.push(blob);
    if (blobs.length == inputFiles.files.length) {
      callback(blobs);
    }
  });
}

Is it possible to convert Blob/Base64 to a fileURI using JavaScript/AngularJS and Ionic 1?

I am trying to capture images using cordovaCamera and imagePicker, then remove the EXIF data and create a copy of the image I get after the metadata is removed.
In the following code I get the image as a Blob, which I pass to resolveLocalFileSystemURL, but I am unable to proceed because resolveLocalFileSystemURL does not accept Blob or base64 data. So, is it possible to convert Blob/Base64 to fileURI format, or is there any alternative way to solve this?
In the HTML:
<input id="erd" type="file"/>
In controllers.js:
var input = document.querySelector('#erd');
input.addEventListener('change', load);

function load(){
  var fr = new FileReader();
  fr.onload = process;
  fr.readAsArrayBuffer(this.files[0]);
  console.log(" onload " + URL.createObjectURL(this.files[0]));
}

function process(){
  var dv = new DataView(this.result);
  var offset = 0, recess = 0;
  var pieces = [];
  var i = 0;
  if (dv.getUint16(offset) == 0xffd8){
    offset += 2;
    var app1 = dv.getUint16(offset);
    offset += 2;
    while (offset < dv.byteLength){
      if (app1 == 0xffe1){
        pieces[i] = {recess: recess, offset: offset - 2};
        recess = offset + dv.getUint16(offset);
        i++;
      }
      else if (app1 == 0xffda){
        break;
      }
      offset += dv.getUint16(offset);
      var app1 = dv.getUint16(offset);
      offset += 2;
    }
    if (pieces.length > 0){
      var newPieces = [];
      pieces.forEach(function(v){
        newPieces.push(this.result.slice(v.recess, v.offset));
      }, this);
      newPieces.push(this.result.slice(recess));
      var br = new Blob(newPieces, {type: 'image/jpeg'});
      console.log(" pieces.length " + URL.createObjectURL(br));
      $scope.strimg = URL.createObjectURL(br).toString();
      //var file = new File(URL.createObjectURL(br), "uploaded_file.jpg", { type: "image/jpeg", lastModified: Date.now() });
      var myFile = blobToFile(br, "my-image.jpg");
      console.log('myFile' + JSON.stringify(myFile));
      $scope.strimg = myFile;
      createFileEntry($scope.strimg);
    }
  }
}
function blobToFile(theBlob, fileName){
  console.log('theBlob' + theBlob + fileName);
  var b = theBlob;
  b.lastModifiedDate = new Date();
  b.name = fileName; // Cast to a File() type
  return theBlob;
}

function createFileEntry(fileURI) {
  window.resolveLocalFileSystemURL(fileURI, copyFile, fail);
}

// 5
function copyFile(fileEntry) {
  console.log(" copyFile " + fileEntry);
  var name = fileEntry.fullPath.substr(fileEntry.fullPath.lastIndexOf('/') + 1);
  var newName = name;
  $scope.filepathnew = "file:///storage/emulated/0/Android/data/com.offermanagement.system/files/";
  window.resolveLocalFileSystemURL($scope.filepathnew, function(fileSystem2) {
    fileEntry.copyTo(
      fileSystem2,
      newName,
      onCopySuccess,
      fail
    );
  },
  fail);
}

function onCopySuccess(entry) {
  $scope.$apply(function () {
    $rootScope.profilepic = entry.nativeURL;
    $rootScope.images.push(entry.nativeURL);
    $rootScope.items.push({src: entry.nativeURL, sub: ''});
  });
}

function fail(error) {
  console.log("fail: " + error.code);
}
I get the following error for the above code:
"Uncaught TypeError: Wrong type for parameter "uri" of resolveLocalFileSystemURI: Expected String, but got Blob."

How to get image width and height in a web-worker?

Here's my code:
// process-image.js (web worker)
self.addEventListener('message', ev => {
  const {id, file} = ev.data;
  const reader = new FileReader();
  reader.onload = ev => {
    const imageFile = new Image(); // <-- error is here
    imageFile.src = ev.target.result;
    imageFile.onload = () => {
      const fit = makeFit(imageFile);
      self.postMessage({
        id,
        file: {
          src: ev.target.result,
          width: fit.width,
          height: fit.height,
        }
      })
    }
  };
  reader.readAsDataURL(file);
});
This was working fine in the main UI thread, but apparently I don't have access to Image inside of a web-worker. The specific error is:
Uncaught ReferenceError: Image is not defined
at FileReader.reader.onload (process-image.js:12)
Is there another way to get the width and height of an image?
I'd like to support as many file types as possible, but just JPG is good enough for now if there's some sort of library that can do this and will run in a web-worker.
Here's the relevant bit from the UI thread:
// some-react-component.js
componentDidMount() {
  this.worker = new ImageWorker();
  this.worker.addEventListener('message', ev => {
    const {id, file} = ev.data;
    this.setState(({files}) => {
      files = files.update(id, img => ({
        ...img,
        ...file,
      }))
      const withWidths = files.filter(f => f.width);
      const avgImgWidth = withWidths.reduce((acc, img) => acc + img.width, 0) / withWidths.size;
      return {files, avgImgWidth};
    });
  });
}

onDrop = ev => {
  ev.preventDefault();
  Array.prototype.forEach.call(ev.dataTransfer.files, file => {
    const id = shortid();
    this.worker.postMessage({id, file});
    const img = {
      id,
      width: null,
      height: null,
      src: null,
      name: file.name,
    }
    this.setState(({files}) => ({files: files.set(id, img)}));
  });
}
The only thing worth noting here is that id is just a random UUID and file is a File. I'm passing the whole thing to the web worker so that I can do all the processing there.
I think there might be a simpler solution:
https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/createImageBitmap
I managed to get the size without using FileReader at all:
http://jsfiddle.net/hL1Ljkmv/2/
var response = `self.addEventListener('message', async ev => {
  const {id, file} = ev.data;
  console.log('received data');
  let image = await self.createImageBitmap(file);
  console.log(image);
});`;

const blob = new Blob([response], {type: 'application/javascript'});
const worker = new Worker(URL.createObjectURL(blob));
const input = document.querySelector('#file-input');
input.addEventListener('change', (e) => {
  worker.postMessage({file: input.files[0], id: 1})
});
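The ImageBitmap already carries the dimensions, so the worker only needs to post them back. A minimal sketch of that step, assuming the same {id, file} message shape as above (makeFit is omitted here):

// Sketch only: inside the worker, read the dimensions off the ImageBitmap
// and send them back to the UI thread.
self.addEventListener('message', async ev => {
  const {id, file} = ev.data;
  const bitmap = await self.createImageBitmap(file);
  self.postMessage({
    id,
    file: {
      width: bitmap.width,
      height: bitmap.height,
    }
  });
  bitmap.close(); // release the decoded pixels once the size has been read
});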
I managed to get the width and height of most JPEGs by following this spec.
self.addEventListener('message', ev => {
  const {id, file} = ev.data;
  const reader = new FileReader();
  reader.onload = ev => {
    const view = new DataView(reader.result);
    let offset = 0;
    let header = view.getUint16(offset, false);
    if (header !== 0xFFD8) {
      throw new Error(`Not a JPEG`);
    }
    offset += 2;
    for (;;) {
      header = view.getUint16(offset, false);
      offset += 2;
      let length = view.getUint16(offset, false);
      if (header === 0xFFC0) break;
      offset += length;
    }
    const height = view.getUint16(offset + 3);
    const width = view.getUint16(offset + 5);
    const fit = makeFit({width, height});
    self.postMessage({
      id,
      file: {
        src: URL.createObjectURL(file),
        width: fit.width,
        height: fit.height,
      }
    })
  };
  reader.readAsArrayBuffer(file);
});
This code is flakey as heck, but surprisingly it works. I'm still looking for a more robust solution.
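Part of the flakiness is that the scan loop only stops on 0xFFC0 (baseline SOF0), so progressive JPEGs (SOF2, 0xFFC2) and the other SOF variants never match. A sketch of a slightly more tolerant marker check, under the assumption that treating every SOFn marker except the non-frame markers 0xFFC4 (DHT), 0xFFC8 (JPG) and 0xFFCC (DAC) as a frame header is acceptable; the helper name isFrameHeader is mine:

// Sketch only: accept any SOFn frame header (0xFFC0-0xFFCF, excluding
// DHT 0xFFC4, JPG 0xFFC8 and DAC 0xFFCC), not just baseline SOF0.
function isFrameHeader(marker) {
  return marker >= 0xFFC0 && marker <= 0xFFCF &&
         marker !== 0xFFC4 && marker !== 0xFFC8 && marker !== 0xFFCC;
}

// ...then, in the scan loop above, replace the SOF0-only test
//   if (header === 0xFFC0) break;
// with
//   if (isFrameHeader(header)) break;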