Chrome ERR_INSUFFICIENT_RESOURCES workaround - javascript

I have a JS library that is responsible for downloading JPEG images for the client. All of this is done asynchronously. In some cases the number of images is really large, around 5000. When that happens, Chrome fails the AJAX requests with an "ERR_INSUFFICIENT_RESOURCES" error.
Each request must be made individually; there is no option to pack the images on the server side.
What are my options here? How can I find a workaround for this problem? The download works fine in Firefox...
Here is the code for the actual download:
function loadFileAndDecrypt(fileId, key, type, length, callback, obj) {
  var step = 100 / length;
  eventBus.$emit('updateProgressText', "downloadingFiles");
  var req = new dh.crypto.HttpRequest();
  req.setAesKey(key);
  let dataUrl;
  if (type == "study") {
    dataUrl = "/v1/images/";
  } else {
    dataUrl = "/v1/dicoms/";
  }
  var url = axios.defaults.baseURL + dataUrl + fileId;
  req.open("GET", url, true);
  req.setRequestHeader("Authorization", authHeader().Authorization + "");
  req.setRequestHeader("Accept", "application/octet-stream, application/json, text/plain, */*");
  req.responseType = "arraybuffer";
  req.onload = function () {
    console.log(downloadStep);
    downloadStep += step;
    eventBus.$emit('updatePb', Math.ceil(downloadStep));
    var data = req.response;
    obj.push(data);
    counter++;
    // last one
    if (counter == length) {
      callback(obj);
    }
  };
  req.send();
}

The error means your code is exhausting the browser's resources (most likely memory, or the quota of pending requests was reached). Instead of having the backend send all the data at once, let your frontend request the 5000 individual images and control the flow of those requests. Regardless, downloading 5000 images is bad; you should pack them up for downloading if at all possible. If you only mean fetching the images for display, then loading them in the frontend through static or dynamic links is much more logical ;)

Create a class:
which accepts the file-Id (the image that needs to be downloaded) as an argument,
which can perform the HTTP API request, and
which can store the result of the request.
Create an array of objects of this class, one for each file-Id that needs to be downloaded.
Store the array in a RequestManager which can start and manage the downloads (a rough sketch follows the list):
it can batch the downloads, say firing 5 requests from the array and waiting for them to finish before starting the next batch
it can stop the downloads on multiple failures
it can adjust the batch size depending on the available bandwidth
it stops downloads on auth expiry and resumes them on auth refresh
it offers to retry previously failed downloads
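A minimal sketch of the batching part, assuming a fetch-based download and reusing the question's /v1/images/ endpoint; the name ImageRequest is illustrative, and retries, auth handling and adaptive batch sizing are left out:
function ImageRequest(fileId) {
  this.fileId = fileId;
  this.result = null;  // stores the response of the request
  this.failed = false;
}

ImageRequest.prototype.start = function () {
  var self = this;
  return fetch('/v1/images/' + this.fileId) // endpoint borrowed from the question
    .then(function (res) { return res.arrayBuffer(); })
    .then(function (buf) { self.result = buf; })
    .catch(function () { self.failed = true; });
};

function RequestManager(requests, batchSize) {
  this.requests = requests;   // array of ImageRequest objects
  this.batchSize = batchSize; // e.g. 5 concurrent downloads
}

RequestManager.prototype.start = function () {
  var self = this;
  var batch = this.requests.splice(0, this.batchSize);
  if (batch.length === 0) return Promise.resolve();
  // fire one batch and wait for it to settle before starting the next
  return Promise.all(batch.map(function (r) { return r.start(); }))
    .then(function () { return self.start(); });
};
Usage would be along the lines of new RequestManager(fileIds.map(function (id) { return new ImageRequest(id); }), 5).start(); so at most 5 requests are ever in flight.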

Related

Handle a specific image response in JavaScript

<img src="/a.jpg" onerror="fetch('/a.jpg')
  .then(res => console.log(res.status === 499
    ? 'image will be available in a moment'
    : 'image not found'))">
Is it possible to do this without firing two HTTP requests (one from img.src and one from the fetch call)?
My use case: I want to run a polling loop (which I have already implemented, just skipped here for simplicity) that retries loading the image while it is still being prepared on the server (the loop will of course fire more HTTP requests, but that's OK); if the image actually does not exist, it should just show "image not found".
The server can be implemented, for example, this way:
if an image exists and has a thumbnail ready, return an image response
if an image exists but its thumbnail is not ready yet, return a specific HTTP code (499)
Compatibility with modern browsers & IE 11 is enough for me.
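For illustration, the server side of that contract might look like this in Express (a hypothetical sketch; imageExists, thumbnailReady and thumbnailPath are assumed helpers):
app.get('/thumbnails/:id', function (req, res) {
  if (!imageExists(req.params.id)) {
    return res.sendStatus(404);               // image not found
  }
  if (!thumbnailReady(req.params.id)) {
    return res.sendStatus(499);               // exists, thumbnail not ready yet
  }
  res.sendFile(thumbnailPath(req.params.id)); // the image response
});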
Finally found the solution myself: load the image using XHR and display it using the Blob API.
This solution provides everything I wanted:
fires only a single HTTP request to get either the image or an error code,
needs no additional user permissions (unlike an implementation with a webRequest hook),
does not pollute the DOM with extra-long base64 URLs,
seems compatible with modern browsers and even IE10.
var myImage = document.querySelector('img');
var myRequest = new XMLHttpRequest();
myRequest.open('GET', 'http://placekitten.com/123/456', true);
myRequest.responseType = 'blob';
myRequest.onreadystatechange = () => {
  if (myRequest.readyState !== 4) {
    return;
  }
  if (myRequest.status === 200) {
    var blob = myRequest.response;
    var objectURL = URL.createObjectURL(blob);
    // this is the trick - generates a URL like
    // blob:http://localhost/adb50c88-9468-40d9-8b0b-1f6ec8bb5a32
    myImage.src = objectURL;
  } else if (myRequest.status === 499) {
    console.log('... waiting for thumbnail');
    retryAfter5s(); // implementation of the retry loop is not important here
  } else {
    console.log('Image not found');
  }
};
myRequest.send();

Why does data transfer between sockets take a lot of time?

I have implemented a web-based client-server system. The goal is to request an image file from the server through the socket.
Here is my code at the client end. [embedded JavaScript code]
<a id="downloadLnk" download="new.jpeg" style="color:red">Download as image</a>
var socket = io("ipaddress");
socket.on("image", function (info) {
  if (info.image) {
    var end1 = new Date().getTime();
    document.getElementById("demo1").innerHTML = end1;
    var img = new Image();
    img.src = 'data:image/jpeg;base64,' + info.buffer;
  }
  function download() {
    this.href = img.src;
  }
  downloadLnk.addEventListener('click', download, false);
});
And this is the code on the server side: [Node.js server, express module, fs module]
io.on('connection', function (socket) {
  var start1 = new Date().getTime();
  console.log(start1);
  fs.readFile(__dirname + '/aorta-high512.jpg', function (err, buf) {
    socket.emit('image', { image: true, buffer: buf.toString('base64') });
  });
});
I am transferring a 512x512 image of size 88KB and it takes approximately one second. Similarly, a 259KB file takes around 1.2s and a 2MB file takes 2.5s. I do not understand why it is taking so much time.
I checked the available bandwidth and internet speed of my network on speedtest.net. The download speed is 95.97Mbps and the upload speed is 23.30Mbps.
Could you please let me know why the data transfer is so slow? Is there another method to transfer the data faster? I know that 96Mbps is the available bandwidth, but as a test I downloaded a 100MB PDF file from the internet and it took approximately 12-14s. Looking at this, I would expect data transfer at a rate of at least 2-3Mbps.
Socket.IO supports sending/receiving binary data, so taking advantage of that lets you avoid the expensive base64 encoding of the data.
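For example (a sketch, assuming Socket.IO 1.x or later, which transmits Buffer/ArrayBuffer payloads as binary):
// server: emit the raw Buffer instead of a base64 string
io.on('connection', function (socket) {
  fs.readFile(__dirname + '/aorta-high512.jpg', function (err, buf) {
    if (err) return console.error(err);
    socket.emit('image', buf); // transmitted as binary, no encoding step
  });
});

// client: the payload arrives as an ArrayBuffer
socket.on('image', function (data) {
  var blob = new Blob([data], { type: 'image/jpeg' });
  var img = new Image();
  img.src = URL.createObjectURL(blob); // also sidesteps data: URL size limits
  document.body.appendChild(img);
});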
Secondly, when generating/using a data URL in the browser you have to be careful about the URL's length: many browsers impose limits on the maximum size of such data URLs. One possible workaround (besides serving the image directly via HTTP GET) is to have the server split the image into a set of smaller images, which you then display with stacked img tags to give the appearance of a single image.

AJAX Upload file straight after downloading it (without storing)

I'm making a JavaScript script that is going to essentially save an old game-development sandbox website before the owners scrap it (and lose all of the games). I've created a script that downloads each game via AJAX, and I would like to upload each one straight away, also using AJAX. How do I upload the downloaded file (stored in responseText, presumably) to a PHP page on another domain (one that has cross-origin headers enabled)?
I assume there must be a way to upload the data from the first AJAX request without copying the responseText over to a second AJAX request (used to upload the file)? I've tried transferring the data, but as expected it causes huge lag (and can crash the browser), as the files can be quite large.
Is there a way for an AJAX request to upload individual packets as soon as they're received?
Thanks,
Dan.
You could use Firefox's moz-chunked-text and moz-chunked-arraybuffer response types. On the JavaScript side you can do something like this:
function downloadUpload() {
  var downloadUrl = "server.com/largeFile.ext";
  var uploadUrl = "receiver.net/upload.php";
  var dataOffset = 0;
  var xhrDownload = new XMLHttpRequest();
  xhrDownload.open("GET", downloadUrl, true);
  xhrDownload.responseType = "moz-chunked-text"; // <- only works in Firefox
  xhrDownload.onprogress = uploadData;
  xhrDownload.send();

  function uploadData() {
    var data = {
      file: downloadUrl.substring(downloadUrl.lastIndexOf('/') + 1),
      offset: dataOffset,
      chunk: xhrDownload.responseText // contains only the newest chunk
    };
    var xhrUpload = new XMLHttpRequest();
    xhrUpload.open("POST", uploadUrl, true);
    xhrUpload.setRequestHeader('Content-Type', 'application/json; charset=UTF-8');
    xhrUpload.send(JSON.stringify(data));
    dataOffset += xhrDownload.responseText.length;
  }
}
On the PHP side you need something like this:
$in = fopen("php://input", "r");
$postContent = stream_get_contents($in);
fclose($in);
$o = json_decode($postContent);
file_put_contents($o->file . '-' . $o->offset . '.txt', $o->chunk);
These snippets just give you the basic idea; you'll need to optimize the code yourself.
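Note that the moz-chunked-* response types were Firefox-only and have since been removed. A rough modern equivalent uses fetch, which exposes the download as a stream, so each chunk can be re-uploaded as it arrives (a sketch; the offset header and endpoint are assumptions about the receiver):
async function downloadUpload() {
  const downloadUrl = "https://server.com/largeFile.ext";
  const uploadUrl = "https://receiver.net/upload.php"; // assumed endpoint
  let offset = 0;
  const response = await fetch(downloadUrl);
  const reader = response.body.getReader();
  for (;;) {
    const { done, value } = await reader.read(); // value: Uint8Array chunk
    if (done) break;
    await fetch(uploadUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/octet-stream",
        "X-File-Offset": String(offset) // hypothetical header for the receiver
      },
      body: value
    });
    offset += value.byteLength;
  }
}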

Cannot upload files and vars with XHR2 and web workers

I'm trying to write code that uploads files using XHR2 and web workers.
I thought I should use web workers so that if a file is big, the web page won't freeze.
This is not working for two reasons: I have never used web workers before, and I want to POST the file and some vars to the server at the same time, with the same XHR. When I say vars I mean the name of the file and an int.
Here is what I've got:
Client side
// create worker
var worker = new Worker('fileupload.js');
worker.onmessage = function (e) {
  alert('worker says ' + e.data);
};
// handle worker errors
worker.onerror = werror;
function werror(e) {
  console.log('ERROR: Line ', e.lineno, ' in ', e.filename, ': ', e.message);
}
// send stuff to the worker
worker.postMessage({
  'files': files,        // img or video
  'name': nameofthepic,  // text
  'id': imageinsertid    // number
});
Inside the worker (fileupload.js file)
onmessage = function (e) {
  var name = e.data.name;
  var id = e.data.id;
  var file = e.data.files;
  // create a var to catch the answer of the server
  var datax;
  var xhr = new XMLHttpRequest();
  xhr.onload = function () {
    if (xhr.status == 200) { datax = xhr.response; }
    else { datax = 525; } // actually, whatever, just give a value
  };
  xhr.open('POST', 'upload.php');
  xhr.send(file, name, id);
  // I also tried xhr.send('file=file&name=name&id=id'); and still nothing
  // I also tried just the text/int xhr.send('name=name&id=id'); and still nothing
};
I am confused. I cannot send anything to the server, and I get no feedback from the worker. I don't even know if the data is sent to fileupload.js. The server side does not INSERT.
Is that possible, sending files and text at the same time? What am I missing?
I need to pass the text and the int along with the file, so that the server side will not only upload the file but also INSERT the int and the text into the database if the file is uploaded successfully. This was easy with just FormData and XHR, but with web workers in the middle I can't get it right.
Also, can I use Transferable Objects to speed things up? Are Transferable Objects supported in all major browsers?
Thanks in advance
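For reference, a minimal sketch of the worker side using FormData, which is available inside workers in current browsers; it assumes upload.php reads $_FILES['file'], $_POST['name'] and $_POST['id']:
// fileupload.js
onmessage = function (e) {
  var fd = new FormData();
  fd.append('file', e.data.files);  // the File/Blob
  fd.append('name', e.data.name);   // text
  fd.append('id', e.data.id);       // number (sent as a string)
  var xhr = new XMLHttpRequest();
  xhr.onload = function () {
    // report the server's answer back to the page
    postMessage(xhr.status == 200 ? xhr.response : 'upload failed');
  };
  xhr.open('POST', 'upload.php');
  xhr.send(fd); // the browser sets the multipart boundary itself
};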

Large file upload with WebSocket

I'm trying to upload large files (at least 500MB, preferably up to a few GB) using the WebSocket API. The problem is that I can't figure out how to write "send this slice of the file, release the resources used, then repeat". I was hoping I could avoid using something like Flash/Silverlight for this.
Currently, I'm working with something along the lines of:
function FileSlicer(file) {
  // randomly picked 1MB slices,
  // I don't think this size is important for this experiment
  this.sliceSize = 1024 * 1024;
  this.slices = Math.ceil(file.size / this.sliceSize);
  this.currentSlice = 0;
  this.getNextSlice = function () {
    var start = this.currentSlice * this.sliceSize;
    var end = Math.min((this.currentSlice + 1) * this.sliceSize, file.size);
    ++this.currentSlice;
    return file.slice(start, end);
  };
}
Then, I would upload using:
function Uploader(url, file) {
  var fs = new FileSlicer(file);
  var socket = new WebSocket(url);
  socket.onopen = function () {
    for (var i = 0; i < fs.slices; ++i) {
      socket.send(fs.getNextSlice()); // see below
    }
  };
}
Basically this returns immediately, bufferedAmount is unchanged (0), and the loop keeps iterating, adding all the slices to the queue before attempting to send anything; there's no socket.afterSend that would let me queue things properly, which is where I'm stuck.
Use web workers to process large files instead of doing it in the main thread, and upload chunks of file data using file.slice().
This article shows how to handle large files in workers; change the XHR send in the main thread to a WebSocket send:
// messages from the worker arrive in the main thread
worker.onmessage = function (e) {
  ws.send(e.data); // e.data is the blob/chunk prepared by the worker
};
// construct the file on the server side based on the blob or chunk information.
I believe the send() method is asynchronous, which is why it returns immediately. To make it queue, you'd need the server to send a message back to the client after each slice is uploaded; the client can then decide whether to send the next slice or an "upload complete" message back to the server.
This sort of thing would probably be easier using XMLHttpRequest(2); it has callback support built in and is also more widely supported than the WebSocket API.
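For instance, a sketch of the same serialized idea with plain XHR, reusing the question's FileSlicer and chaining each slice from the previous request's onload (the slice-index header is an assumption about the server):
function XhrUploader(url, file) {
  var fs = new FileSlicer(file);
  var sent = 0;
  (function sendNext() {
    if (sent >= fs.slices) return; // all slices uploaded
    var xhr = new XMLHttpRequest();
    xhr.onload = function () {
      sent++;
      sendNext(); // only start the next slice once this one has finished
    };
    xhr.open('POST', url, true);
    xhr.setRequestHeader('X-Slice-Index', String(sent)); // hypothetical header
    xhr.send(fs.getNextSlice());
  })();
}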
In order to serialize this operation you need the server to send you a signal every time a slice is received and written (or an error occurs); this way you can send the next slice in response to the onmessage event, pretty much like this:
function Uploader(url, file) {
  var fs = new FileSlicer(file);
  var socket = new WebSocket(url);
  socket.onopen = function () {
    socket.send(fs.getNextSlice());
  };
  socket.onmessage = function (ms) {
    if (ms.data == "ok") {
      fs.slices--;
      if (fs.slices > 0) socket.send(fs.getNextSlice());
    } else {
      // handle the error code here.
    }
  };
}
You could use https://github.com/binaryjs/binaryjs or https://github.com/liamks/Delivery.js if you can run Node.js on the server.
EDIT: The web world, browsers, firewalls and proxies have changed a lot since this answer was written. Right now, sending files using WebSockets can be done efficiently, especially on local area networks.
WebSockets are very efficient for bidirectional communication, especially when you're interested in pushing information (preferably small) from the server. They act as bidirectional sockets (hence their name).
WebSockets don't look like the right technology to use in this situation. In particular, using them adds incompatibilities with some proxies, browsers (IE) and even firewalls.
On the other hand, uploading a file is simply a matter of sending a POST request to the server with the file in the body. Browsers are very good at that, and the overhead for a big file is close to nothing. Don't use WebSockets for that task.
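For example, a plain XHR upload needs little more than this (a sketch; /upload is an assumed endpoint):
var xhr = new XMLHttpRequest();
xhr.open('POST', '/upload', true);     // '/upload' is an assumed endpoint
xhr.upload.onprogress = function (e) { // progress events come for free
  console.log(Math.round(100 * e.loaded / e.total) + '%');
};
xhr.send(file);                        // the browser streams the File itself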
I think this socket.io project has a lot of potential:
https://github.com/sffc/socketio-file-upload
It supports chunked uploads and progress tracking, and seems fairly easy to use.
