Consecutive AJAX calls freeze Chrome and IE - JavaScript

I've got a strange one. I have to make several consecutive AJAX calls, and when a call completes I update a progress bar. This works perfectly in Firefox, but in the rest of the browsers the screen freezes until all the calls are complete.
I am not executing the calls in a loop but with a sort of recursion, because there's a lot of checking to be done and a loop is not convenient.
When I tried the same thing using a loop the outcome was more or less the same: Chrome and IE did not update the screen until all the AJAX requests were done.
What I noticed is that it works fine in Firefox and Opera, but Chrome (Safari too, I suppose) and IE9 behave strangely. Also, in Chrome, during these requests the response body of the previous request is empty and stays that way until all requests are done.
Any ideas?
The code is extensive, but here goes. There is a wrapper around ajax; $(db).bind attaches the success callback, db.records is the JSON result, and model is an object holding several controller functions.
$(db).bind('tokenComplete', function () {
    var x = db.records;
    if (!x.success) { model.callRollBack(); return false; }
    var next = parseInt(x.get.num) + 1;
    if (typeof x.post.tokens[next] != 'undefined') {
        model.executeToken(next, x.post);
    } else {
        model.progressCurrent.find('div.report').html('all done!!');
    }
});
model = {
    drawProgressBarsTotal: function (el, i, v) {
        var p = Math.floor(100 * i / versions.total);
        el.find('span').html(p);
        el.find('div.report').html('updating to : ' + v.version);
        el.find('.changeLog').html(v.changeLog);
        el.find('.changeLog').parents('div').show();
        el.find('img').css({ 'background-position': 100 - p + '% 100%' });
    },
    executeToken: function (i, x) {
        if (this.fail == true) { return; }
        this.drawProgressBarsCurrent(this.progressCurrent, i + 1, x);
        db.trigger = 'tokenComplete';
        db.data = x;
        db.url = dbDefaults.url + '?num=' + i + '&action=' + x.tokens[i]; // bring the first
        $(db).loadStore(db);
    }
}
loadStore:
$.dataStore = function (ds) {
    $.fn.loadStore = function (ds) {
        $.ajax({
            type: ds.method,
            url: ds.url,
            data: ds.data,
            dataType: ds.dataType,
            cache: false,
            async: true,
            timeout: ds.timeout ? ds.timeout : 10000,
            queue: "autocomplete",
            contentType: 'application/x-www-form-urlencoded;charset=utf-8',
            accepts: {
                xml: "application/xml, text/xml",
                json: "application/json, text/json",
                _default: "*/*"
            },
            beforeSend: function () {
                loadStatus = true;
            },
            success: function (data) {
                loadStatus = false;
                if (data) { ds.records = data; }
                $(ds).trigger(ds.trigger);
            },
            error: function () {
                loadStatus = false;
                $(ds).trigger('loadError');
            }
        }); // END AJAX
    }; // END LOADSTORE
    try {
        return ds;
    } finally {
        ds = null;
    }
}

I haven't followed your entire code, but it sounds like your problem may be related to continuous code execution. Typically the UI will not update during continuous code execution. To fix this, use setTimeout() (or truly asynchronous AJAX calls) to give the browser time to update the UI. Basically, you must stop your code briefly, then let it start again.
function updateUI() {
    // change UI state here, then yield so the browser can repaint
    setTimeout(updateUI, 1);
}
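Applied to the question's code, a minimal sketch (reusing the model, db, and tokenComplete names from above, so treat those as assumptions) would defer each step so the browser can repaint the progress bar between requests:
// Sketch only: scheduling the next token with setTimeout(..., 0) yields to
// the browser so it can repaint before the next request starts.
$(db).bind('tokenComplete', function () {
    var x = db.records;
    if (!x.success) { model.callRollBack(); return false; }
    var next = parseInt(x.get.num, 10) + 1;
    if (typeof x.post.tokens[next] != 'undefined') {
        setTimeout(function () {
            model.executeToken(next, x.post);
        }, 0);
    } else {
        model.progressCurrent.find('div.report').html('all done!!');
    }
});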
If I am off base here, let me know.

Related

How to ensure that a function containing multiple $.ajax calls runs fully synchronously and also allows for browser repaints as it executes

I've been working on getting a function written to:
1) Process an input array using $.ajax calls to fill an output array (below this is inputList)
2) Below is what I have, but I'm having issues with it:
requestData(), when I call it, runs straight through to processing the outputList array without having fully populated it - it puts one value into the array and then starts processing that, while the function apparently runs on separately and asynchronously from the subsequent processing. I need it to be fully synchronous, so that it does not return until the inputList array has been fully processed.
I'm also not seeing the browser repaint the div whose html is updated on each call of the runajax() function - I'm attempting to make that happen with a setTimeout.
3) I've set the ajax request to be synchronous (async: false) - but this doesn't seem to help.
I've tried to use jQuery's $.when to ensure that everything gets called in sequence - but clearly I'm not doing this correctly.
Would appreciate any help - I've asked previous related questions around this and had some useful help - but I've still not resolved this!
Thanks
// declare holding function requestData - expects a non-empty input data array named inputList
function requestData() {
    // declare inner function runajax
    function runajax() {
        if (inputList.length > 0) {
            // get first item from inputList and shorten inputList
            var data = $.trim(inputList.shift());
            function getData() {
                // send the data to server
                return $.ajax({
                    url: 'sada_ajax_fetch_data.php',
                    cache: false,
                    async: false,
                    method: "post",
                    timeout: 2000,
                    data: {
                        requesttype: "getmydata",
                        email: encodeURIComponent(data)
                    }
                });
            }
            function handleReturnedData(response) {
                response = $.trim(decodeURIComponent(response));
                // update the div inner html
                if (response == "Failed") {
                    $('#fetchupdatestatus').html('There was an error retrieving the data you requested!');
                } else {
                    $('#fetchupdatestatus').html('The item returned was ' + response);
                }
                // add the response from ajax to the end of the outputList array
                outputList.push(response);
                // set up the next ajax call
                var doNextBitOfWork = function () {
                    runajax();
                };
                // call setTimeout so that browser shows refreshed div html
                setTimeout(doNextBitOfWork, 0);
                // return
                return $.when();
            }
            // do the next ajax request and response processing
            return getData().done(handleReturnedData);
        } else {
            // did the last one so return
            return $.when();
        }
    }
    // kick off the ajax calls
    runajax();
}
var inputList = new Array();
var outputList = new Array();
.....load +/- 100 values to be processed using ajax into array inputList
requestData();
.....process stuff in array outputList
.....etc
I posted a "you're doing it wrong" answer earlier, but then I decided to show how you can do it (almost) right: https://jsfiddle.net/h4ffz1by/
var request_maker = {
    working: false,
    queue: [],
    output: [],
    requestData: function (inputList) {
        if (request_maker.working == true) {
            return false;
        }
        request_maker.output = [];
        request_maker.working = true;
        while (inputList.length > 0) {
            var data = $.trim(inputList.shift());
            request_maker.queue.push(data);
        }
        console.log(request_maker.queue);
        request_maker.doTheJob();
        return true;
    },
    doTheJob: function () {
        var current_data_to_send = request_maker.queue.shift();
        console.log(current_data_to_send);
        if (typeof current_data_to_send != 'undefined' && request_maker.queue.length >= 0) {
            $.ajax({
                url: '/echo/json/',
                cache: false,
                method: "post",
                timeout: 2000,
                data: {
                    requesttype: "getmydata",
                    email: encodeURIComponent(current_data_to_send)
                },
                success: function (data, status, xhrobject) {
                    console.log(xhrobject);
                    request_maker.handleReturnedData(data);
                }
            });
        } else {
            request_maker.working = false;
            console.log('all data has been sent');
        }
    },
    handleReturnedData: function (response) {
        console.log(response);
        response = $.trim(decodeURIComponent(response));
        //response = 'Failed'; // uncomment to emulate this kind of response
        if (response == "Failed") {
            $('#fetchupdatestatus').append('There was an error retrieving the data you requested!<br/>');
        } else {
            $('#fetchupdatestatus').append('The item returned was ' + response + '<br/>');
            request_maker.output.push(response);
        }
        request_maker.doTheJob();
        if (request_maker.working == false) {
            console.log('all requests have been completed');
            console.log(request_maker.output);
        }
    }
}
inputList = [1, 2, 3, 4, 5, 6];
if (request_maker.requestData(inputList)) {
    console.log('started working');
}
if (!request_maker.requestData(inputList)) {
    console.log('work in progress, try again later');
}
Note that I've changed the request path to jsfiddle's ajax simulation link and replaced the html() calls with append() to print text in the div. The calls are made and handled in the same order as in inputList, yet they don't lock up the user's browser. The order of elements in request_maker.output is also the same as in inputList.
Bear in mind that you will need to add error handling too (probably just a function that pushes an 'error' string into output instead of the result); otherwise any ajax error (403/404/502, etc.) will leave it stuck in the working state. Or you can use complete instead of success and check the request status right there.
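For illustration, a minimal sketch of that error handling (assuming the request_maker object above; this would replace the $.ajax call inside doTheJob):
$.ajax({
    url: '/echo/json/',
    cache: false,
    method: "post",
    timeout: 2000,
    data: {
        requesttype: "getmydata",
        email: encodeURIComponent(current_data_to_send)
    },
    success: function (data, status, xhrobject) {
        request_maker.handleReturnedData(data);
    },
    error: function (xhr, status) {
        // 403/404/502, timeouts etc. end up here; record a placeholder
        // instead of a result so output stays aligned with the input
        request_maker.output.push('error');
        // keep draining the queue so 'working' never gets stuck
        request_maker.doTheJob();
    }
});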
UPD: Answer to the question: you cannot get both. You either use callbacks and let the browser repaint in between asynchronous requests, or you make the requests synchronous and block the browser until your code has finished working.
UPD2: There is actually some information on forcing a redraw, though I don't know if it will work for you: Force DOM redraw/refresh on Chrome/Mac

How do I capture an aborted call or is setting the timeout to 0 correct?

I have a JavaScript client that works in Chrome and Firefox but fails in IE. Looking at the network trace in the IE debugger, I can see that several of the AJAX calls have been aborted.
I've been able to get around it by setting the timeout to 0. I'd like to know whether this is the correct way to handle my requests being aborted. Basically, what could go wrong?
My initial thought was that I should capture and resend on error and, if multiple resubmits do not result in a completed request, finally alert the user. I'd still like to know how to do this even if setTimeout is the proper way to address my immediate issue.
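For what it's worth, a capture-and-resend wrapper along those lines might look like the sketch below (hypothetical: it borrows the Mithril m.request/m.deferred API from the code that follows, and the retry count and delay are invented):
// Sketch: retry a failed m.request a few times before alerting the user.
function requestWithRetry(args, retriesLeft) {
    var deferred = m.deferred();
    m.request(args).then(deferred.resolve, function (err) {
        if (retriesLeft > 0) {
            // brief pause, then chain the next attempt to the same promise
            setTimeout(function () {
                requestWithRetry(args, retriesLeft - 1)
                    .then(deferred.resolve, deferred.reject);
            }, 500);
        } else {
            alert("The request could not be completed. Please try again later.");
            deferred.reject(err);
        }
    });
    return deferred.promise;
}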
Also, the application will process an Excel workbook of addresses, call a web service to add some data to them, and then allow the user to download the enhanced file.
This is what I have so far, first in app.js:
var requestWithFeedback = function (args) {
    $(".loader").removeClass('hidden');
    var oldConfig = args.config || function () { };
    args.config = function (xhr) {
        xhr.setRequestHeader("Authorization", "Bearer " + localStorage.token);
        oldConfig(xhr);
        extract: extract;
    };
    var deferred = m.deferred();
    setTimeout(function () { // <== This solved it in IE, but is this the way to handle this?
        m.request(args).then(deferred.resolve, function (err) {
            if (err === "Invalid token!") {
                m.route('/');
            }
        });
    }, 0);
    $(".loader").addClass('hidden');
    return deferred.promise;
}
From model.js:
app.MarkedAddresses.ProcessAddressBatch = function () {
    var requestData = {
        Addresses: app.MarkedAddresses.vm.addresses
    };
    return requestWithFeedback({
        method: "POST",
        url: "API/server.ashx",
        data: requestData,
        deserialize: function (value) { return value; }
    })
    .then(function (value) {
        var responseJSON = $.parseJSON(value);
        $.merge(app.MarkedAddresses.vm.results, responseJSON);
        app.MarkedAddresses.vm.currentRecord(app.MarkedAddresses.vm.results.length);
        app.MarkedAddresses.vm.progress(Math.max(app.MarkedAddresses.vm.progress(), ~~(app.MarkedAddresses.vm.currentRecord() / app.MarkedAddresses.vm.totalRecords() * 100)));
        m.redraw(); // Force redraw for progress bar
        return value;
    },
    function (error) { console.log(error); } // <== I thought errors would show up here, but I never hit a breakpoint here.
    );
}
Added loops
function process_wb(wb) {
    app.MarkedAddresses.vm.results.length = 0;
    $('.descending').removeClass("descending");
    $('.ascending').removeClass("ascending");
    app.MarkedAddresses.vm.progress(.1);
    m.redraw();
    var header = mapHeader(wb);
    var addressJSON = to_json(wb, header);
    app.MarkedAddresses.vm.totalRecords(addressJSON.length);
    for (var i = 0; (i < addressJSON.length + 1); i += 1000) {
        app.MarkedAddresses.vm.addresses = addressJSON.slice(i, Math.min(((i) + 1000), addressJSON.length));
        app.MarkedAddresses.vm.response(new app.MarkedAddresses.vm.processAddressBatch());
    }
}
Why isn't the error callback triggered in that section of the code?
It seems like I should add a deferred section here, but everything I've tried has been a syntax error.
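One plausible explanation, judging only from the code above (a guess, not a verified answer): requestWithFeedback only ever resolves its deferred; the error branch never calls deferred.reject, so the error callback in ProcessAddressBatch has nothing to fire on. A minimal sketch of that change:
// Sketch: propagate failures by rejecting the deferred so the error
// callback passed to .then() in ProcessAddressBatch can actually run.
m.request(args).then(deferred.resolve, function (err) {
    if (err === "Invalid token!") {
        m.route('/');
    }
    deferred.reject(err); // without this, callers never see the error
});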

Ajax dojo request locking browser with big JSON

I have a big JSON dataset, about 40,000 items. When I send a request to get it all, the browser locks up until the response comes.
So I am sending requests by index and chunk, like the following.
var index = 0;
var chunk = 500;
var repeat = true;
document.getElementById('loading').style.display = 'inline-block';
while (repeat == true) {
    var requestOptions = {
        handleAs: "json",
        sync: true,
        query: {
            page: index,
            chunk: chunk
        },
    };
    request.get("domain.com/getdata", requestOptions).then(
        function (response) {
            array.forEach(response.data, function (item) {
                // do something
            });
            if (response.data.length < chunk) {
                repeat = false;
                document.getElementById('loading').style.display = 'inline-block';
            }
            index = index + 1;
        },
        function (error) {
            repeat = false;
        }
    );
}
I am sending a request to get the first 500 records, then the second 500 records, and so on.
When I start the process, the browser locks up. I want to show the loading indicator, but it never appears.
I see in the comments on your question that you've been recommended to use async: true, to which you respond that it then sends requests without ever getting a response, always with the same request parameters.
I think then that you're perhaps a bit unfamiliar with the asynchronous paradigm in JavaScript (remember, Ajax means Asynchronous JavaScript and XML).
First off: async:true is the right way to solve your problem. However, as you've noticed, that alone doesn't fix anything in your code.
Here's a simplified and modified version of your code (don't try this, it doesn't work, it's for explanation purposes only).
var index = 0;
var chunk = 500;
var repeat = true;
while (repeat == true) {
    var requestOptions = {
        handleAs: "json",
        sync: false, // false is the default, so this line is redundant
        query: { page: index, chunk: chunk },
    };
    request.get("domain.com/getdata", requestOptions).then(
        responseOk, responseError);
}
function responseOk(response) {
    // do something..
    if (response.data.length < chunk) {
        repeat = false;
    }
    index = index + 1;
}
function responseError(error) {
    repeat = false;
}
Here's the kicker: the responseOk function is never run. Therefore, index is never updated, and repeat is never set to false - in effect making your while loop infinite!
Why is this? The reason is that Javascript's "Ajax" functions (which are wrapped by dojo's request.get() and friends) are asynchronous.
What you are saying in your code (or rather, in my simplified version above) is effectively:
Hey, JavaScript, do a GET request to the server. When you are done,
sometime in the future, run this responseOk function (or responseError
on error). In the meantime, while you are doing that, I'll
continue with my while loop.
So the while loop keeps churning out GET requests to the server, with the same index! Since the never-ending loop is keeping your JavaScript thread busy (you only have one!), the responseOk function never gets a chance to execute (even though the server may have responded).
That said, how can you split your huge JSON array into multiple, subsequent requests?
You can try something like this:
var index = 0,
    chunk = 500,
    requestOptions = {....};

function handleResponseAndGetNextChunk(response) {
    response && array.forEach(response.data, function (item) {
        // do something
    });
    if (response && response.data.length < chunk) {
        return;
    } else {
        requestOptions.query.page = index++;
        request.get("domain.com/getdata", requestOptions).then(
            handleResponseAndGetNextChunk, responseError);
    }
}
// To start off the sequence of requests:
handleResponseAndGetNextChunk(null);

While loop with jQuery async AJAX calls

The thing:
I have a page which has to display an undetermined number of images, loaded through AJAX (using base64 encoding on the server side) one by one.
var position = 'front';
while (GLOB_PROCEED_FETCH)
{
    getImageRequest(position);
}
function getImageRequest(position)
{
    GLOB_IMG_CURR++;
    $.ajax({
        url: urlAJAX + 'scan=' + position,
        method: 'GET',
        async: false,
        success: function (data) {
            if ((data.status == 'empty') || (GLOB_IMG_CURR > GLOB_IMG_MAX))
            {
                GLOB_PROCEED_FETCH = false;
                return true;
            }
            else if (data.status == 'success')
            {
                renderImageData(data);
            }
        }
    });
}
The problem is that the images (constructed with the renderImageData() function) are appended (all together) to the target DIV only when all images have been fetched. I mean, no DOM manipulation is possible until the loop is over.
I need to load and display images one by one because of the possibly huge number of images, so I can't stack them up until they have all been fetched.
Your best bet would be to restructure your code to use async AJAX calls and launch the next call when the first one completes, and so on. This will allow the page to redisplay between image fetches.
This will also give the browser a chance to breathe and take care of its other housekeeping, and not think that maybe it's locked up or hung.
And using async: false is a bad idea. I see no reason why properly structured code couldn't use asynchronous AJAX calls here and not hang the browser while you're fetching this data.
You could do it with asynchronous ajax like this:
function getAllImages(position, maxImages) {
    var imgCount = 0;
    function getNextImage() {
        $.ajax({
            url: urlAJAX + 'scan=' + position,
            method: 'GET',
            async: true,
            success: function (data) {
                if (data.status == "success" && imgCount <= maxImages) {
                    ++imgCount;
                    renderImageData(data);
                    getNextImage();
                }
            }
        });
    }
    getNextImage();
}
// no while loop is needed
// just call getAllImages() and pass it the
// position and the maxImages you want to retrieve
getAllImages('front', 20);
Also, while this may look like recursion, it isn't really recursion because of the async nature of the ajax call. getNextImage() has actually completed before the next one is called so it isn't technically recursion.
Wrong and wrong. Don't use timers, don't chain them. Look at jQuery Deferred / when; it has everything you need.
var imgara = [];
for (var i = 0; i < imglist.length; i++) {
    // collect one jqXHR promise per image request
    imgara.push($.ajax({ url: imglist[i] })); // assuming imglist holds the request URLs
}
$.when.apply($, imgara).done(function () {
    // do something
}).fail(function () {
    // do something else
});
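One note on collecting the results (a sketch; renderImageData is borrowed from the question): when everything resolves, $.when passes the done callback one argument per request, in the same order as the imgara array:
$.when.apply($, imgara).done(function () {
    // with multiple requests, each argument is a [data, statusText, jqXHR]
    // array, so the response body for each request is args[0]
    $.each(arguments, function (idx, args) {
        renderImageData(args[0]);
    });
});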
Try using the setInterval() function instead of while().
var fetch = setInterval(loadImage, 2000);
function loadImage() {
    // position = ...; // update the position variable here
    getImageRequest(position);
    if (!GLOB_PROCEED_FETCH) {
        clearInterval(fetch);
    }
}

AJAX call freezes browser for a bit while it gets response and executes success

I am doing an AJAX call to my webserver which fetches a lot of data. I show a loading image that spins while the AJAX call executes and then fades away.
The thing I have noticed is that in all browsers this particular call makes the page non-responsive for about 7 seconds. That being said, the loading image is NOT spinning as I had planned while the fetch occurs.
I did not know if this is just something that happens, or if there is a way around it - in a sense, causing a fork() so that it does one thing while my loading icon still spins.
Thoughts? Ideas?
Below is the code, as someone wanted to see it:
$("div.loadingImage").fadeIn(500); //.show();
setTimeout(function () {
    $.ajax({
        type: "POST",
        url: WEBSERVICE_URL + "/getChildrenFromTelTree",
        dataType: "json",
        async: true,
        contentType: "application/json",
        data: JSON.stringify({
            "pText": parentText,
            "pValue": parentValue,
            "pr_id": LOGGED_IN_PR_ID,
            "query_input": $("#queryInput").val()
        }),
        success: function (result, textStatus, jqXHR) {
            //alert("winning");
            //var childNodes = eval(result["getChildrenFromTelTreeResult"]);
            if (result.getChildrenFromTelTreeResult == "") {
                alert("No Children");
            } else {
                var childNodes = JSON.parse(result.getChildrenFromTelTreeResult);
                var newChild;
                //alert('pText: ' + parentText + "\npValue: " + parentValue + "\nPorofileID: " + LOGGED_IN_PR_ID + "\n\nFilter Input; " + $("#queryInput").val());
                //alert(childNodes.length);
                for (var i = 0; i < childNodes.length; i++) {
                    TV.trackChanges();
                    newChild = new Telerik.Web.UI.RadTreeNode();
                    newChild.set_text(childNodes[i].pText);
                    newChild.set_value(childNodes[i].pValue);
                    // confirmed that newChild is set to ServerSide through debug and get_expandMode();
                    parentNode.get_nodes().add(newChild);
                    TV.commitChanges();
                    var parts = childNodes[i].pValue.split(",");
                    if (parts[0] != "{fe_id}" && parts[0] != "{un_fe_id}") {
                        newChild.set_expandMode(Telerik.Web.UI.TreeNodeExpandMode.ServerSide);
                    }
                }
            }
            //TV.expand();
            //recurseStart(TV);
        },
        error: function (xhr, status, message) {
            alert("errrrrror");
        }
    }).always(function () {
        $("div.loadingImage").fadeOut();
    });
}, 500);
A coworker of mine noticed this issue and suggested I add a setTimeout(function(){...},500); but it does not fix the issue at hand, so it will most likely be removed.
Since JavaScript is single-threaded, a lot of synchronous processing will hang up the event queue and prevent other code from executing. In your case, it's the for-loop that's locking up the browser while it executes.
What you can try is putting each of your iterations onto the event queue.
for (var i = 0; i < childNodes.length; i = i + 1) {
    (function (i) {
        setTimeout(function () {
            // code here - this closure still sees the right value of i
        }, 0);
    })(i);
}
This should space out the processing and not force the browser to finish it all at once. The self-executing function is there to create a closure that holds on to the value of the loop counter i.
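If one timeout per iteration turns out to be too slow, a batched variant is a common middle ground. A sketch, where BATCH_SIZE is an arbitrary assumption and processNode is a hypothetical stand-in for the tree-building body above:
var BATCH_SIZE = 50; // made-up tuning knob
function processBatch(start) {
    var end = Math.min(start + BATCH_SIZE, childNodes.length);
    for (var i = start; i < end; i++) {
        processNode(childNodes[i]); // hypothetical: the RadTreeNode work above
    }
    if (end < childNodes.length) {
        // yield to the browser between batches so the spinner keeps animating
        setTimeout(function () { processBatch(end); }, 0);
    }
}
processBatch(0);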
