How can I run ajax for an array of objects? - javascript

I'm not sure if this is an efficient way to use ajax, but I am looping through an array of information using a for loop:
loadProfiles.js
var tempString = "";
var searchPeople = function(sv){
    var xhttp = new XMLHttpRequest();
    xhttp.onreadystatechange = function(){
        if(xhttp.readyState == 4 && xhttp.status == 200){
            tempString = xhttp.responseText;
            loadPeople(tempString, sv);
        }
    }
    var searchvalue = sv;
    searchvalue = searchvalue.join(" ");
    xhttp.open("GET", "php/searchProfiles.php?searchvalue=" + searchvalue, true);
    xhttp.send();
}
var loadPeople = function(people, sv){
    loadedPeople = [];
    var normList = people.split(",");
    var list = people.toLowerCase().split(",");
    list.splice(list.length - 1, 1);
    var zsearch = sv;
    for(var i = 0; i < list.length; i++){
        loadedImageId[i] = list[i].split("_")[1];
        if(loadedImageId[i] == 0){
            loadedImageId[i] = "images/GrayProfilePic.png";
        }
        else{ // This is what I need to fix
            var grabPic = new XMLHttpRequest();
            grabPic.onreadystatechange = function(){
                if(grabPic.readyState == 4 && grabPic.status == 200){
                    console.log("ready to go");
                    loadedImageId[i] = grabPic.responseText;
                    if(loadedImageId[i] == "Error1"){
                        loadedImageId[i] = "images/GrayProfilePic.png";
                    }
                }
            }
            grabPic.open("GET", "php/grabProfPics.php?imageid=" + loadedImageId[i], true);
            grabPic.send();
        } // end of the part I need to fix
        list[i] = list[i].split("_")[0];
        for(var j = 0; j < zsearch.length; j++){
            if(list[i].indexOf(zsearch[j]) > -1){
                if(loadedPeople.indexOf(list[i]) == -1){
                    if(loadedPeople.indexOf(normList[i].split("_")[0]) == -1){
                        loadedPeople.push(normList[i].split("_")[0]);
                    }
                }
            }
        }
    }
    console.log(loadedPeople);
    console.log(loadedImageId);
}
searchProfiles.php
$query = "SELECT username, imageid FROM `memberHandler`";
$result = mysqli_query($connect, $query) or die("Could not query");
while($row = mysqli_fetch_assoc($result)){
echo $row['username'] . "_" . $row['imageid'] . ",";
}
grabProfPics.php
$query = "SELECT image, mime_type FROM memberProfilePictures WHERE `id`='$imageid'";
$result = mysqli_query($connect, $query);
if(mysqli_num_rows($result) != 0){
$row = mysqli_fetch_assoc($result);
$imagesrc = $row['image'];
$imagesrc = base64_encode($imagesrc);
$imagetype = $row['mime_type'];
echo "data:" . $imagetype . ";base64," . $imagesrc . "";
}
else{
echo "Error1";
}
However, the server takes a moment to send its response, by which time the variable i in the for loop has long since changed. Is there a way to do this efficiently and update the array with new information based on what the current array value is? I hope this question makes sense! Thanks for the help =)
Basically I am trying to loop through the image ids, and if an id is not zero (meaning the user has already set a profile image; otherwise they haven't and the id is 0), it will use ajax to connect to a database of images, grab the image that corresponds to that specific ID, then return the image source and update the array. I am sorry I was not more specific in saying this before; I just figured I could get away with a more simplified version.

Post Question Update: I wrote this before you pasted all of your code. It still applies, but a few more thoughts:
You seem to be just dumping data into SQL query strings. Little Bobby Tables would be proud, but you should worry about SQL injection.
If you insist on writing the standard new XMLHttpRequest(); code yourself (and not using a library, like fetch or jQuery), you should wrap that in a function(url, data, method, successCb, errorCb). Libraries will help.
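A minimal sketch of such a wrapper, assuming plain XMLHttpRequest and a GET-only case (the name ajaxGet and its signature are illustrative, not an existing API):
function ajaxGet(url, successCb, errorCb) {
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
        if (xhr.readyState !== 4) return;           // not finished yet
        if (xhr.status === 200) {
            successCb(xhr.responseText);            // hand the response body to the caller
        } else if (typeof errorCb === "function") {
            errorCb(xhr.status);                    // surface failures instead of swallowing them
        }
    };
    xhr.open("GET", url, true);
    xhr.send();
}
// usage: ajaxGet("php/grabProfPics.php?imageid=" + id, function(text){ /* ... */ });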
In your marked error code, here's the one that really bites you:
By the time the response arrives, i has long since moved on and no longer matches the index the call was made for.
loadedImageId[i] = grabPic.responseText;
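A hedged sketch of the usual fix for that: capture the current index in an immediately invoked function (or use let in ES2015) so each response writes back into the slot it was issued for; this reuses the loadedImageId array and endpoint from the question:
else {
    (function(index) {                                // index is frozen for this request
        var grabPic = new XMLHttpRequest();
        grabPic.onreadystatechange = function() {
            if (grabPic.readyState == 4 && grabPic.status == 200) {
                loadedImageId[index] = grabPic.responseText;
                if (loadedImageId[index] == "Error1") {
                    loadedImageId[index] = "images/GrayProfilePic.png";
                }
            }
        };
        grabPic.open("GET", "php/grabProfPics.php?imageid=" + loadedImageId[index], true);
        grabPic.send();
    })(i);
}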
Moving on to the original async explanation:
Async Code
You're touching on how to handle general asynchronous tasks, which include ajax calls.
There are a host of ways to handle this problem, notably callbacks and promises.
While you could do this synchronously, for anything other than toys or quick hacks you are better off handling the data asynchronously.
Example
First, define our service. In this case, it's not leaving our machine, but the principle would be the same. You send something (profile id) and get something back (profile image url).
// After ~1-2 seconds, answer the callback with the evenness of the input
var isEvenAjax = function(num, cb) {
    setTimeout(function(){
        var isEven = num % 2 === 0;
        cb(num + " is " + (isEven ? "Even" : "Odd"));
    }, (Math.floor(Math.random() * 12) + 3) * 150);
};
You can have different signatures, but this is the crux. You put data into something, wait a while, and get a response.
For example:
isEvenAjax(2,console.log);
isEvenAjax(3,console.log);
isEvenAjax(7,console.log);
Could result in a feedback of:
"7 is Odd"
"2 is Even"
"3 is Odd"
And our test data:
var information = [10, 11, 12];
Now to send our data to the service and get something back. A simple foreach can handle this (NOTE: this is for simple demo purposes. This could get real messy real fast. Promises are a good way to go).
var getInformationResponses = function(information, cb) {
    var responses = [];
    information.forEach(function(i){
        isEvenAjax(i, function(response){
            console.log("Feedback for " + i + " is: " + response);
            responses.push({num: i, response: response});
            if (responses.length >= information.length){
                cb(responses);
            }
        });
    });
};
Note that the function that wraps all of your asynchronous calls is itself asynchronous (and, under our callback style, it needs a 'done' callback).
Breaking this down:
After declaring a responses array (into which we put all the results), loop through all of the information elements:
var responses = [];
information.forEach(function(i){
For every element, make an async call.
isEvenAjax(i,function(response){
For the callback for every element (as in, when data is returned from the long running service), note with console.log (for demo) and push the results and the original data into the responses array. Maintaining the source data may not matter for all apps, but in some cases (like which profile ids correspond to which profile urls) it will. Recall: async calls will never guarantee order.
console.log("Feedback for " + i + " is: " + response);
responses.push({num:i,response:response});
Now, check if the number of responses match the requests. If not, then not all the results are in and do nothing. If so, then trigger the main callback and send the complete data back to the main caller.
if (responses.length >= information.length){
cb(responses);
}
So an example like:
getInformationResponses(information,console.log);
can return something such as:
"Feedback for 10 is: 10 is Even"
"Feedback for 12 is: 12 is Even"
"Feedback for 11 is: 11 is Odd"
[[object Object] {
num: 10,
response: "10 is Even"
}, [object Object] {
num: 12,
response: "12 is Even"
}, [object Object] {
num: 11,
response: "11 is Odd"
}]
Promises
This exercise is purely intended to explore how asynchronous calls can be handled and wouldn't do well in production. Problems like error handling (ajax calls will fail) aren't addressed here.
As mentioned by CallMeNorm, promises can be great. I don't have time to cover them now.
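Still, as a minimal sketch (assuming the isEvenAjax and information examples above, with no error handling), the same toy service can be wrapped in a Promise:
var isEvenAjaxP = function(num) {
    return new Promise(function(resolve) {
        isEvenAjax(num, resolve);        // resolve with the service's answer
    });
};
// Promise.all keeps results in the order of the input array, regardless of arrival order
Promise.all(information.map(isEvenAjaxP)).then(function(responses) {
    console.log(responses);              // ["10 is Even", "11 is Odd", "12 is Even"]
});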

As it stands, the example you have is going to throw some errors: information[i] does not have a doAjaxstuff method. However, what I think you're trying to do would be straightforward with Promises, which are native in modern browsers; even jQuery 3.0 has a compliant implementation. In that case, you could do something like:
var promises = information.map(function(piece, index) {
    // doAjaxstuff should return a promise (Promise.resolve also accepts a plain value)
    return Promise.resolve(doAjaxstuff(piece)).then(function(value) {
        return [index, value];
    });
});
Promise.all(promises)
    .then(function(indexedValues) {
        // rebuild an in-order array from the [index, value] pairs
        var inOrderValues = indexedValues.reduce(function(state, pair) {
            state[pair[0]] = pair[1];
            return state;
        }, []);
        // doYourThing
    });

Ajax calls are asynchronous, which means the for loop will not wait for them to finish. If you want to "pause" the iteration and only resume when the ajax call returns, you have to make the call synchronous.
You can do that by adding async: false.
For more information check the jQuery docs.
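A hedged sketch of what that looks like with $.ajax (keep in mind that synchronous requests block the page and are deprecated on the main thread, so treat this as a last resort; imageId stands for whatever your loop is currently processing):
$.ajax({
    url: "php/grabProfPics.php",
    data: { imageid: imageId },
    async: false,                       // blocks until the response arrives
    success: function (responseText) {
        loadedImageId[i] = responseText;
    }
});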

Related

nodelist sometimes loads after the function is finished

I have an issue where the NodeList sometimes loads before it is used, but other times it loads only after it is used (causing an error because the list is undefined).
[Screenshots in the original post: the output I wish would appear all the time, the error I sometimes receive, and the code I believe is the issue.]
I have done some searching online and I think it is related to the code being asynchronous or synchronous (I have not learned about this, so I am unsure if I am correct). Here's my code. Context: getNeighbourhoodData() is called from the onload of the body of my HTML page.
function getNeighbourhoodData(){
    var request = new XMLHttpRequest();
    request.open('GET', neighbourhood_url, true);
    //This function will be called when data returns from the web api
    request.onload = function() {
        //get all the restaurant records into our neighbourhood array
        neighbourhood_array = JSON.parse(request.responseText);
        //get User data
        displayNeighbourhoods();
    };
    //This command starts the calling of the restaurant web api
    request.send();
}
function displayNeighbourhoods() {
    var list = document.getElementsByName("neiList");
    console.log(list);
    num = 0;
    alphabet_array = ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z"];
    console.log(alphabet_array);
    for (var count = 0; count < neighbourhood_array.length; count++) {
        var neighbourhood = neighbourhood_array[count].Neighbourhood;
        if(neighbourhood_array[count].Neighbourhood.startsWith(alphabet_array[num]) == true){
            var cell = '<li><a class="a--grey" href="/restByNeighbourhood.html" onclick="getName(this)" name="Paya Lebar">' + neighbourhood + '</a></li>';
            list[num].insertAdjacentHTML('beforeend', cell);
            if(count >= neighbourhood_array.length - 1 && num <= 25){
                num += 1;
                count = -1;
                console.log(num);
            }
        }
        else if(count >= neighbourhood_array.length - 1 && num <= 25){
            num += 1;
            count = -1;
            console.log(num);
        }
        else if(num >= 26){
            break;
        }
        else{
            continue;
        }
    }
}
JavaScript is single threaded, which means only one thing can happen at a time. However, with async calls you can "act" like a multi-threaded language.
For example, the built-in fetch() function returns a Promise that you can await.
async function loadURLodContent() {
    const result = await fetch(/* url-path */);
}
So you can await Promises and write async functions that return Promises.
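As a minimal sketch of that idea applied to your code (assuming the same neighbourhood_url and displayNeighbourhoods from the question, and leaving out error handling):
async function getNeighbourhoodData() {
    const response = await fetch(neighbourhood_url);
    neighbourhood_array = await response.json();   // wait until the data has actually arrived
    displayNeighbourhoods();                        // only then touch the NodeList
}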
But this topic isn't an easy one. I'd really recommend getting into Promises and async calls as soon as possible, because you're going to encounter them sooner or later if you develop for the web.
But as for your problem... at least from my point of view you're not giving enough information. Tracer69 has a good proposal for that in the comments.

Twitch TV JSON API Issue

So, I am trying to use the Twitch API:
https://codepen.io/sterg/pen/yJmzrN
If you check my codepen page you'll see that each time I refresh the page the status order changes and I can't figure out why is this happening.
Here is my javascript:
$(document).ready(function(){
    var ur = "";
    var tw = ["freecodecamp","nightblue3","imaqtpie","bunnyfufuu","mushisgosu","tsm_dyrus","esl_sc2"];
    var j = 0;
    for(var i = 0; i < tw.length; i++){
        ur = "https://api.twitch.tv/kraken/streams/" + tw[i];
        $.getJSON(ur, function(json) {
            $(".tst").append(JSON.stringify(json));
            $(".name").append("<li> " + tw[j] + "<p>" + "" + "</p></li>");
            if(json.stream == null){
                $(".stat").append("<li>" + "Offline" + "</li>");
            }
            else{
                $(".stat").append("<li>" + json.stream.game + "</li>");
            }
            j++;
        })
    }
});
$.getJSON() works asynchronously. The JSON won't be handled until the results come back, and the API can return responses in a different order than the requests were made, so you have to handle this.
One way to do this is use the promise API, along with $.when() to bundle up all requests as one big promise, which will succeed or fail as one whole block. This also ensures that the response data is returned to your code in the expected order.
Try this:
var channelIds = ['freecodecamp', 'nightblue3', 'imaqtpie', 'bunnyfufuu', 'mushisgosu', 'tsm_dyrus', 'esl_sc2'];
$(function () {
    $.when.apply(
        $,
        $.map(channelIds, function (channelId) {
            return $.getJSON(
                'https://api.twitch.tv/kraken/streams/' + encodeURIComponent(channelId)
            ).then(function (res) {
                return {
                    channelId: channelId,
                    stream: res.stream
                };
            });
        })
    ).then(function () {
        console.log(arguments);
        var $playersBody = $('table.players tbody');
        $.each(arguments, function (index, data) {
            $playersBody.append(
                $('<tr>').append([
                    $('<td>'),
                    $('<td>').append(
                        $('<a>')
                            .text(data.channelId)
                            .attr('href', 'https://www.twitch.tv/' + encodeURIComponent(data.channelId))
                    ),
                    $('<td>').text(data.stream ? data.stream.game : 'Offline')
                ])
            );
        });
    });
});
https://codepen.io/anon/pen/KrOxwo
Here, I'm using $.when.apply() to use $.when with an array, rather than a list of parameters. Next, I'm using $.map() to convert the array of channel IDs into an array of promises for each ID. After that, I have a simple helper function which handles the normal response (res) and pulls out the relevant stream data, while attaching the channelId for use later on. (Without this, we would have to go back to the original array to get the ID. You can do this, but in my opinion it isn't the best practice. I'd much prefer to keep the data with the response so that later refactoring is less likely to break something. This is a matter of preference.)
Next, I have a .then() handler which takes all of the data and loops through them. This data is returned as arguments to the function, so I simply use $.each() to iterate over each argument rather than having to name them out.
I made some changes in how I'm handling the HTML as well. You'll note that I'm using $.text() and $.attr() to set the dynamic values. This ensures that your HTML is valid (as you're not really using HTML for the dynamic bit at all). Otherwise, someone might have the username of <script src="somethingEvil.js"></script> and it'd run on your page. This avoids that problem entirely.
It looks like you're appending the "Display Name" in the same order every time you refresh, by using the j counter variable.
However, you're appending the "Status" as each request returns. Since these HTTP requests are asynchronous, the order in which they are appended to the document will vary each time you reload the page.
If you want the statuses to remain in the same order (matching the order of the Display Names), you'll need to store the response data from each API call as they return, and order it yourself before appending it to the body.
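A hedged sketch of that approach, reusing the tw array from the question: collect each result into a slot keyed by its index, and only append once every response is in.
var results = new Array(tw.length);
var remaining = tw.length;
tw.forEach(function (channel, index) {
    $.getJSON("https://api.twitch.tv/kraken/streams/" + channel, function (json) {
        results[index] = json.stream ? json.stream.game : "Offline";
        if (--remaining === 0) {
            // all responses are in: append in the original order
            results.forEach(function (status) {
                $(".stat").append("<li>" + status + "</li>");
            });
        }
    });
});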
At first, I changed the last else condition (the one that prints out the streamed game) to $(".stat").append("<li>"+tw[j]+": "+json.stream.game+"</li>"); - it was identical in meaning to what you tried to achieve, yet produced the same error.
There's a discrepancy between the list you've created and the data you receive; they are not directly associated.
It is preferable to use $(".stat").append("<li>"+json.stream._links.self+": "+json.stream.game+"</li>");, and in the worst case you can even pull the user's name out of it with a regex or substr.
As long as you don't run separate loops for filling the "Display Name" and "Status" columns, you can still keep them separate if you don't want to write them on the same line, as my example does.
Whichever way you choose, in the end the problem is that the "Status" column is filled in a different order than the "Display Name" column.
This code will not preserve the order, but will preserve which array entry is being processed
$(document).ready(function() {
    var ur = "";
    var tw = ["freecodecamp", "nightblue3", "imaqtpie", "bunnyfufuu", "mushisgosu", "tsm_dyrus", "esl_sc2"];
    for (var i = 0; i < tw.length; i++) {
        ur = "https://api.twitch.tv/kraken/streams/" + tw[i];
        (function(j) {
            $.getJSON(ur, function(json) {
                $(".tst").append(JSON.stringify(json));
                $(".name").append("<li> " + tw[j] + "<p>" + "" + "</p></li>");
                if (json.stream == null) {
                    $(".stat").append("<li>" + "Offline" + "</li>");
                } else {
                    $(".stat").append("<li>" + json.stream.game + "</li>");
                }
            })
        }(i));
    }
});
This code will preserve the order fully - the layout needs tweaking though
$(document).ready(function() {
    var ur = "";
    var tw = ["freecodecamp", "nightblue3", "imaqtpie", "bunnyfufuu", "mushisgosu", "tsm_dyrus", "esl_sc2"];
    for (var i = 0; i < tw.length; i++) {
        ur = "https://api.twitch.tv/kraken/streams/" + tw[i];
        (function(j) {
            var name = $(".name").append("<li> " + tw[j] + "<p>" + "" + "</p></li>");
            var stat = $(".stat").append("<li></li>")[0].lastElementChild;
            console.log(stat);
            $.getJSON(ur, function(json) {
                $(".tst").append(JSON.stringify(json));
                if (json.stream == null) {
                    $(stat).text("Offline");
                } else {
                    $(stat).text(json.stream.game);
                }
            }).then(function(e) {
                console.log(e);
            }, function(e) {
                console.error(e);
            });
        }(i));
    }
});

How to slow down an Ajax call?

I have a JS function containing a loop that makes an AJAX call on every iteration. The call inserts the checked elements into a DB and returns the results of those elements in the next section of the same page.
The problem I have is that when I check, for example, 4 checkboxes across 3 groups, only the checkboxes of the last group get added to the page. However, when I use alert(), I can see all the elements.
I used setTimeout, but I got errors in the code. I also added lines to give the AJAX call more time, but the problem remains. So I wonder if there is a way to slow down the code without using alert().
This is my script:
addAptitudeField : function(currentAutocompleteField, idChamp) {
    var currentAutocompleteFieldBind = currentAutocompleteField;
    var idChampBind = idChamp;
    window.setTimeout(function() {
        // Code ...
        var paramDwr = {};
        var newDivName = "div" + idChamp + lastValueId;
        paramDwr[attributs.r_divId] = newDivName;
        paramDwr[attributs.r_currentValue] = currentValue;
        paramDwr[attributs.r_hiddenIdsField] = hiddenIdsField.id;
        paramDwr[attributs.r_lastValueId] = lastValueId;
        paramDwr[attributs.r_itemmod] = nbAptitudesCat % 2 == 0;
        // setTimeout(RepertoireDwr.ligneSuppEtSpanMessage, 1000) doesn't work
        RepertoireDwr.ligneSuppEtSpanMessage(paramDwr, function(ajaxPage) {
            divCategorie.update(divCategorie.innerHTML + ajaxPage.texte);
            aptitudeAvecDetail.remetsValeursStockees();
            var btnSuppression = $(newDivName).getElementsByTagName('img')[0];
            btnSuppression.setAttribute("onclick", "formulaireFiche.updateCSS('" + newDivName + "');" + btnSuppression.getAttribute("onclick") + "fiche.updateCategorieSuppressionAptLieeUo(\'divCat" + currentCategorie + "\');");
        });
        }
        //
        // alert() : It works in this case.
        //
        // for (var i=0; i<5000000; i++) ; it doesn't work
    }, 400);
}
Thank you in advance for your help and time.
I will likely be downvoted for mentioning this, because it is not a recommended procedure, but I believe every coder should have all facts.
In the jQuery AJAX construct there is the option async: false, which will keep the script from continuing UNTIL the AJAX call has completed processing. Needless to say, if things go wrong in the AJAX the browser could freeze. A lot depends on who your users are and the amount of traffic -- on a few of my ten-user in-house projects it was an acceptable solution.
$.ajax({
    async: false,
    type: 'post',
    url: 'ajax/ax.php',
    data: 'request=',
    success: function(d){
        if (d.length) alert(d);
    }
});
Ref:
What does "async: false" do in jQuery.ajax()?
The better idea, however, is to look into the Promises interface, with methods like .when() and .then()
References:
https://jsfiddle.net/v86bc028/2/
http://jqfundamentals.com/chapter/ajax-deferreds#
http://digitizor.com/jquery-html-callback-function-using-promise/#
how does jquery's promise method really work?
The problem you're running into deals with asynchronous functions, or the A in AJAX. If you don't know what an asynchronous function is, there are many others who can explain it better than I can, so give that a google.
What's happening without the alert() in there is that your code makes 4 server calls, but all 4 get sent out before you get a response to any of them. With the alert() (or setTimeout), you're giving the code time to receive each response before the next call is made.
There are several ways you can approach this. The first way is to issue the next call only after the previous one receives a response. The second way is to use an async approach to fire all 4 at once on separate chains. I'm not the best at explaining this part, but there's plenty of code to be found on SO and online.
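A hedged sketch of the first approach, where each call is only issued after the previous response arrives (processItem stands in for whatever your AJAX wrapper does and is not part of your code):
function processSequentially(items, processItem, done) {
    var index = 0;
    (function next() {
        if (index >= items.length) return done();
        processItem(items[index++], next);   // next() is only called from the response callback
    })();
}
// usage: processSequentially(checkedElements, sendOneAjaxCall, function(){ /* all done */ });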
I think you have a more generic problem in your code: you seem to need to delay your execution to wait until something else is finished, instead of being notified when it is done.
The line that annoys me most is this one:
divCategorie.update(divCategorie.innerHTML + ajaxPage.texte);
What exactly is update doing? How is it implemented?
I assume it does something like divCategorie.innerHTML += ajaxPage.texte;
which is highly unfavorable, since the browser has to parse and rebuild whatever is already in divCategorie.innerHTML.
Just appending the new markup would be better.
Long story short: a good hack might be to insert a hidden node as a placeholder (so you can keep the order, although the AJAX requests may return in a different order) and replace that node with the real content as soon as it arrives.
Kind of like this:
addAptitudeField : function(currentAutocompleteField, idChamp) {
    var currentAutocompleteFieldBind = currentAutocompleteField;
    var idChampBind = idChamp;
    // this is done immediately, and therefore preserves the order of the loop,
    // without any delays/timeouts
    var placeholder = document.createElement("div");
    placeholder.className = "placeholder";
    placeholder.style.display = "none";
    divCategorie.appendChild(placeholder);
    window.setTimeout(function() {
        // Code ...
        var paramDwr = {};
        var newDivName = "div" + idChamp + lastValueId;
        paramDwr[attributs.r_divId] = newDivName;
        paramDwr[attributs.r_currentValue] = currentValue;
        paramDwr[attributs.r_hiddenIdsField] = hiddenIdsField.id;
        paramDwr[attributs.r_lastValueId] = lastValueId;
        paramDwr[attributs.r_itemmod] = nbAptitudesCat % 2 == 0;
        // setTimeout(RepertoireDwr.ligneSuppEtSpanMessage, 1000) doesn't work
        RepertoireDwr.ligneSuppEtSpanMessage(paramDwr, function(ajaxPage) {
            // convert the passed text into a DocumentFragment
            var frag = fragment(ajaxPage.texte);
            // replace the placeholder with the fragment
            divCategorie.insertBefore(frag, placeholder);
            divCategorie.removeChild(placeholder);
            aptitudeAvecDetail.remetsValeursStockees();
            var btnSuppression = $(newDivName).getElementsByTagName('img')[0];
            // this is also pretty horrible to me:
            btnSuppression.setAttribute("onclick", "formulaireFiche.updateCSS('" + newDivName + "');" + btnSuppression.getAttribute("onclick") + "fiche.updateCategorieSuppressionAptLieeUo(\'divCat" + currentCategorie + "\');");
        });
        }
    }, 400);
}
I think you should do some major refactoring. And take a look into Promises.
// * -> DocumentFragment
// strings/primitives are parsed as HTML markup,
// null / undefined is ignored,
// Array-like structures are parsed recursively
var fragment = (function(container){
    return function(src){
        return reducer(document.createDocumentFragment(), src);
    }

    function reducer(frag, node){
        var i, len, fc, c, r;
        if(node === Object(node)){
            if("nodeType" in node){
                // DOM nodes
                frag.appendChild(node);
            }else{
                // Array-like structures, like NodeLists or jQuery objects, or just plain Arrays
                for(i = 0, len = ("length" in node && node.length)|0, r = reducer; i < len; (i in node) && r(frag, node[i]), i++);
            }
        }else if(node != null){
            // strings (all primitives)
            for((c = container).innerHTML = node; fc = c.firstChild; frag.appendChild(fc));
        }
        return frag;
    }
})(document.createElement("div"));
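And as a hedged sketch of the Promise direction mentioned above (assuming RepertoireDwr.ligneSuppEtSpanMessage keeps its (params, callback) signature; error handling omitted):
function ligneSuppEtSpanMessageP(paramDwr) {
    return new Promise(function (resolve) {
        RepertoireDwr.ligneSuppEtSpanMessage(paramDwr, resolve);   // resolve with ajaxPage
    });
}
// usage: ligneSuppEtSpanMessageP(paramDwr).then(function (ajaxPage) { /* insert fragment(ajaxPage.texte) */ });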

How to restructure a long running php process to not time out [duplicate]

This question already has answers here:
How to increase the execution timeout in php?
(14 answers)
Closed 8 years ago.
I have a simple javascript function like so:
$(document).ready(function(){
    var request = $.ajax({
        url: "read_images.php",
        type: "GET",
        dataType: "html"
    });
    request.done(function(msg) {
        $("#mybox").html(msg);
        document.getElementById('message').innerHTML = '';
    });
    request.fail(function(jqXHR, textStatus) {
        alert("Request failed: " + textStatus);
    });
});
The php script it is calling loops on the contents of a folder, runs some checks, and returns a response. The script is as follows:
//Get all Images from server, store in variable
$server_images = scandir('../images/original');
//Remove first 3 elements, which are not correct
array_shift($server_images);
array_shift($server_images);
array_shift($server_images);
$j = 0;
for($i = 0; $i < count($server_images) && $i < 3000; $i++) {
    $server_image = $server_images[$i];
    //Make sure that the server image does not have a php extension
    if(!preg_match('/.php/', $server_image)) {
        //Select the name from the table where the image name is equal to the server image name
        $query = "SELECT `name`
                  FROM `images`
                  WHERE `name` = '$server_image'";
        $mro_images = $db->query($query);
        $mro_images_row = $mro_images->fetch();
        $mro_image = $mro_images_row['name'];
        //If no results are found
        if(empty($mro_image)) {
            $images[$j] = $server_image;
            $j++;
        }
    }
}
It works if the loop is restricted to 2000 iterations but if I try to do e.g. 3000 iterations the result is:
HTTP/1.1 500 Internal Server Error 31234ms
I've tried increasing the PHP execution limit, but this didn't have any effect; after contacting my host I was told:
Unfortunately in our environment we don't have any way to increase the loadbalancer timeout beyond 30 seconds
Therefore: How can I restructure this code to avoid hitting the execution time limit?
The code below indicates the basic logic to follow. It isn't tested code and should not be taken as a drop-in example.
Use a javascript loop
Instead of making a slow process slower - write your JavaScript to ask for smaller chunks of data in a loop.
I.e. the js could use a while loop:
$(document).ready(function(){
    var done = false,
        offset = 0,
        limit = 20;
    while (!done) {
        var url = "read_images.php?offset=" + offset + "&limit=" + limit;
        $.ajax({
            async: false,
            url: url
        }).done(function(response) {
            if (response.processed !== limit) {
                // asked to process 20, only processed <= 19 - there aren't any more
                done = true;
            }
            offset += response.processed;
            $("#mybox").html("Processed total of " + offset + " records");
        }).fail(function(jqXHR, textStatus) {
            $("#mybox").html("Error after processing " + offset + " records. Error: " + textStatus);
            done = true;
        });
    }
});
Note that in the above example the ajax call is forced to be synchronous. Normally you don't want to do this, but in this example it makes the code easier to write and possibly easier to understand.
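For reference, a hedged sketch of the same chunked loop without async: false, chaining each request from the done handler of the previous one (it assumes the same read_images.php offset/limit contract described below):
function processChunk(offset, limit) {
    $.ajax({ url: "read_images.php?offset=" + offset + "&limit=" + limit })
        .done(function (response) {
            offset += response.processed;
            $("#mybox").html("Processed total of " + offset + " records");
            if (response.processed === limit) {
                processChunk(offset, limit);   // more to do: issue the next request
            }
        })
        .fail(function (jqXHR, textStatus) {
            $("#mybox").html("Error after processing " + offset + " records. Error: " + textStatus);
        });
}
processChunk(0, 20);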
Do a fixed amount of work per php request
The php code also needs modifying to expect and use the get arguments being passed:
$stuff = scandir('../images/original');
$offset = $_GET['offset'];
$limit = $_GET['limit'];
$server_images = array_slice($stuff, $offset, $limit);
foreach($server_images as $server_image) {
    ...
}
...
$response = array(
    'processed' => count($server_images),
    'message' => 'All is right with the world'
);
header('Content-Type: application/json');
echo json_encode($response);
die;
In this way the amount of work a given php request needs to process is fixed, as the overall amount of data to process grows (assuming the number of files in the directory doesn't grow to impractical numbers).
If everything works with 2000 iterations, then for 3000 iterations try raising the time limit to allow PHP to execute longer. But under normal circumstances this is not a good idea. Make sure you know what you are doing and have a good reason for increasing the execution time.
set_time_limit ( 60 );
http://www.php.net/manual/en/function.set-time-limit.php
Also this could be due to the script exhausting the amount of memory. Create a file with the phpinfo function in it and then check the value for the memory_limit.
<?php phpinfo(); ?>
Then you can increase the limit from an .htaccess file or directly in the script. But again, make sure you really want the script to consume more memory. Be careful.
ini_set('memory_limit', '128M'); #change 128 to suit your needs
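If you go the .htaccess route mentioned above, the equivalent directive looks like this (it only takes effect when PHP runs as an Apache module such as mod_php, so check your hosting setup first):
php_value memory_limit 128M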
Your count($server_images) is probably resulting in an infinite loop.
If count() returns 0, your for loop will never end. So you need to check that first.
//Get all Images from server, store in variable
$server_images = scandir('../images/original');
//Remove first 3 elements, which are not correct
array_shift($server_images);
array_shift($server_images);
array_shift($server_images);
$j = 0;
if(count($server_images) > 0){
    for($i = 0; $i < count($server_images) && $i < 3000; $i++) {
        //Do something
    }
}

Synchronous query to Web SQL Database

I'm working on a bit of JavaScript that interacts with a client-side SQLite database, via the newish window.openDatabase(...), database.transaction(...) and related APIs. As most of you know when you execute a query in this way it is an asynchronous call, which is typically good. You can make the call and handle the results as appropriate with callbacks.
In my current situation I'm working on an algo for a client that does some hierarchy walking in the locally stored database. The part of the algo I'm having trouble with requires starting at some row, which has a reference to a "parent" (by id) that is another row further up in the table. I have to keep walking up this tree until I reach the root.
The problem is that I'm at a point where I'm not sure how to use an asynchronous style query with a callback to keep feeding the loop parent ids. Ideally I could get the query to block so that I can do it all in the loop. Here's the key parts of my current setup:
for (i in search.searchResults.resultsArray)
{
    hierarchyArr = new Array();
    pageHierarchyArr = new Array();
    id = search.searchResults.resultsArray[i].ID;
    while (id != null && id != "")
    {
        var hierarchySql = "SELECT ID, parentID, type, content FROM content WHERE ID = " + id;
        // This is a prettied up call to database.transaction(...)
        var rs = db.getRS(hierarchySql);
        // Ideally the code below doesn't execute until rs is populated
        hierarchyArr.push(rs[0]);
        if (rs[0].type == "page")
        {
            pageHierarchyArr.push(rs[0]);
            // Do some additional work
        }
        id = rs[0].parentID;
    }
}
As you might imagine, it doesn't work well. hierarchyArr gets an "undefined" pushed into it, and then the script crashes when it tries to check the type of rs[0].
When I try to set it up with a callback (db.getRSAndCallback(sql, callbackFunc), which I used for the earlier, non-interdependent queries just fine) it's worse: the inner loop takes off like crazy because id isn't getting updated; presumably because the loop is keeping the JavaScript interpreter so busy that it never actually fills rs. In some artificial testing where I forced the inner loop to break after a few iterations all the callbacks started coming through all at the end, after the loop finished.
The "standard" (such as it is right now) at http://dev.w3.org/html5/webdatabase/#synchronous-database-api seems to indicate that there is a synchronous API, but I haven't seen any sign of it on any WebKit based browsers.
Can anyone offer suggestions on how I might either (a) properly formulate these iterative, interdependent queries using callbacks, or (b) somehow get the call to happen in a synchronous or apparently synchronous manner?
Many thanks in advance for anyone who takes a crack at this seemingly tricky little problem.
Naim
P.S. Here's the client's implementation of db.getRS for reference:
.
.
.
getRS: function(sql)
{
    var output = [];
    db.database.transaction(function(tx)
    {
        tx.executeSql(sql, [], function(tx, rs)
        {
            for(i = 0; i < rs.rows.length; i++)
            {
                output.push(rs.rows.item(i));
            }
        },
        function(tx, error) { ... });
    });
    return output;
},
.
.
.
I used callbacks and a closure to solve a similar problem, consider:
function getFolder(id, callback) {
    var data = [];
    ldb.transaction(function (tx) {
        tx.executeSql('SELECT * FROM folders where id=?',
            [id],
            function (tx, results) {
                if (results.rows && results.rows.length) {
                    for (i = 0; i < results.rows.length; i++) {
                        data.push(results.rows.item(i));
                    }
                }
                if (typeof(callback) == 'function')
                    callback(data);
            },
            function (tx, error) {
                console.log(error);
            });
    });
}
In the continuation of this example, a folder has a property parent that defines its relation to other folders, as does a document. The following will get you the path of a document using a closure (success):
function getDocPath(doc, callback) {
    var path = [];
    var parent = doc.parent;
    var success = function(folder) {
        var folder = folder[0];
        parent = folder.parent;
        path.push({'id': folder.id, 'name': folder.name});
        if (parent != "undefined")
            getFolder(parent, success);
        else
            if (typeof(callback) == 'function') callback(path.reverse());
    }
    getFolder(parent, success);
}
You could use callbacks with a closure over your stack of remaining queries, or you could use recursion, passing the stack as a parameter.
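A hedged sketch of that recursive variant, built on the getFolder-style callback above (walkUp and the assumed parent/empty-root convention are illustrative, not part of the original code):
function walkUp(id, stack, callback) {
    getFolder(id, function (rows) {
        var row = rows[0];
        stack.push(row);
        if (row && row.parent != null && row.parent !== "") {
            walkUp(row.parent, stack, callback);   // recurse with the accumulated stack
        } else {
            callback(stack);                       // reached the root: hand back the whole chain
        }
    });
}
// usage: walkUp(startId, [], function (hierarchyArr) { /* ... */ });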
