Issue with for-loop and standard Javascript AJAX - javascript

I have some issues with a for-loop and AJAX. I need to fetch some information from a database, so I pass the incrementing variable to PHP to grab the information and then send it back. The trouble is that the variable skips immediately to its maximum value, making it impossible to store any of the information.
I would prefer not to use jQuery. It may be more powerful, but I find plain JavaScript easier to understand.
Here is the JS code:
for (var i = 0; i <= 3; i++) {
    var js_var = i;
    document.getElementById("link").onclick = function () {
        // ajax start
        var xhr;
        if (window.XMLHttpRequest) xhr = new XMLHttpRequest(); // all browsers
        else xhr = new ActiveXObject("Microsoft.XMLHTTP");     // for IE
        var url = 'process.php?js_var=' + js_var;
        xhr.open('GET', url, false);
        xhr.onreadystatechange = function () {
            if (xhr.readyState === 4 && xhr.status === 200) {
                var div = document.getElementById('test1');
                div.innerHTML = xhr.responseText;
                if (js_var == 2) {
                    var rawr = document.getElementById('test2');
                    rawr.innerHTML = xhr.responseText;
                }
            }
        };
        xhr.send();
        // ajax stop
        return false;
    };
}
Here is the PHP code:
<?php
if (isset($_GET['js_var'])) $count = $_GET['js_var'];
else $count = "<br />js_var is not set!";
$con = mysql_connect("xxx","xxxxx","xxxx");
mysql_select_db('computerparty_d', $con);
$get_hs = mysql_query("SELECT * FROM hearthstone");
$spiller_navn = utf8_encode(mysql_result($get_hs,$count,1));
echo "$spiller_navn";
?>

What you are actually doing in your for-loop is binding an onclick handler, not sending an AJAX request; and each iteration immediately overrides the handler that was created in the previous iteration.
So if you want to add multiple listeners, you should first use nested functions and closures to keep the i variable safe for each listener, and then use addEventListener instead of setting the onclick property. Considering these points, you can do this instead:
for (var i = 0; i <= 3; i++) {
    var clickFunc = (function (js_var) {
        return function () {
            // ajax start
            var xhr;
            if (window.XMLHttpRequest) xhr = new XMLHttpRequest(); // all browsers
            else xhr = new ActiveXObject("Microsoft.XMLHTTP");     // for IE
            var url = 'process.php?js_var=' + js_var;
            xhr.open('GET', url, false);
            xhr.onreadystatechange = function () {
                if (xhr.readyState === 4 && xhr.status === 200) {
                    var div = document.getElementById('test1');
                    div.innerHTML = xhr.responseText;
                    if (js_var == 2) {
                        var rawr = document.getElementById('test2');
                        rawr.innerHTML = xhr.responseText;
                    }
                }
            };
            xhr.send();
            // ajax stop
            return false;
        };
    })(i);
    document.getElementById("link").addEventListener("click", clickFunc);
}

Be aware that you're making a synchronous AJAX call, which is undesirable (it hangs the browser for the duration of the request, which might never end). You may also have problems in some browsers because you're using onreadystatechange, which shouldn't be combined with synchronous requests.
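For reference, here is a minimal sketch of an asynchronous version of one of those handlers (same test1 element and process.php endpoint as above; the makeClickHandler name is just illustrative). The structural change is passing true to open() and doing all DOM work inside the callback:
function makeClickHandler(js_var) {
    return function () {
        var xhr = new XMLHttpRequest();
        xhr.onreadystatechange = function () {
            if (xhr.readyState === 4 && xhr.status === 200) {
                document.getElementById('test1').innerHTML = xhr.responseText;
            }
        };
        xhr.open('GET', 'process.php?js_var=' + js_var, true); // true = asynchronous
        xhr.send();
        return false;
    };
}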

It looks like you are making the AJAX request with a user click.
for (var i = 0; i <= 3; i++) {
    var js_var = i;
    document.getElementById("link").onclick
When this JS is executed it overrides the "onclick" listener of "link" on every pass of the loop: the first iteration assigns it, and each of the following three iterations overwrites it. The result is that when the "link" element is clicked only the last listener exists, so a single AJAX request is made with the last value of js_var.
HTTP requests are expensive (in time), so it might be worth fetching all of the data in one request and then using client-side JS to sift through it, as sketched below.
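For example (a rough sketch, assuming process.php could be changed to return all of the rows as a JSON array in one response; the all=1 parameter is hypothetical):
var xhr = new XMLHttpRequest();
xhr.onreadystatechange = function () {
    if (xhr.readyState === 4 && xhr.status === 200) {
        var rows = JSON.parse(xhr.responseText); // e.g. ["name0", "name1", "name2", "name3"]
        document.getElementById('test1').innerHTML = rows[0];
        document.getElementById('test2').innerHTML = rows[2];
    }
};
xhr.open('GET', 'process.php?all=1', true); // hypothetical "return everything" parameter
xhr.send();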
jQuery is not more powerful than JavaScript; it is JavaScript with a bunch of wrapper functions. My personal opinion is that once IE9 is no longer relevant, jQuery will only be used by people who know jQuery and not JavaScript.

Related

JS/Ajax/Jquery consecutive requests through linked list [duplicate]

This question already has answers here: While looping a jquery ajax call (3 answers). Closed 1 year ago.
My code:
<script type="text/javascript">
    var x = 1;
    var data = JSON.parse(document.getElementById('json').innerHTML);
    var next = data['next'];
    var jsonData = data['data'];
    while (next != null) {
        x = x + 1;
        var nextFileUrl = data['next'];
        console.log('next:', nextFileUrl);
        const xhr = new XMLHttpRequest();
        xhr.addEventListener("readystatechange", function () {
            if (this.readyState === this.DONE) {
                data = this.response;
                next = JSON.parse(this.response)['next'];
                console.log('newNext:', next);
                newJsonData = JSON.parse(this.response)['data'];
                console.log('newJsonData:', newJsonData);
                jsonData['data'].push(newJsonData);
            }
        });
        xhr.open("GET", nextFileUrl);
        xhr.send(null);
    }
</script>
Data example:
{
"next" : "path2",
"data" : "some data here"
}
I have multiple JSON files described as above that I need to access consecutively, in a "while next != null" kind of loop, starting with the data that's already in the page. For every call, I need to get the data value, and do something with it, and then make the next call. Currently, my code seems to constantly be logging the result from the first response only.
I'm fairly new to JavaScript and AJAX, and have no experience with jQuery. I can't use fetch, as I need the solution to work in all browsers. I'm looking for the best solution to this among plain JS, AJAX, and jQuery, if someone can point me in the right direction.
You're sending async requests in a while loop and expecting a value to update on each iteration, but that will never work, since the loop does not wait for the request to complete. Rather than a loop, you can call a function recursively until the condition is satisfied:
var x = 1;
var data = JSON.parse(document.getElementById('json').innerHTML);
var next = data['next'];
var jsonData = data['data'];

function cycle() {
    if (!next) return;
    x = x + 1;
    var nextFileUrl = next;
    console.log('next:', nextFileUrl);
    const xhr = new XMLHttpRequest();
    xhr.addEventListener("readystatechange", function () {
        if (this.readyState === this.DONE) {
            data = this.response;
            next = JSON.parse(this.response)['next'];
            console.log('newNext:', next);
            newJsonData = JSON.parse(this.response)['data'];
            console.log('newJsonData:', newJsonData);
            jsonData['data'].push(newJsonData);
            // request the next file only after this one has finished
            if (next) cycle();
        }
    });
    xhr.open("GET", nextFileUrl);
    xhr.send(null);
}
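One detail the snippet leaves implicit: nothing calls cycle() yet, so you still need to kick the chain off once after defining it, for example:
cycle(); // start fetching from the "next" value parsed out of the embedded JSON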

Adjacently Dependent AJAX (improved)

This question was posted a couple of days ago, but since I'm a newbie it was filled with spaghetti code and that sort of thing (please pardon the form handling as well). That aside, I've added some notes and given some context, but the problem still lies in the second AJAX call.
This is the error that Chrome is throwing: "Cross origin requests are only supported for protocol schemes: http, data, chrome, chrome-extension, https, chrome-extension-resource."
I have hidden the URL because it contains an API key that I would rather not share.
Any and all criticisms are warmly welcomed.
/*
  This module will take a user's name, return an ID,
  then search for more stats in the API with the ID.
*/
var search = document.getElementById('search');
search.addEventListener('click', function () {
    var demo = document.getElementById('demo');
    var player_name = document.getElementById('player_name').value;
    var player_id;
    // Interpolated API URLs
    var name_url = 'URL.pre' + player_name + 'URL.end';
    var stats_url; // nested in the second ajax call to pass updated player_id
    // Get player ID
    var xhr = new XMLHttpRequest();
    var id_return_text;
    xhr.onload = function () {
        if (xhr.status === 200) {
            id_return_text = JSON.parse(xhr.responseText);
            player_id = id_return_text[player_name].id;
            demo.innerHTML = id_return_text[player_name].name + ', your player ID is: ' + player_id;
        }
    };
    xhr.open('GET', name_url, true);
    xhr.send();
    // Search stats with ID
    var xhr_2 = new XMLHttpRequest();
    var stats_return_text;
    xhr.done = function () {
        stats_url = "URL.pre" + player_id + "URL.end";
        if (xhr_2.status == 200) {
            stats_return_text = JSON.parse(xhr_2.responseText);
            demo.innerHTML += stats_return_text['playerStatsSummaries'].playerStatType;
        }
    };
    xhr_2.open("GET", stats_url, true);
    xhr_2.send();
});
<div id="container">
    <img id="duck" src="duck.png" alt="duck">
    <div class="form_wrapper">
        <h1 id="app_header">*QUACK* What's Your player ID?</h1>
        <form>
            <input
                type="text"
                id="player_name"
                placeholder="Summoner Name">
            <input type="button" id="search" value="Search">
        </form>
    </div>
    <p id="demo"></p>
</div>
<script type="text/javascript" src="script.js"></script>
So your primary error was that if you need to make CORS requests (or any AJAX requests, really), you need to run the code from a server (even localhost).
Google (and most browsers) will freak out at you if your page's protocol is "file:///" and you're trying to load things from the internet (or vice versa). And "file:///" cannot make requests for other files, either.
Future reference: you also can't make "http" requests from an "https" page.
That out of the way, the second issue (the one that was being hidden by CORS security), is that your AJAX requests are being run in parallel right now.
In order to make this work the way you think it should (after the first one returns, run the second one), you would need to:
move all of the code at the bottom, relating to xhr_2 inside of the xhr.onload
move all of the code inside of xhr.done at the bottom inside of the xhr.onload and replace all of the duplicate information (and use the references to the returned results directly)
This results in something like:
var search = document.getElementById('search');
search.addEventListener('click', function () {
    var demo = document.getElementById('demo');
    var player_name = document.getElementById('player_name').value;
    var player_id;
    // Interpolated API URLs
    var name_url = 'https://na.api.pvp.net/api/lol/na/v1.4/summoner/by-name/' + player_name + '?api_key=<THIS IS THE API KEY>';
    var stats_url; // nested in the second ajax call to pass updated player_id
    // Get player ID
    var xhr = new XMLHttpRequest();
    var id_return_text;
    xhr.onload = function () {
        if (xhr.status === 200) {
            id_return_text = JSON.parse(xhr.responseText);
            player_id = id_return_text[player_name].id;
            demo.innerHTML = id_return_text[player_name].name + ', your player ID is: ' + player_id;
            // Dropped the XHR_2 stuff here
            var xhr_2 = new XMLHttpRequest();
            var stats_return_text;
            stats_url = "https://na.api.pvp.net/api/lol/na/v1.3/stats/by-summoner/" + player_id + "/summary?season=SEASON2016&api_key=<THIS IS THE API KEY>";
            // CHANGED THIS TO BE XHR_2.onload -- IN HERE I KNOW XHR_1 IS ALREADY FINISHED
            xhr_2.onload = function () {
                if (xhr_2.status == 200) {
                    stats_return_text = JSON.parse(xhr_2.responseText);
                    demo.innerHTML += stats_return_text['playerStatsSummaries'].playerStatType;
                }
            };
            xhr_2.open("GET", stats_url, true);
            xhr_2.send();
        }
    };
    xhr.open('GET', name_url, true);
    xhr.send();
});
That should solve practically all of your woes.
The point of this is that onload is a callback which gets fired long after the program has been run, but xhr_2 was firing immediately after you requested data for xhr_1 (not after the data had come back).
As such, player_id was undefined.
We want to wait until after we know we have player_id, and we know we have it (or some error) when we're inside the callback to xhr_1.onload.
This gets terribly confusing and very nested, and while I think that Promises and Async Functions / Generators are brilliant solutions for managing that complexity, that's way beyond the scope of this; so instead, I'd suggest looking at some functional composition, to simplify all of this:
function noop () { } // do nothing

function getJSON (url, onload, onerror) {
    var xhr = new XMLHttpRequest();
    onload = onload || noop;   // what I've been given or nothing
    onerror = onerror || noop; // " "
    xhr.onload = function () {
        var data;
        var error;
        try {
            // it's possible for parse to throw on malformed JSON
            data = JSON.parse(xhr.responseText);
        } catch (e) {
            error = e;
        }
        return error ? onerror(error) : onload(data); // fire one or the other (don't fall into the error handler if onload throws)
    };
    xhr.onerror = onerror;
    xhr.open("GET", url);
    xhr.send();
}

// localize URL construction
function buildPlayerIdUrl (name) { return "https://______" + name + "_____"; }
function buildPlayerStatsUrl (id) { return "https://______" + id + "_____"; }

// gets player by name and runs a function after the player has been loaded
function getPlayer (player_name, done, error) {
    var id_url = buildPlayerIdUrl(player_name);
    function buildPlayer (response) {
        var player = response[player_name];
        return player;
    }
    function onload (response) {
        done(buildPlayer(response));
    }
    // Load the JSON, build the player, pass the player to done()
    getJSON(id_url, onload, error);
}

// get stats by player id and runs a function after the stats have been loaded
function getPlayerStats (player_id, done, error) {
    var stats_url = buildPlayerStatsUrl(player_id);
    function buildStats (response) {
        var summary = response.playerStatsSummaries;
        return summary;
    }
    function onload (response) {
        done(buildStats(response));
    }
    // Load the JSON, build the stats, pass the stats to done()
    getJSON(stats_url, onload, error);
}

// perform a search by player name
// note: All changes in step-number (1, 2, 3) are asynchronous,
// and thus, must be nested in callbacks of some sort
function search (player_name) {
    // Step 1: load the player
    getPlayer(player_name, function (player) {
        // Step 2a: update the DOM with the player name/id
        updatePlayerDom(player);
        // Step 2b: load the player stats
        getPlayerStats(player.id, function (stats) {
            // Step 3: update the DOM with the stats
            updateStatsDom(stats);
        });
    });
}

// player DOM update; keeping it nice and simple
function updatePlayerDom (player) {
    document.querySelector(".Player-id").textContent = player.id;
    document.querySelector(".Player-name").textContent = player.name;
}

// stats DOM update; same as above
function updateStatsDom (stats) {
    document.querySelector(".Player-stats").textContent = stats.playerStatType;
}

// bootstrap yourself to your UI
some_button.onclick = function () {
    var player_name = some_input.value;
    search(player_name); // kick the whole thing off
};
It's definitely more code, but it's also simpler to make edits to each individual piece, without stepping on the toes of other pieces.
It's (hopefully) also easier to see the eventual timeline of all of the pieces, and how they flow, inside of search() itself.

Why do blank alert statements affect execution of other code?

I'm working on the front page of a website that will have a section showing news articles. Each article fades into the next one every 10 seconds. For some reason the code only executes correctly (keep in mind it's not entirely finished, so there may be other errors) with the inclusion of a couple of alert() statements. These were previously in there just for debugging, but currently it seems as though they serve some functional purpose. Without them, the code gives different results, if any at all.
I'm mainly a Java programmer, so there are probably some idiosyncrasies about JavaScript alert() statements that I'm not familiar with. The other odd thing that I noticed was that at times I would run the code multiple times with no changes and get different results. I used some of the alert() statements in the loadArticles() function to output the value of i and would occasionally get a different result without changing the code. The only idea I have so far is that my computer is taking time to run the statements that is allowing some other process to finish, but there shouldn't be any multi-threading involved.
The init() function is called in onload from the HTML and there's a div with id="news" somewhere in the center of the page.
On top of the main question, extra credit for anyone who could help out with why I'm sometimes not getting the articles to fade in and out. I'm pretty sure it has something to do with the article or container being null, but I haven't had time to get to that yet.
Here's the JavaScript:
var article_count = 0;
var count = 0;

function init() {
    getArticleCount();
    loadArticles();
    changeSlide();
    resize();
    resize();
}

function getArticleCount() {
    var xmlhttp = new XMLHttpRequest();
    xmlhttp.onreadystatechange = function() {
        if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
            article_count = xmlhttp.responseText;
        }
    };
    xmlhttp.open("GET", "getArticleCount.php", true);
    xmlhttp.send();
}

function loadArticles() {
    alert();
    for (i = 1; i <= article_count; i++) {
        alert();
        var xmlhttp = new XMLHttpRequest();
        xmlhttp.onreadystatechange = function() {
            if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
                alert();
                var news = document.createElement("iframe");
                news.className = "news";
                news.src = "articles/" + xmlhttp.responseText;
                news.style.zIndex = 0 - i;
                var container = document.getElementById("news");
                container.appendChild(news);
            }
        };
        alert();
        xmlhttp.open("GET", "getArticles.php?q=" + i, true);
        xmlhttp.send();
        alert();
    }
}

function changeSlide() {
    var article = document.getElementsByClassName("news")[count];
    var interval = setTimeout(function() {
        var fadeOut = article.fadeOut(1000, function() {
            if (count < article_count) {
                count++;
                changeSlide();
            } else {
                count = 0;
                resetSlides();
            }
        });
    }, 10000);
}

function resetSlides() {
    var articles = document.getElementsByClassName("news");
    for (j = 0; j < article_count; j++) {
        var fadeIn = articles[j].fadeIn(1000);
    }
    changeSlide();
}

function resize() {
    var body = $(document.body);
    var news = $("#news");
    $("#menu_left").width((body.outerWidth() - news.outerWidth()) / 2 - 3);
    $("#menu_right").width((body.outerWidth() - news.outerWidth()) / 2 - 3);
    $("#menu_contact").width(body.outerWidth());
}
There are lots of mistakes in your code, mostly related to the asynchronous nature of Ajax calls. You will need to learn more about programming with asynchronous operations to write correctly functioning, reliable, and consistent code.
alert() statements change the relative timing of asynchronous operations (such as when Ajax calls complete vs. when your other code runs).
In general, stop using alert() statements as a debugging tool, because they can influence the timing too much. Instead, use console.log() statements. Since console.log() just outputs to the console and does not block execution of the JavaScript thread, it will not impact the timing of things nearly as much as an alert() statement.
Here's a trivial example to show you how an alert() can change the timing of things:
var img = new Image();
img.src = "http://somedomain.com/myimg.jpg";
alert("Press OK to continue");
if (img.complete) {
    console.log("image is done loading");
} else {
    console.log("image is not yet done loading");
}
With the alert statement, you will get image is done loading in the console. Without the alert, you will get image is not yet done loading. The alert has changed the flow of your code.
Another thing that can affect the timing of your code is whether resources are in the browser cache or must be loaded over the network. In nearly all cases, properly written code that only uses resources when it knows they have been loaded will continue to work in either case. But, in cases with poorly written code, you may see a different behavior the first time a page is loaded vs. subsequent times when some of the resources are now cached.
To fix your specific code, you need to program asynchronously. That means using completion handlers for asynchronous operations like Ajax calls and calling callbacks to notify other code when asynchronous operation are done.
For example, your getArticleCount() function is asynchronous. It will finish its Ajax operation some time AFTER the getArticleCount() has already returned. You can change it to accept a callback like this:
function getArticleCount(callback) {
    var xmlhttp = new XMLHttpRequest();
    xmlhttp.onreadystatechange = function () {
        if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
            callback(xmlhttp.responseText);
        }
    };
    xmlhttp.open("GET", "getArticleCount.php", true);
    xmlhttp.send();
}
And, then you use it like this:
getArticleCount(function(cnt) {
    // in here you can use the article count
});
As for your .fadeOut() and .fadeIn() operations, those are not native DOM methods, so you can't call them on DOM objects like you are trying to do. It appears that you are attempting to use the jQuery methods of those names. To do that, you must load jQuery into your page, create jQuery objects that wrap the relevant DOM objects, and call .fadeOut() and .fadeIn() on the jQuery objects, not on the DOM objects.
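For example, assuming jQuery is loaded, changeSlide() might wrap the DOM element before animating it (a sketch based on the question's code, not a drop-in fix):
function changeSlide() {
    var article = document.getElementsByClassName("news")[count];
    setTimeout(function () {
        // wrap the DOM element in a jQuery object before calling fadeOut()
        $(article).fadeOut(1000, function () {
            if (count < article_count) {
                count++;
                changeSlide();
            } else {
                count = 0;
                resetSlides();
            }
        });
    }, 10000);
}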
Your loadArticles() function can be fixed by putting the Ajax calls inside an inner function. This allows each Ajax operation you start to have its own separate variables rather than having all of them collide and try to use the same variables. You can do that by using an IIFE (Immediately Invoked Function Expression) inside your for loop like this:
function loadArticles() {
    for (i = 1; i <= article_count; i++) {
        // pass i into the IIFE so each request keeps its own copy of the index
        (function (i) {
            var xmlhttp = new XMLHttpRequest();
            xmlhttp.onreadystatechange = function () {
                if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
                    var news = document.createElement("iframe");
                    news.className = "news";
                    news.src = "articles/" + xmlhttp.responseText;
                    news.style.zIndex = 0 - i;
                    var container = document.getElementById("news");
                    container.appendChild(news);
                }
            };
            xmlhttp.open("GET", "getArticles.php?q=" + i, true);
            xmlhttp.send();
        })(i);
    }
}
Note, because Ajax operations have indeterminate timing, this code does not guarantee that the items you are adding to the page will be added in any particular order. They are more than likely to be added in the order of your for loop, but that is not guaranteed. If the server happens to be quicker with one of the requests, it might finish first and get added to the page first, even though it was not the first one requested. One way to force the original order is sketched below.
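A sketch of one way to preserve the order (my own suggestion, not something the answer above spells out): store each response in an array slot keyed by its index, count completions, and append everything only after the last response arrives:
function loadArticlesInOrder() {
    var responses = [];
    var remaining = article_count;
    for (var i = 1; i <= article_count; i++) {
        (function (i) {
            var xmlhttp = new XMLHttpRequest();
            xmlhttp.onreadystatechange = function () {
                if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
                    responses[i] = xmlhttp.responseText; // remember which slot this response belongs to
                    if (--remaining === 0) {
                        appendAll(responses);            // all requests are back; append in index order
                    }
                }
            };
            xmlhttp.open("GET", "getArticles.php?q=" + i, true);
            xmlhttp.send();
        })(i);
    }
}

function appendAll(responses) {
    var container = document.getElementById("news");
    for (var i = 1; i < responses.length; i++) {
        var news = document.createElement("iframe");
        news.className = "news";
        news.src = "articles/" + responses[i];
        news.style.zIndex = 0 - i;
        container.appendChild(news);
    }
}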
And, since your resize() function already uses jQuery, you will find it a TON easier to use jQuery's Ajax support rather than coding your own Ajax calls. Plus, with jQuery Ajax, you can use the built-in promise interface to make your asynchronous programming and error handling substantially easier.
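For instance, a minimal sketch using jQuery's promise-style Ajax against the same two PHP endpoints (the exact structure here is an assumption on my part):
$.get("getArticleCount.php").then(function (cnt) {
    article_count = cnt;
    // only now is it safe to request the individual articles
    for (var i = 1; i <= article_count; i++) {
        loadArticle(i);
    }
});

function loadArticle(i) {
    $.get("getArticles.php?q=" + i).then(function (file) {
        $("#news").append(
            $("<iframe>", { "class": "news", src: "articles/" + file, css: { zIndex: -i } })
        );
    });
}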
The reason removing the alert() calls made your code stop working is that your functions getArticleCount() and loadArticles() make asynchronous requests for data. The alert popups halted the program while the AJAX request was off retrieving data, and it had returned its result by the time you closed the alert popup.
You can change those two functions to execute a callback function as a way to let other code know that they have finished:
function init() {
    getArticleCount(function() {
        // finished getting article count
        loadArticles(function() {
            // finished loading articles
            changeSlide();
            resize();
            resize();
        });
    });
}

function getArticleCount(callback) {
    var xmlhttp = new XMLHttpRequest();
    xmlhttp.onreadystatechange = function() {
        if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
            article_count = xmlhttp.responseText;
            callback(); // done
        }
    };
    xmlhttp.open("GET", "getArticleCount.php", true);
    xmlhttp.send();
}

function loadArticles(callback) {
    for (i = 1; i <= article_count; i++) {
        var xmlhttp = new XMLHttpRequest();
        xmlhttp.onreadystatechange = function() {
            if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
                var news = document.createElement("iframe");
                news.className = "news";
                news.src = "articles/" + xmlhttp.responseText;
                news.style.zIndex = 0 - i;
                var container = document.getElementById("news");
                container.appendChild(news);
                callback(); // done
            }
        };
        xmlhttp.open("GET", "getArticles.php?q=" + i, true);
        xmlhttp.send();
    }
}
As a side note, you can debug using the browser Developer Tools, and use console.log() and debugger;
XMLHttpRequest is an asynchronous call.
You make a getArticleCount request in order to get a count of articles.
Then, you have a loop:
for (i = 1; i <= article_count; i++) {
The getArticleCount request hasn't completed by the time this loop runs, so article_count is still equal to zero. You need to use onreadystatechange and move the dependent calls into callbacks:
function init() {
    getArticleCount();
}

function getArticleCount() {
    var xmlhttp = new XMLHttpRequest();
    xmlhttp.onreadystatechange = function() {
        if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
            article_count = xmlhttp.responseText;
            loadArticles();
        }
    };
    xmlhttp.open("GET", "getArticleCount.php", true);
    xmlhttp.send();
}

function loadArticles() {
    var container = document.getElementById("news");
    for (i = 1; i <= article_count; i++) {
        var xmlhttp = new XMLHttpRequest();
        xmlhttp.onreadystatechange = function() {
            if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
                var news = document.createElement("iframe");
                news.className = "news";
                news.src = "articles/" + xmlhttp.responseText;
                news.style.zIndex = 0 - i;
                container.appendChild(news);
            }
        };
        xmlhttp.open("GET", "getArticles.php?q=" + i, true);
        xmlhttp.send();
    }
}
Anyway, you have some architectural problems:
Making a separate count request apart from the data requests is redundant. Moreover, the count could change between the count request and the article requests.
Making many HTTP requests will cause a huge performance drop.
You should instead make a single PHP file that returns a JSON array with all of the articles. Then you can work with its length and every item, and it will work much faster while not causing any synchronization problems. A sketch of the client side follows.
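For example, assuming a hypothetical getAllArticles.php that returns something like ["a1.html", "a2.html", ...] (both the endpoint name and the payload shape are assumptions):
var xmlhttp = new XMLHttpRequest();
xmlhttp.onreadystatechange = function () {
    if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
        var articles = JSON.parse(xmlhttp.responseText); // e.g. ["a1.html", "a2.html"]
        var container = document.getElementById("news");
        article_count = articles.length;
        for (var i = 0; i < articles.length; i++) {
            var news = document.createElement("iframe");
            news.className = "news";
            news.src = "articles/" + articles[i];
            news.style.zIndex = -1 - i;
            container.appendChild(news);
        }
        changeSlide(); // start the slideshow once everything is on the page
    }
};
xmlhttp.open("GET", "getAllArticles.php", true);
xmlhttp.send();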
The alerts are (somewhat) synchronizing your Ajax calls. You have dependencies between getArticleCount and loadArticles. The alert() at the top of loadArticles causes execution to pause until you dismiss the alert. In that time the AJAX request to "getArticleCount.php" has completed and assigned a value to article_count.
Without the alerts pausing execution, your code is non-deterministic, because AJAX calls cannot be strung together in a procedural fashion and expected to behave synchronously. You will need to use a callback-based (functional) style to chain the AJAX calls.
For example, you could write it like this:
function init() {
    getArticleCount(loadArticles);
    resize();
    resize();
}

function getArticleCount(callback) {
    var xmlhttp = new XMLHttpRequest();
    xmlhttp.onreadystatechange = function() {
        if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
            // callback will be loadArticles()
            callback.call(null, xmlhttp.responseText);
        }
    };
    xmlhttp.open("GET", "getArticleCount.php", true);
    xmlhttp.send();
}

function loadArticles(articleCnt) {
    for (i = 1; i <= articleCnt; i++) {
        var xmlhttp = new XMLHttpRequest();
        xmlhttp.onreadystatechange = function() {
            if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
                var news = document.createElement("iframe");
                news.className = "news";
                news.src = "articles/" + xmlhttp.responseText;
                news.style.zIndex = 0 - i;
                var container = document.getElementById("news");
                container.appendChild(news);
                if (i == articleCnt) {
                    // modify changeSlide to use the passed-in count instead of the global
                    changeSlides(articleCnt);
                }
            }
        };
        xmlhttp.open("GET", "getArticles.php?q=" + i, true);
        xmlhttp.send();
    }
}
When calling getArticleCount, the callback parameter is loadArticles, which has been modified to accept the article count as a parameter instead of using the global. The above code should fix your issue. You should also modify the other functions to take a local article count and stop relying on the global.

Why does XMLHttpRequest.abort prevent all Ajax calls except the first inside of a loop

I'm tinkering with achieving threading in Javascript by sending bogus Ajax requests. However I'm surprised at the behavior of my code below.
With xhr.abort() commented out, both "quarter" and "half" get interspersed to the console as I expect. But if xhr.abort() is called, only "quarter" gets sent to the console, and not "half".
Any insight into how XMLHttpRequest's abort method works, or the situations where it applies, would be appreciated. I've looked at the documentation at https://developer.mozilla.org/en/DOM/XMLHttpRequest and it says "abort() ... aborts the request if it has already been sent." The key being the *singular* request, not all the others, as though XMLHttpRequest were a singleton.
function parallelize(bogusUrl, parallelFns) {
    for (var i = 0, n = parallelFns.length; i < n; i++) {
        var fn = parallelFns[i],
            xhr;
        try { xhr = new XMLHttpRequest(); }
        catch (e) { xhr = new ActiveXObject('Microsoft.XMLHTTP'); }
        xhr.onreadystatechange = function () {
            if (xhr.readyState == 1) {
                fn();
                //xhr.abort();
            }
        };
        xhr.open('GET', bogusUrl);
        xhr.send(null);
    }
}

parallelize('bogusUrl', [
    function () { setInterval(function () { console.log('quarter') }, 250) },
    function () { setInterval(function () { console.log('half') }, 500) }
]);
Blocks do not create a scope in JavaScript; only functions do. Thus, every iteration of your loop is sharing the same "xhr" variable, and its value is overwritten on each iteration.
To fix the problem, you can create a separate function to do the xhr work:
function doXHR(fn) {
    var xhr = null;
    try { xhr = new XMLHttpRequest(); }
    catch (e) { xhr = new ActiveXObject('Microsoft.XMLHTTP'); }
    xhr.onreadystatechange = function () {
        if (xhr.readyState == 1) {
            fn();
            //xhr.abort();
        }
    };
    xhr.open('GET', bogusUrl); // bogusUrl comes from the enclosing parallelize() scope
    xhr.send(null);
}
Then your loop would look like this:
for (var i=0, n=parallelFns.length; i<n; i++)
doXHR( parallelFns[i] );

Javascript: XHR not handling multiple async requests

Hi, I am trying to access one resource multiple times with different parameters.
In this case requesting
var domains = [
    'host1',
    'host2'
];
var requests = new Array();
for (i in domains) {
    requests[i] = new request(domains[i]);
}

function request(site) {
    var url = 'get_remote_status.php?host=' + site;
    var queues = {};
    http_request = new XMLHttpRequest();
    http_request.open("GET", url, true, 'username', 'password');
    http_request.onreadystatechange = function () {
        var done = 4, ok = 200;
        if (http_request.readyState == done && http_request.status == ok) {
            queues = JSON.parse(http_request.responseText);
            var queuesDiv = document.getElementById('queues');
            print_queues(queues, queuesDiv, site);
        }
    };
    http_request.send(null);
}
However, only one of the requests is being handled by the callback. Chromium reports that both requests have been received, and they are viewable in the Resources pane.
Also, if I make the requests synchronous then it works fine. However, this is not acceptable for the release code, as a request may time out.
Thanks
Define http_request using var. Currently, you're assigning the XHR object to a global variable. Because of this, your script can only handle one XHR at a time.
Relevant erroneous code:
function request(site)
{
    var url = 'get_remote_status.php?host=' + site;
    var queues = {};
    http_request = new XMLHttpRequest();
Proposed change:
function request(site)
{
    var url = 'get_remote_status.php?host=' + site;
    var queues = {};
    var http_request = new XMLHttpRequest(); // VAR VAR VAR !!!
When you omit var before a variable, the variable will be defined in the global (window) scope. If you use var before a variable, the variable is defined within the local scope (in function request, in this case).
In fact it is possible to run multiple async XHR calls, but you have to give each one a unique id as a parameter so you can store and load its result into the right place in your DOM.
For example, say you'd like to loop over an array and make an AJAX call for each object. It's a little bit tricky, but this code works for me:
var xhrarray = {};
for (var j = 0; j < itemsvals.length; j++) {
    var labelval = itemsvals[j];
    // call ajax list if present.
    if (typeof labelval.mkdajaxlink != 'undefined') {
        var divlabelvalue = '<div id="' + labelval.mkdid + '_' + item.mkdcck + '" class="mkditemvalue col-xs-12 ' + labelval.mkdclass + '"><div class="mkdlabel">' + labelval.mkdlabel + ' :</div><div id="' + j + '_link_' + labelval.mkdid + '" class="mkdvalue">' + labelval.mkdvalue + '</div></div>';
        mkdwrapper.find('#' + item.mkdcck + ' .mkdinstadivbody').append(divlabelvalue);
        xhrarray['xhr_' + item.mkdcck] = new XMLHttpRequest();
        xhrarray['xhr_' + item.mkdcck].uniqueid = '' + j + '_link_' + labelval.mkdid + '';
        console.log(xhrarray['xhr_' + item.mkdcck].uniqueid);
        xhrarray['xhr_' + item.mkdcck].open('POST', labelval.mkdajaxlink);
        xhrarray['xhr_' + item.mkdcck].send();
        console.log('data sent');
        xhrarray['xhr_' + item.mkdcck].onreadystatechange = function () {
            if (this.readyState == 4) {
                console.log('' + this.uniqueid);
                document.getElementById('' + this.uniqueid).innerHTML = this.responseText;
            }
        };
    }
}
You have to store each XHR object in a global object and give it a uniqueid property, so that inside the handler you can read this.uniqueid and load the result into the right element.
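Stripped of the markup above, the core of the pattern is just tagging each request with the id of the element its response should fill (a simplified sketch; the element IDs and hosts are hypothetical):
var xhrs = {};
['result_1', 'result_2'].forEach(function (targetId, i) {
    var xhr = new XMLHttpRequest();
    xhr.uniqueid = targetId;           // remember where this response should go
    xhr.onreadystatechange = function () {
        if (this.readyState == 4) {
            document.getElementById(this.uniqueid).innerHTML = this.responseText;
        }
    };
    xhr.open('GET', 'get_remote_status.php?host=host' + (i + 1));
    xhr.send();
    xhrs['xhr_' + i] = xhr;            // keep the request around, mirroring the xhrarray object above
});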
Hope that will help you in the future.
