Is it possible to stop the execution of a previous event when the event is called again?
To clarify, I have a button <button onclick='load()'>load</button> that calls a load() function, which gets an array, processes each element, and displays the results in a list <ul id='main'></ul>.
function load(event) {
    $("#main").empty(); // empty old elements
    $.get("load.php", '', function (data) {
        var arr = JSON.parse(data);
        for (var i = 0; i < arr.length; ++i) {
            process(arr[i]); // process and append to #main
        }
    });
}
The problem is that if I click the button again while it's still appending elements to the list, I get the new list plus the rest of the old list.
Is there a way to stop the first event while it's still executing, but still execute the second event?
You should try this:
var xhr;

function load(ev) {
    if (ev.eventPhase === 2) {
        if (xhr) xhr.abort();
        $('#main').empty();
        xhr = $.get('load.php', function (data) {
            var a = JSON.parse(data);
            for (var i = 0, l = a.length; i < l; i++) {
                process(a[i]);
            }
        });
    }
}
I may be wrong, but something along these lines should also work: keep a reference to the request and abort it when a new click comes in.
var req = $.ajax({
    // ...
});
// abort the in-flight request if the user clicks again
$("#main").on("click", function () { req.abort(); });
...
// remove the handler once the request has completed
$("#main").off("click");
As noted, you can stop the event by setting a flag and checking it, but a better approach is simply to assign the new value directly. If your code works, it means JSON.parse is already returning an array.
That means
"use strict";
(function () {
function load(event) {
$("#main").empty();
$.get("load.php", '', function (data) {
process = JSON.parse(data);
$("#main").whateverMethodFillsTheElement(process);
});
}());
Also, when writing asynchronous JavaScript code that makes HTTP requests, promises are preferred to callbacks. Since $.get returns a promise-compatible jqXHR object, you can write
"use strict";
(function () {
function load(event) {
$("#main").empty();
$.get("load.php")
.then(function (data) {
var items = JSON.parse(data);
$("#main").whateverMethodFillsTheElement(items);
});
}
}());
As discussed in comments, the aim is to use each item in another request which provides the actual value to add to 'main'. So loading data triggers an asynchronous call for each loaded item.
To accommodate this, we need to determine a key field that we can use to track each item so we do not append existing items to the list. We will call this field id for the sake of exposition.
"use strict";
(function () {
var allItems = [];
function load(event) {
$("#main").empty();
$.get("load.php")
.then(function (data) {
return JSON.parse(data);
})
.then(function (items) {
items.forEach(item => {
processItem(item)
.then(function (processed) {
var existingItem = allItems.filter(i => i.id === item.id)[0];
if(existingItem) {
var existingIndex = allItems.indexOf(existingItem);
allItems[existingIndex] = processed;
}
else {
allItems.push(processed);
}
});
});
});
}
}());
OK, it seems it's not possible to stop an Ajax success function after it has started executing, or to stop a past event without aborting the current one.
But the following solution worked for me, so I figured I'd post it here:
var num = 0;

function load() {
    var curNum = ++num;
    $("#main").empty();
    $.get("load.php", '', function (data) {
        var arr = JSON.parse(data);
        for (var i = 0; i < arr.length; ++i) {
            process(arr[i], curNum);
        }
    });
}

function process(item, curNum) {
    if (curNum === num) { // don't process if a new request has been made
        // get 'data' based on 'item'...
        if (curNum === num) { // check again in case a new request was made in the meantime
            $("#main").append(data);
        }
    }
}
I appreciate everyone's help.
Related
I am trying to use jQuery's $.when.apply to wait for an unknown number of requests to finish before calling the next function (loadTab). I thought I was using it correctly, but it is definitely not waiting for all the requests to finish before loadTab() is called, so I'm not sure what is wrong. Here is my code:
function update(changes) {
    var deferreds = [];
    // must loop over changes and call individual requests
    for (var i = 0; i < changes.length; i++) {
        var direction = changes[i]['direction'];
        var data = changes[i]['data'];
        if (direction == 'add') {
            deferreds.push[add(data)];
        }
        else if (direction == 'edit') {
            deferreds.push[edit(data)];
        }
        else if (direction == 'delete') {
            deferreds.push[delete(data)];
        }
    }
    return $.when.apply($, deferreds); // this when is resolving too soon
}
function add(data) {
    return $.ajax({
        url: 'add',
        data: data,
        method: 'post',
        error: ajaxErrorFcn
    })
    .then(function (response) {
        handleTimeout(response);
    });
}

function edit(data) {
    return $.ajax({
        url: 'edit',
        data: data,
        method: 'post',
        error: ajaxErrorFcn
    })
    .then(function (response) {
        handleTimeout(response);
    });
}

function delete(data) {
    return $.ajax({
        url: 'delete',
        data: data,
        method: 'post',
        error: ajaxErrorFcn
    })
    .then(function (response) {
        handleTimeout(response);
    });
}
// this is the sequence of events I'm trying to sort out
showLoad('#container');
var changes = buildChangesArray();
update(changes)
    .then(function (response) {
        if (handleTimeout(response)) {
            // this is executing before the requests triggered by update() are complete
            return loadTab()
                .then(function (response) {
                    // do some other stuff
                });
        }
    })
    .then(function (response) {
        hideLoad('#container');
    });
Update:
The original issue was resolved (there was a typo in my calls to .push(): I used brackets instead of parentheses), but now I have a new issue with this code. I need to modify the update() function to run the delete actions first, and then run the add and edit actions. This is what I have, but now I am seeing the add and edit actions start to run before the delete actions finish:
function update(changes) {
    var deferreds = [];
    var deletes = [];
    // must loop over changes and call individual requests
    for (var i = 0; i < changes.length; i++) {
        var direction = changes[i]['direction'];
        var data = changes[i]['data'];
        if (direction == 'add') {
            deferreds.push(add(data));
        }
        else if (direction == 'edit') {
            deferreds.push(edit(data));
        }
        else if (direction == 'delete') {
            deletes.push(delete(data));
        }
    }
    // we need to perform all the delete operations first, then the adds/edits
    return $.when.apply($, deletes) // this when is resolving too soon
        .then(function (response) {
            return $.when.apply($, deferreds);
        });
}
Well, looks like I found my own solution :)
I think the problem was that when the asynchronous calls to add(), edit(), and delete() are pushed into the arrays, they are also being called at that point! This wasn't a problem in the first version, because there it didn't matter what order the adds, edits, and deletes ran in, as long as they were all done before loadTab() was called. It does pose a problem, however, when all the deletes need to finish before any adds or edits run, because the adds and edits start running as soon as they are pushed into the array rather than waiting for the deletes.
To fix this, I changed the code to the following:
function update(changes) {
    var loop = function (deleteOnly) {
        var array = [];
        // must loop over changes and call individual requests
        for (var i = 0; i < changes.length; i++) {
            var direction = changes[i]['direction'];
            var data = changes[i]['data'];
            if (direction == 'add' && !deleteOnly) {
                array.push(add(data));
            }
            else if (direction == 'edit' && !deleteOnly) {
                array.push(edit(data));
            }
            else if (direction == 'delete' && deleteOnly) {
                array.push(delete(data));
            }
        }
        return array;
    };
    // we need to perform all the delete operations first
    return $.when.apply($, loop(true)) // true means only get the deletes
        .then(function (response) {
            return $.when.apply($, loop(false)); // false means only get the adds/edits
        });
}
So the requests still start running as soon as they are pushed into the array, but this way we can separate out the deletes to make sure they finish first.
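For what it's worth, the underlying point is that calling add(data), edit(data), or delete(data) starts its request immediately, so the arrays only ever hold already-started requests. A minimal sketch of the alternative (my own illustration, not code from this thread) is to collect functions that start the requests and invoke them only when you are ready; note I've used removeItem in place of delete, since delete is a reserved word and can't actually be a function name:
// sketch: collect thunks (functions that start requests) instead of
// already-started requests; nothing is sent until a thunk is invoked
function update(changes) {
    var deleteThunks = [];
    var otherThunks = [];
    changes.forEach(function (change) {
        var data = change['data'];
        if (change['direction'] === 'delete') {
            deleteThunks.push(function () { return removeItem(data); });
        } else {
            var fn = change['direction'] === 'add' ? add : edit;
            otherThunks.push(function () { return fn(data); });
        }
    });
    // start the deletes now, and start the adds/edits only once they resolve
    return $.when.apply($, deleteThunks.map(function (t) { return t(); }))
        .then(function () {
            return $.when.apply($, otherThunks.map(function (t) { return t(); }));
        });
}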
I have a function, and inside it I am using the $.each method. I want to call another function, alertMsg(), after $.each has completely finished executing. But when I use breakpoints I can see that the alertMsg function executes before the $.each method finishes. Why? How do I solve it?
function test(hospitalJson, blockJson) {
    $.each(hospitalJson.organisationUnits, function (i, curr_hos) {
        if (curr_hos.id == orgUnit.id) {
            var stringPath = [];
            stringPath = curr_hos.path.split("/");
            outerloop:
            for (var i = 0; i < stringPath.length; i++) {
                for (var j = 0; j < blockJson.length; j++) {
                    if (stringPath[i] == blockJson[j].id) {
                        document.getElementById('blockID').innerHTML = blockJson[j].id;
                        break outerloop;
                    }
                }
            }
            // to get district name
            var districtNameURL = "../api/organisationUnits.json?fields=name&filter=id:in:[" + curr_hos.path.split("/")[4] + "]";
            $.get(districtNameURL, function (district) {
                districtName = district.organisationUnits[0].name;
                console.log(districtName);
                document.getElementById('districtID').innerHTML = districtName;
            });
            alertMsg = 1;
            return false;
        }
    });
    // this message executes before $.each finishes
    alert(alertMsg);
}
Because $.each has multiple AJAX calls inside it, you need to create an array containing the Promise objects that need to be resolved. Since you may not know the exact size of the parsed JSON object, and jQuery's $.when cannot take an array directly, you need to apply it over the array with $.when.apply.
function test(hospitalJson, blockJson) {
    var deferreds = [];
    $.each(hospitalJson.organisationUnits, function (i, curr_hos) {
        //...
        deferreds.push(
            $.get(districtNameURL, function (district) {
                districtName = district.organisationUnits[0].name;
                console.log(districtName);
                document.getElementById('districtID').innerHTML = districtName;
            }));
    });
    return deferreds;
}
var resolveData = test(hospitalJson, blockJson);
$.when.apply(null, resolveData).done(function () {
    alert(alertMsg);
});
Change:
$.get(districtNameURL, function (district) {
    districtName = district.organisationUnits[0].name;
    console.log(districtName);
    document.getElementById('districtID').innerHTML = districtName;
});
To:
$.ajax({
    url: districtNameURL,
    type: "GET",
    async: false
})
.success(function (district) {
    districtName = district.organisationUnits[0].name;
    console.log(districtName);
    document.getElementById('districtID').innerHTML = districtName;
});
This stops the GET from being asynchronous, so your logic will be processed in the expected order.
What I am trying to do here is:
Remove all contents in a class first, because the events.json file is updated every day. (My first question here: is there a better way to remove all contents from a database class on Parse?)
Then I will send a request to get events.json and store the "name" and "id" of each result in a 2D array.
Then I will send multiple requests to get the JSON files for each "name"/"id" pair.
Finally, I will store the event details in the database (one event per row). But right now my code terminates before it has downloaded the JSON files.
Code:
function newLst(results) {
    var event = Parse.Object.extend("event");
    for (var i = 0; i < results.length; i++) {
        Parse.Cloud.httpRequest({
            url: 'https://api.example.com/events/' + results[i].name + '/' + results[i].id + '.json',
            success: function (newLst) {
                var newJson = JSON.parse(newLst.text);
                var newEvent = new event();
                newEvent.set("eventId", newJson.data.id);
                newEvent.set("eventName", newJson.data.title);
                newEvent.save(null, {
                    success: function (newEvent) {
                        alert('New object created with objectId: ' + newEvent.id);
                    },
                    error: function (newEvent, error) {
                        alert('Failed to create new object, with error code: ' + error.message);
                    }
                });
            },
            error: function (newLst) {
            }
        });
    }
};
Parse.Cloud.job("getevent", function(request, status) {
var event = Parse.Object.extend("event");
var query = new Parse.Query(event);
query.notEqualTo("objectId", "lol");
query.limit(1000);
query.find({
success: function(results) {
for (var i = 0; i < results.length; i++) {
var myObject = results[i];
myObject.destroy({
success: function(myObject) {
},
error: function(myObject, error) {
}
});
}
},
error: function(error) {
alert("Error: " + error.code + " " + error.message);
}
});
var params = { url: 'https://api.example.com/events.json'};
Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var jsonobj = JSON.parse(httpResponse.text);
for (var i = 0; i < jsonobj.data.length; i++) {
var tmp2D = {"name":"id"}
tmp2D.name = [jsonobj.data[i].name];
tmp2D.id = [jsonobj.data[i].id];
results.push(tmp2D);
}
newLst(results);
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});
I think my original answer is correct as a standalone. Rather than make it unreadable with the additional code, here it is made very specific to your edit.
The key is to eliminate passed callback functions. Everything below uses promises. Another key idea is decompose the activities into logical chunks.
A couple of caveats: (1) There's a lot of code there, and the chances that either your code is mistaken or mine is are still high, but this should communicate the gist of a better design. (2) We're doing enough work in these functions that we might bump into a parse-imposed timeout. Start out by testing all this with small counts.
Start with your question about destroying all instances of class...
// return a promise to destroy all instances of the "event" class
function destroyEvents() {
    // is your event class really named with lowercase? uppercase is conventional
    var query = new Parse.Query("event");
    query.notEqualTo("objectId", "lol"); // doing this because the OP code did it. not sure why
    query.limit(1000);
    return query.find().then(function (results) {
        return Parse.Object.destroyAll(results);
    });
}
Next, get remote events and format them as simple JSON. See the comment. I'm pretty sure your idea of a "2D array" was ill-advised, but I may be misunderstanding your data...
// return a promise to fetch remote events and format them as an array of objects
//
// note - this differs from the OP data. this will evaluate to:
// [ { "name":"someName0", id:"someId0" }, { "name":"someName1", id:"someId1" }, ...]
//
// original code was producing:
// [ { "name":["someName0"], id:["someId0"] }, { "name":["someName1"], id:["someId1"] }, ...]
//
function fetchRemoteEvents() {
    var params = { url: 'https://api.example.com/events.json' };
    return Parse.Cloud.httpRequest(params).then(function (httpResponse) {
        var results = [];
        var remoteEvents = JSON.parse(httpResponse.text).data;
        for (var i = 0; i < remoteEvents.length; i++) {
            var remoteEvent = { "name": remoteEvents[i].name, "id": remoteEvents[i].id };
            results.push(remoteEvent);
        }
        return results;
    });
}
Please double check all of my work above regarding the format (e.g. response.text, JSON.parse().data, etc).
It's too easy to get confused when you mix callbacks and promises, and it's even worse when you're generating promises in a loop. Here again, we break out a simple operation: creating a single parse.com object based on one of the remote events we got in the function above...
// return a promise to create a new native event based on a remoteEvent
function nativeEventFromRemoteEvent(remoteEvent) {
    var url = 'https://api.example.com/events/' + remoteEvent.name + '/' + remoteEvent.id + '.json';
    return Parse.Cloud.httpRequest({ url: url }).then(function (response) {
        var eventDetail = JSON.parse(response.text).data;
        var Event = Parse.Object.extend("event");
        var event = new Event();
        event.set("eventId", eventDetail.id);
        event.set("eventName", eventDetail.title);
        return event.save();
    });
}
Finally, we can bring it together in a job that is simple to read, certain to do things in the desired order, and certain to call success() when (and only when) it finishes successfully...
// the parse job removes all events, fetches remote data that describe events
// then builds events from those descriptions
Parse.Cloud.job("getevent", function (request, status) {
    destroyEvents().then(function () {
        return fetchRemoteEvents();
    }).then(function (remoteEvents) {
        var newEventPromises = [];
        for (var i = 0; i < remoteEvents.length; i++) {
            var remoteEvent = remoteEvents[i];
            newEventPromises.push(nativeEventFromRemoteEvent(remoteEvent));
        }
        return Parse.Promise.when(newEventPromises);
    }).then(function () {
        status.success("run job");
    }, function (error) {
        status.error(error);
    });
});
The posted code does just one HTTP request, so there's no need for an array of promises or the invocation of Promise.when(). The rest of what might be happening is obscured by mixing the callback parameters to httpRequest with the promises, and by the assignment inside the push.
Here's a clarified rewrite:
Parse.Cloud.job("getevent", function(request, status) {
var promises = [];
var params = { url: 'https://api.example.com'};
Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var jsonobj = JSON.parse(httpResponse.text);
for (var i = 0; i < jsonobj.data.length; i++) {
// some code
}
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});
But there's a very strong caveat here: this works only if the "// some code" that appears in your original post doesn't itself try to do any async work, database or otherwise.
Let's say you do need to do async work in that loop. Move that work to a promise-returning function, collect those promises in an array, and then use Promise.when(), e.g....
// return a promise to look up some object, change it and save it...
function findChangeSave(someJSON) {
    var query = new Parse.Query("SomeClass");
    query.equalTo("someAttribute", someJSON.lookupAttribute);
    return query.first().then(function (object) {
        object.set("someOtherAttribute", someJSON.otherAttribute);
        return object.save();
    });
}
Then, in your loop...
var jsonobj = JSON.parse(httpResponse.text);
var promises = [];
for (var i = 0; i < jsonobj.data.length; i++) {
    // some code, which is really:
    var someJSON = jsonobj.data[i];
    promises.push(findChangeSave(someJSON));
}
return Parse.Promise.when(promises);
I have some JavaScript code that updates data in a database using an HTTP handler, but this async call is made inside an .each loop. At the end of the loop I call a function, CancelChanges(), that refreshes the page. The problem is that the page seems to refresh before the database is updated; the .each loop seems to finish after the call to CancelChanges(). How can I make sure the page is refreshed only after all the async calls in the .each loop have completed?
function SaveChanges() {
    if (PreSaveValidation()) {
        var allChangesSucceeded = true;
        var studioId = $("#param_studio_id").val();
        var baseDate = $("#param_selected_month").val().substring(6, 10) + $("#param_selected_month").val().substring(0, 2);
        var currency = "CAD";
        var vacationPct = null;
        var gvAdmissible = null;
        $(".editable-unsaved").each(function () {
            var newSalary = $(this).text();
            var disciplineId = $(this).data("disciplineid");
            var seniorityId = $(this).data("seniorityid");
            var handlerCommand = "";
            if ($(this).data("valuetype") === "inflated") {
                handlerCommand = "AddAverageSalary";
            } else if ($(this).data("valuetype") === "actual") {
                handlerCommand = "UpdateAverageSalary";
            }
            $.get("WS/AverageSalary.ashx", { command: handlerCommand, studio_id: studioId, discipline_id: disciplineId, seniority_id: seniorityId, base_date: baseDate, currency: currency, salary: newSalary, vacation_pct: vacationPct, gv_admissible: gvAdmissible }).done(function (data) {
                if (data != "1") {
                    $(this).removeClass("editable-unsaved");
                    allChangesSucceeded = true;
                }
                else {
                    alert('fail');
                    allChangesSucceeded = false;
                }
            });
        });
        if (allChangesSucceeded) CancelChanges();
    }
}

function CancelChanges() {
    var href = window.location.href;
    href = href.split('#')[0];
    window.location.href = href;
}
You could try using Promises and jQuery $.when
Store a list of the ajax call promises:
var deferreds = [];
$(".editable-unsaved").each(function () {
    //...
    deferreds.push($.get("WS/AverageSalary.ashx" /*...*/));
});

$.when.apply($, deferreds).done(function () {
    CancelChanges();
});
This should, hopefully, wait for all the ajax calls to finish before calling CancelChanges()
I think you need to change your structure a little bit, using a counter and calling CancelChanges when the counter equals the number of calls.
function SaveChanges() {
    if (PreSaveValidation()) {
        var studioId = $("#param_studio_id").val();
        var baseDate = $("#param_selected_month").val().substring(6, 10) + $("#param_selected_month").val().substring(0, 2);
        var currency = "CAD";
        var vacationPct = null;
        var gvAdmissible = null;
        var editableUnsaveds = $(".editable-unsaved"); // cache the selector here, because selectors are costly
        var numOfGetsReturned = 0;
        editableUnsaveds.each(function () {
            var newSalary = $(this).text();
            var disciplineId = $(this).data("disciplineid");
            var seniorityId = $(this).data("seniorityid");
            var handlerCommand = "";
            if ($(this).data("valuetype") === "inflated") {
                handlerCommand = "AddAverageSalary";
            } else if ($(this).data("valuetype") === "actual") {
                handlerCommand = "UpdateAverageSalary";
            }
            $.get("WS/AverageSalary.ashx", { command: handlerCommand, studio_id: studioId, discipline_id: disciplineId, seniority_id: seniorityId, base_date: baseDate, currency: currency, salary: newSalary, vacation_pct: vacationPct, gv_admissible: gvAdmissible }).done(function (data) {
                if (data != "1") {
                    $(this).removeClass("editable-unsaved");
                }
                else {
                    alert('fail');
                }
                if (editableUnsaveds.length === ++numOfGetsReturned) {
                    CancelChanges(); // now it should call when the final get call finishes.
                }
            });
        });
    }
}

function CancelChanges() {
    var href = window.location.href;
    href = href.split('#')[0];
    window.location.href = href;
}
I'd use promises. The q library is my favorite way to implement them. But since you're using jQuery, I'd recommend following a similar approach to what I outline below, but using $.when instead of q.allSettled.
I often use promises when scraping tons of websites at once -- I need to iterate through a long list of websites, make requests for content, and do something with the content when the requests return. The last thing I want to do is send requests one at a time, handling each one as it returns.
In the abstract, that looks something like this:
function scrapeFromMany() {
    var promises = [];
    _.forEach(urls, function (url) {
        // this makes the request
        var promise = scraper(url);
        // this stores the promise with the others you iterate through
        promises.push(promise);
    });
    q.allSettled(promises).then(function (res) {
        // this function is executed when all of the promises (requests) have been resolved
        console.log("Everything is done -- do something with the results.", res);
    });
}
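For reference, a sketch of the same pattern using jQuery's $.when (which the question already uses) might look like the following; note that, unlike q.allSettled, $.when rejects as soon as any one of the promises fails:
// same abstract example as above, but with $.when instead of q.allSettled,
// assuming scraper(url) returns a jqXHR/promise
function scrapeFromMany() {
    var promises = _.map(urls, function (url) {
        return scraper(url);
    });
    $.when.apply($, promises).done(function () {
        // arguments holds one entry per resolved request
        console.log("Everything is done -- do something with the results.", arguments);
    });
}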
FWIW, promises aren't that easy to grok if you've never used them. If that's the case, plan on spending some time getting up to speed with the concepts. They'll change (for the much, much better) the way you write async JavaScript, and they really are the blessed path for these sorts of operations.
Asynchronously call your check function within the "done" function handler. Keep track of how many requests have completed, and only do your processing once that's equal to the total number of expected requests.
if (PreSaveValidation()) {
    var allChangesSucceeded = true;
    var length = $(".editable-unsaved").length;
    var completedCount = 0;
    // ...
    $(".editable-unsaved").each(function () {
        // ...
        $.get("WS/AverageSalary.ashx", data).done(function (data) {
            completedCount++;
            if (data != "1") {
                $(this).removeClass("editable-unsaved");
                // don't set all changes succeeded to true here
            }
            else {
                alert('fail');
                allChangesSucceeded = false;
            }
            isComplete(length, completedCount, allChangesSucceeded);
        });
    });
}

function isComplete(totalLength, currentLength, allChangesSucceeded) {
    if (currentLength == totalLength) {
        // should this be !allChangesSucceeded?
        if (allChangesSucceeded) CancelChanges();
    }
}
This happens because you are not waiting for the requests to complete before proceeding with the loop.
To achieve that, you have to set the "async" flag to false.
The call to the server should look like this:
$.ajax({
    url: "WS/AverageSalary.ashx",
    async: false,
    data: { command: handlerCommand, studio_id: studioId, discipline_id: disciplineId, seniority_id: seniorityId, base_date: baseDate, currency: currency, salary: newSalary, vacation_pct: vacationPct, gv_admissible: gvAdmissible },
    success: function (data) {
        if (data != "1") {
            $(this).removeClass("editable-unsaved");
            allChangesSucceeded = true;
        }
        else {
            alert('fail');
            allChangesSucceeded = false;
        }
    }
});
This is my first week in Node, so I'm sorry if this is a no-brainer.
The code works and does what it should, but I can't figure out how to match the name (url) that starts http.get with the result it gets from the website.
I found this, which is almost like my problem, except that it is a premade function so I can't edit the function and add a callback:
variable scope in asynchronous function
If I could run this code synchronously or make a callback in the http.get function it would all be good, but I don't have the skills and don't know if you can even do it.
Thanks
- Robin.
var http = require('http');

function download(name) {
    // name is an array with csgo item names.
    for (var i = 0; i < name.length; i++) {
        var marketHashName = getGoodName(name[i]);
        var url = 'http://steamcommunity.com/market/priceoverview/?currency=1&appid=730&market_hash_name=' + marketHashName;
        http.get(url, function (res) {
            var data = "";
            res.on('data', function (chunk) {
                data += chunk;
            });
            res.on("end", function () {
                data = JSON.parse(data);
                var value = 0;
                // get the value in the json array
                if (data.median_price) {
                    value = data.median_price;
                } else {
                    value = data.lowest_price;
                }
                value = value.substr(5);
                console.log("WEAPON", value);
                // callback with name/link and value?
                // callback(name, value);
            });
        }).on("error", function () {
        });
    }
}
You can just add a callback argument and then call it with the final data. And, if you want to pass to the callback the particular marketHashName that was being processed, then you can create a closure to capture that uniquely for each time through the for loop:
var http = require('http');

function download(name, callback) {
    // name is an array with csgo item names.
    for (var i = 0; i < name.length; i++) {
        var marketHashName = getGoodName(name[i]);
        // create closure to capture marketHashName uniquely for each
        // iteration of the for loop
        (function (theName) {
            var url = 'http://steamcommunity.com/market/priceoverview/?currency=1&appid=730&market_hash_name=' + theName;
            http.get(url, function (res) {
                var data = "";
                res.on('data', function (chunk) {
                    data += chunk;
                });
                res.on("end", function () {
                    data = JSON.parse(data);
                    var value = 0;
                    // get the value in the json array
                    if (data.median_price) {
                        value = data.median_price;
                    } else {
                        value = data.lowest_price;
                    }
                    value = value.substr(5);
                    console.log("WEAPON", value);
                    // now that the async function is done, call the callback
                    // and pass it our results
                    callback(theName, value, data);
                });
            }).on("error", function () {
            });
        })(marketHashName);
    }
}
// sample usage (name is an array of item names):
download(["whatever"], function (name, value, data) {
    // put your code here to use the results
});
FYI, you may find that the request module, which is a higher-level set of functionality built on top of the http module, will save you some work.
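A rough sketch of what that might look like (my own illustration, reusing getGoodName and the Steam URL from the code above; the request package needs to be installed with npm install request):
var request = require('request');

function download(names, callback) {
    // names is an array of csgo item names
    names.forEach(function (rawName) {
        var marketHashName = getGoodName(rawName);
        var url = 'http://steamcommunity.com/market/priceoverview/?currency=1&appid=730&market_hash_name=' + marketHashName;
        // request buffers the whole response body, so there is no need to
        // concatenate 'data' chunks by hand
        request(url, function (error, response, body) {
            if (error) return;
            var data = JSON.parse(body);
            var value = (data.median_price || data.lowest_price).substr(5);
            // marketHashName is captured per iteration because forEach gives
            // each callback its own scope, so no extra closure is needed
            callback(marketHashName, value, data);
        });
    });
}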