I am modifying an existing AngularJS application (the code can be found here: https://github.com/gmathiou/gtfs-manager).
In one file (trips-page-module.js) I have the following code:
$scope.saveTrip = function(itemToSave) {
    $scope.computeNewStoptimeSequence();
    LoadDataService.saveTrip(itemToSave)
        .then(function(data) {
            if (data["_id"] != null) {
                $scope.tripsForRoutes.push(data);
                $scope.active_trip_item = data;
            }
        })
        .then(function(data) {
            var dirtyElem = angular.element("ng-dirty");
            var toUpdate = [];
            for (var i = 0; i <= dirtyElem.length; i++) {
                toUpdate.push(dirtyElem[i].scope().stoptime);
            }
            LoadDataService.saveStopTimesForTrip(toUpdate);
        })
        .then(function(data) {
            $scope.editing = false;
        });
}
which, basically, builds an array of all the elements that are dirty and need to be updated, then passes it to LoadDataService.saveStopTimesForTrip.
Still, when I run it I get an error that dirtyElem[i] is undefined. I clearly understand this is due to the asynchronous nature of the code.
What I don't get is how to fix it. How can I wait for all my async functions to finish?
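One common way to make the last step wait is to return the inner promise from the second .then handler, so the chain only moves on once saveStopTimesForTrip has completed. Here is a minimal sketch, not the project's actual code: it assumes both LoadDataService methods return promises, and that full jQuery is loaded so angular.element can take a ".ng-dirty" class selector.

$scope.saveTrip = function(itemToSave) {
    $scope.computeNewStoptimeSequence();
    return LoadDataService.saveTrip(itemToSave)
        .then(function(data) {
            if (data["_id"] != null) {
                $scope.tripsForRoutes.push(data);
                $scope.active_trip_item = data;
            }
        })
        .then(function() {
            var dirtyElem = angular.element(".ng-dirty");
            var toUpdate = [];
            for (var i = 0; i < dirtyElem.length; i++) {
                // Wrap the raw DOM node so .scope() is available
                toUpdate.push(angular.element(dirtyElem[i]).scope().stoptime);
            }
            // Returning the promise makes the next .then wait for the save to finish
            return LoadDataService.saveStopTimesForTrip(toUpdate);
        })
        .then(function() {
            $scope.editing = false;
        });
};

Note that the loop condition in the sketch is < rather than <=: with <=, dirtyElem[dirtyElem.length] is always undefined, which matches the error you are seeing regardless of timing.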
Related
I've been trying to code up a search engine using AngularJS, but I can't copy one array to another. When I run the code, the new array (foundArray, inside the $q.all .then function in service.FoundItems) shows up as an empty array. I looked up how to copy one array to another and tried that method, as you can see, but it isn't working. Please help; here is the code, and thank you.
P.S. If you need the HTML, please tell me.
(function () {
    'use strict';

    angular.module('narrowDownMenuApp', [])
        .controller('narrowItDownController', narrowItDownController)
        .service('MenuSearchService', MenuSearchService)
        .directive('searchResult', searchResultDirective);

    function searchResultDirective() {
        var ddo = {
            templateUrl: 'searchResult.html',
            scope: {
                items: '<'
            },
        };
        return ddo;
    }

    narrowItDownController.$inject = ['MenuSearchService'];
    function narrowItDownController(MenuSearchService) {
        var menu = this;
        menu.input = "";
        menu.displayResult = [];

        menu.searchX = function(name) {
            menu.displayResult = MenuSearchService.FoundItems(menu.input, name);
            console.log(menu.displayResult);
        };
    }

    MenuSearchService.$inject = ['$http', '$q'];
    function MenuSearchService($http, $q) {
        var service = this;

        service.getMatchedMenuItems = function(name, searchTerm) {
            var deferred = $q.defer();
            var foundItems = [];
            var result = $http({
                method: "GET",
                url: ('https://davids-restaurant.herokuapp.com/menu_items.json'),
                params: {
                    category: name
                }
            }).then(function (result) {
                var items = result.data;
                for (var i = 0; i < items.menu_items.length; i++) {
                    if (searchTerm === "") {
                        deferred.reject("Please enter search term");
                        i = items.menu_items.length;
                    }
                    else if (items.menu_items[i].name.toLowerCase().indexOf(searchTerm.toLowerCase()) ==! -1) {
                        foundItems.push(items.menu_items[i].name)
                        deferred.resolve(foundItems);
                    } else {
                        console.log("doesn't match search");
                    }
                }
            });
            return deferred.promise;
        };

        service.FoundItems = function (searchTerm, name) {
            var searchResult = service.getMatchedMenuItems(name, searchTerm);
            var foundArray = [];
            $q.all([searchResult])
                .then(function (foundItems) {
                    foundArray = foundItems[0].slice(0);
                    foundArray.reverse();
                })
                .catch(function (errorResponse) {
                    foundArray.push(errorResponse);
                });
            console.log(foundArray);
            return foundArray;
        };
    };
})();
If the goal of the service.FoundItems function is to return a reference to an array that is later populated with results from the server, use angular.copy to copy the new array from the server to the existing array:
service.FoundItems = function (searchTerm, name) {
    var foundArray = [];
    var searchPromise = service.getMatchedMenuItems(name, searchTerm);
    foundArray.$promise = searchPromise
        .then(function (foundItems) {
            angular.copy(foundItems, foundArray);
            foundArray.reverse();
            return foundArray;
        })
        .catch(function (errorResponse) {
            return $q.reject(errorResponse);
        })
        .finally(function() {
            console.log(foundArray);
        });
    return foundArray;
};
I recommend that the promise be attached to the array reference as a property named $promise so that it can be used to chain functions that depend on results from the server.
Frankly I don't recommend designing services that return array references that are later populated with results. If you insist on designing it that way, this is how it is done.
I tried the $promise approach that you recommended. I was wondering how you would get the value from it, i.e. the array.
In the controller, use the .then method of the $promise to see the final value of the array:
narrowItDownController.$inject = ['MenuSearchService'];
function narrowItDownController(MenuSearchService) {
    var menu = this;
    menu.input = "";
    menu.displayResult = [];

    menu.searchX = function(name) {
        menu.displayResult = MenuSearchService.FoundItems(menu.input, name);
        // console.log(menu.displayResult);  (removed: see the note below)
        menu.displayResult.$promise
            .then(function(foundArray) {
                console.log(foundArray);
                console.log(menu.displayResult);
            }).catch(function(errorResponse) {
                console.log("ERROR");
                console.log(errorResponse);
            });
    };
}
To see the final result, the console.log needs to be moved inside the .then block of the promise.
Titus is right. The function always immediately returns the initial value of foundArray, which is an empty array. The promise is executed asynchronously, so by the time you try to change foundArray it is too late. You need to return the promise itself and then use .then() to retrieve the value, just like you are currently doing inside the method.
From just quickly looking at your code, I think you may have a simple error in there. Are you sure you want
foundArray = foundItems[0].slice(0);
instead of
foundArray = foundItems.slice(0);
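A minimal sketch of that promise-returning approach (an alternative to attaching $promise to the array, assuming getMatchedMenuItems keeps its current signature):

service.FoundItems = function (searchTerm, name) {
    // Return the promise itself; callers get the array through .then()
    return service.getMatchedMenuItems(name, searchTerm)
        .then(function (foundItems) {
            return foundItems.slice(0).reverse();
        });
};

// In the controller:
menu.searchX = function (name) {
    MenuSearchService.FoundItems(menu.input, name)
        .then(function (foundArray) {
            menu.displayResult = foundArray;
        })
        .catch(function (errorResponse) {
            console.log(errorResponse);
        });
};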
I have a CSV parsing function in JavaScript which gets data (movie names) from a CSV file and then fetches data using AJAX calls in a loop.
movies = new Array();
for (var i = 1; i < allData.length; i++) {
    var mName = allData[i][0];
    var mPath = allData[i][1];
    // console.log(decodeURIComponent(mName));
    $.get(apiCall, function showData(data) {
        if (data) {
            mData = data.results;
            if (mData.length > 1) {
                var urlData = new URLSearchParams(this.url);
                var movie_name = urlData.get('query');
                movies.push(movie_name);
            }
        }
    })
}
If the API returns more than one record for any movie, it is saved as a conflict in the movies array.
The problem is that I can access the movies array inside the inner if (but that is inside the iteration, so I can't use it there), and at the end of the loop it is not accessible. How can I access it?
You should not make API calls inside a for loop. Instead, do this:
movies = new Array();

function makeApiCallForEntireArray(index, arr, cb) {
    if (arr.length == index) {
        cb(true);
        return;
    }
    $.get(apiCall, function showData(data) {
        if (data) {
            mData = data.results;
            if (mData.length > 1) {
                var urlData = new URLSearchParams(this.url);
                var movie_name = urlData.get('query');
                movies.push(movie_name);
            }
        }
        makeApiCallForEntireArray(index + 1, arr, cb);
    })
}

makeApiCallForEntireArray(0, allData, function() {
    // api calls finished
    // movies accessible here with all the data
});
You will not be able to access the content added to the movies array at the end of the loop because the AJAX requests are still in progress. You need to do it some other way so that you can be sure all the asynchronous AJAX calls have finished.
I'm going to use the answer of @Jaromanda X from my question here: Can't get the summation in for loop
Promise.all(allData.map(function(d) {
    return $.get(apiCall, function showData(data) {
        return data.results;
    });
})).then(function(res) {
    // push your movies here... the result of your apiCall is inside the res variable
});
I have a similar question here, but I thought I'd ask it a different way to cast a wider net. I haven't come across a workable solution yet (that I know of).
I'd like for Xcode to issue a JavaScript command and get a return value back from an executeSql callback.
From the research I've been reading, I can't issue a synchronous executeSql command. The closest I came was trying to spin-lock until I got the callback, but that hasn't worked yet either. Maybe my spinning isn't giving the callback a chance to come back (see the code below).
Q: How can jQuery have an async=false argument when it comes to Ajax? Is there something different about XHR than there is about the executeSql command?
Here is my proof-of-concept so far: (Please don't laugh)
// First define any dom elements that are referenced more than once.
var dom = {};
dom.TestID = $('#TestID'); // <input id="TestID">
dom.msg = $('#msg'); // <div id="msg"></div>
window.dbo = openDatabase('POC','1.0','Proof-Of-Concept', 1024*1024); // 1MB
!function($, window, undefined) {
    var Variables = {}; // Variables that are to be passed from one function to another.
    Variables.Ready = new $.Deferred();
    Variables.DropTableDeferred = new $.Deferred();
    Variables.CreateTableDeferred = new $.Deferred();

    window.dbo.transaction(function(myTrans) {
        myTrans.executeSql(
            'drop table Test;',
            [],
            Variables.DropTableDeferred.resolve()
            // ,WebSqlError
        );
    });

    $.when(Variables.DropTableDeferred).done(function() {
        window.dbo.transaction(function(myTrans) {
            myTrans.executeSql(
                'CREATE TABLE IF NOT EXISTS Test'
                + '(TestID Integer NOT NULL PRIMARY KEY'
                + ',TestSort Int'
                + ');',
                [],
                Variables.CreateTableDeferred.resolve(),
                WebSqlError
            );
        });
    });

    $.when(Variables.CreateTableDeferred).done(function() {
        for (var i = 0; i < 10; i++) {
            myFunction(i);
        };
        Variables.Ready.resolve();

        function myFunction(i) {
            window.dbo.transaction(function(myTrans) {
                myTrans.executeSql(
                    'INSERT INTO Test(TestID,TestSort) VALUES(?,?)',
                    [
                        i
                        ,i+100000
                    ]
                    ,function() {}
                    ,WebSqlError
                )
            });
        };
    });

    $.when(Variables.Ready).done(function() {
        $('#Save').removeAttr('disabled');
    });
}(jQuery, window);
!function($, window, undefined) {
    var Variables = {};

    $(document).on('click', '#Save', function() {
        var local = {};
        local.result = barcode.Scan(dom.TestID.val());
        console.log(local.result);
    });

    var mySuccess = function(transaction, argument) {
        var local = {};
        for (local.i = 0; local.i < argument.rows.length; local.i++) {
            local.qry = argument.rows.item(local.i);
            Variables.result = local.qry.TestSort;
        }
        Variables.Return = true;
    };

    var myError = function(transaction, argument) {
        dom.msg.text(argument.message);
        Variables.result = '';
        Variables.Return = true;
    }

    var barcode = {};
    barcode.Scan = function(argument) {
        var local = {};
        Variables.result = '';
        Variables.Return = false;
        window.dbo.transaction(function(myTrans) {
            myTrans.executeSql(
                'SELECT * FROM Test WHERE TestID=?'
                ,[argument]
                ,mySuccess
                ,myError
            )
        });
        for (local.I = 0; local.I < 3; local.I++) { // Try a bunch of times.
            if (Variables.Return) break; // Gets set in mySuccess and myError
            SpinLock(250);
        }
        return Variables.result;
    }

    var SpinLock = function(milliseconds) {
        var local = {};
        local.StartTime = Date.now();
        do {
        } while (Date.now() < local.StartTime + milliseconds);
    }

    function WebSqlError(tx, result) {
        if (dom.msg.text()) {
            dom.msg.append('<br>');
        }
        dom.msg.append(result.message);
    }
}(jQuery, window);
Is there something different about XHR than there is about the executeSql command?
Kind of.
How can jQuery have an async=false argument when it comes to Ajax?
Ajax, or rather XMLHttpRequest, isn't strictly limited to being asynchronous -- though, as the original acronym suggested, it is preferred.
jQuery.ajax()'s async option is tied to the boolean async argument of xhr.open():
void open(
    DOMString method,
    DOMString url,
    optional boolean async, // <---
    optional DOMString user,
    optional DOMString password
);
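For comparison, this is what the synchronous form looks like on the jQuery side (a small illustration with a placeholder URL, not code from the question). It is deprecated on the main thread because it blocks the UI until the response arrives:

// async: false makes jQuery call xhr.open(method, url, false)
$.ajax({
    url: '/some/endpoint',  // placeholder URL
    method: 'GET',
    async: false,
    success: function (data) {
        console.log(data);
    }
});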
The Web SQL Database spec does also define a Synchronous database API. However, it's only available to implementations of the WorkerUtils interface, defined primarily for Web Workers:
window.dbo = openDatabaseSync('POC','1.0','Proof-Of-Concept', 1024*1024);
var results;
window.dbo.transaction(function (trans) {
    results = trans.executeSql('...');
});
If the environment running the script hasn't implemented this interface, then you're stuck with the asynchronous API and returning the result will not be feasible. You can't force blocking/waiting of asynchronous tasks for the reason you suspected:
Maybe my spinning isn't giving the callback chance to come back (See code below).
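Instead of spin-locking, the usual pattern with the asynchronous API is to give barcode.Scan a callback and continue the work there. A rough sketch based on the code above (same table and fields, but the callback parameter is my addition):

barcode.Scan = function (argument, callback) {
    window.dbo.transaction(function (myTrans) {
        myTrans.executeSql(
            'SELECT * FROM Test WHERE TestID=?',
            [argument],
            function (transaction, resultSet) {
                var result = '';
                for (var i = 0; i < resultSet.rows.length; i++) {
                    result = resultSet.rows.item(i).TestSort;
                }
                callback(result); // hand the value back instead of returning it
            },
            function (transaction, error) {
                dom.msg.text(error.message);
                callback('');
            }
        );
    });
};

// In the click handler:
// barcode.Scan(dom.TestID.val(), function (result) { console.log(result); });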
I have a function that consumes data from a WCF service (in SharePoint). The service does not return a specific field that I need for the items, so I use the SharePoint Client Object Model to query for that field, using the ID from the result returned by the WCF service.
function LoadAllNews() {
    var listUrl = "/_vti_bin/ListData.svc/Pages";
    $.getJSON(listUrl,
        function (data) {
            $.each(data.d,
                function (i, result) {
                    GetImageUrl(result.Id, function (image) {
                        $(ConstructHtml(image, result.Title, result.Path, result.Name)).appendTo("#News");
                    });
                });
        });
}
When I debug, result here always has the items returned in the same order, but since GetImageUrl runs its query asynchronously, the items are not appended in the same order. Most of the time they are, but sometimes the order appears random, since the time to fetch the image varies:
function GetImageUrl(id, callback) {
    var context = new SP.ClientContext();
    var items = context.get_web().get_lists().getByTitle('Pages').getItemById(id);
    context.load(items);
    context.executeQueryAsync(function () {
        callback(items.get_item('PublishingRollupImage'));
    });
}

function ConstructHtml(imageUrl, title, path, name) {
    var html = "" // a long html string..
    return html;
}
I could post this on sharepoint.stackexchange, but the audience is wider here and it's more a question about how to handle this with JavaScript than about SharePoint itself.
Any ideas on how I should approach this? I was thinking of something like skipping the image in LoadAllNews() and then, when all items are appended, using JavaScript/jQuery to load the image for each news item.
Thanks in advance.
Based on the fork function from my answer to this question, Coordinating parallel execution in node.js, I would do it like this:
var getImages = [];
var meta = [];

$.each(data.d,
    function (i, result) {
        getImages.push(function(callback) {
            GetImageUrl(result.Id, callback);
        });
        meta.push({
            title : result.Title,
            path : result.Path,
            name : result.Name
        });
    });

fork(getImages, function(images) {
    $.each(images, function(i, image) {
        $(ConstructHtml(
            image,
            meta[i].title,
            meta[i].path,
            meta[i].name
        )).appendTo("#News");
    });
});
The implementation of fork is simply this:
function fork(async_calls, shared_callback) {
    var counter = async_calls.length;
    var all_results = [];

    function makeCallback(index) {
        return function () {
            counter--;
            var results = [];
            // we use the arguments object here because some callbacks
            // in Node pass in multiple arguments as result.
            for (var i = 0; i < arguments.length; i++) {
                results.push(arguments[i]);
            }
            all_results[index] = results;
            if (counter == 0) {
                shared_callback(all_results);
            }
        }
    }

    for (var i = 0; i < async_calls.length; i++) {
        async_calls[i](makeCallback(i));
    }
}
The fork function above gathers asynchronous results in order so it does exactly what you want.
If the order of events matters, make it a synchronous procedure
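A minimal sketch of that idea, reusing GetImageUrl and ConstructHtml from the question and assuming data.d is an array: each request is issued only after the previous item has been appended, so order is preserved at the cost of running the requests one at a time.

function appendNewsSequentially(results, index) {
    if (index >= results.length) return; // all items processed
    var result = results[index];
    GetImageUrl(result.Id, function (image) {
        $(ConstructHtml(image, result.Title, result.Path, result.Name)).appendTo("#News");
        appendNewsSequentially(results, index + 1); // only start the next item now
    });
}

// Inside the $.getJSON callback:
// appendNewsSequentially(data.d, 0);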
I am attempting to implement an asynchronous method queue in JavaScript, as seen in this blog post.
Here's what I have so far:
function Queue() {
    this._methods = [];
    this._response = null;
    this._flushed = false;
}

(function(Q) {
    Q.add = function (fn) {
        if (this._flushed) fn(this._response);
        else this._methods.push(fn);
    }

    Q.flush = function (response) {
        if (this._flushed) return;
        this._response = response;
        while (this._methods[0]) {
            this._methods.shift()(response);
        }
        this._flushed = true;
    }
})(Queue.prototype);
I can't seem to get it to work as advertised, although the code looks correct to me. When I call the flush function I get "this._methods is undefined" on the line while (this._methods[0]) {.
How are you using it? If you're doing:
var q = new Queue();
q.flush("foo");
...you shouldn't be getting that error, and I'm not: http://jsbin.com/iduji3
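If the error only shows up when flush is handed off somewhere else, the likely cause is that the method has been detached from the instance, so this no longer points at the queue. A small illustration (someAsyncThing is a hypothetical consumer, not from the question):

var q = new Queue();

// Passing the bare method loses `this`, so inside flush
// this._methods is undefined:
someAsyncThing(q.flush);

// Keeping `this` bound to the queue avoids the error:
someAsyncThing(q.flush.bind(q));
someAsyncThing(function (response) { q.flush(response); });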