Parse.Cloud.job promise not working - javascript

What I am trying to do here is:
Remove all contents of a class first, because the events.json file is updated every day. My first question: is there a better way to remove all contents from a database class on Parse?
Then I send a request to get events.json and store the "name" and "id" of each result in a 2D array.
Then I send multiple requests to get the json files for each "name"/"id" pair.
Finally, I store the event details in the database (one event per row). But right now my code terminates before it has downloaded the json files.
Code:
function newLst(results) {
var event = Parse.Object.extend("event");
for (var i = 0; i < results.length; i++){
Parse.Cloud.httpRequest({
url: 'https://api.example.com/events/'+ results[i].name +'/'+ results[i].id +'.json',
success: function(newLst) {
var newJson = JSON.parse(newLst.text);
var newEvent = new event();
newEvent.set("eventId",newJson.data.id);
newEvent.set("eventName",newJson.data.title);
newEvent.save(null, {
success: function(newEvent) {
alert('New object created with objectId: ' + newEvent.id);
},
error: function(newEvent, error) {
alert('Failed to create new object, with error code: ' + error.message);
}
});
},
error: function(newLst) {
}
});
}
};
Parse.Cloud.job("getevent", function(request, status) {
var event = Parse.Object.extend("event");
var query = new Parse.Query(event);
query.notEqualTo("objectId", "lol");
query.limit(1000);
query.find({
success: function(results) {
for (var i = 0; i < results.length; i++) {
var myObject = results[i];
myObject.destroy({
success: function(myObject) {
},
error: function(myObject, error) {
}
});
}
},
error: function(error) {
alert("Error: " + error.code + " " + error.message);
}
});
var params = { url: 'https://api.example.com/events.json'};
Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var jsonobj = JSON.parse(httpResponse.text);
for (var i = 0; i < jsonobj.data.length; i++) {
var tmp2D = {"name":"id"}
tmp2D.name = [jsonobj.data[i].name];
tmp2D.id = [jsonobj.data[i].id];
results.push(tmp2D);
}
newLst(results);
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});

I think my original answer is correct as a standalone. Rather than make it unreadable with the additional code, here it is made very specific to your edit.
The key is to eliminate passed callback functions; everything below uses promises. Another key idea is to decompose the activities into logical chunks.
A couple of caveats: (1) There's a lot of code here, and the chance that either your code or mine contains a mistake is still high, but this should communicate the gist of a better design. (2) We're doing enough work in these functions that we might bump into a Parse-imposed timeout, so start out by testing all of this with small counts.
Start with your question about destroying all instances of the class...
// return a promise to destroy all instances of the "event" class
function destroyEvents() {
    // is your event class really named with lowercase? uppercase is conventional
    var query = new Parse.Query("event");
    query.notEqualTo("objectId", "lol"); // doing this because the OP code did it. not sure why
    query.limit(1000);
    return query.find().then(function(results) {
        return Parse.Object.destroyAll(results);
    });
}
Next, get remote events and format them as simple JSON. See the comment. I'm pretty sure your idea of a "2D array" was ill-advised, but I may be misunderstanding your data...
// return a promise to fetch remote events and format them as an array of objects
//
// note - this differs from the OP data. this will evaluate to:
// [ { "name":"someName0", id:"someId0" }, { "name":"someName1", id:"someId1" }, ...]
//
// original code was producing:
// [ { "name":["someName0"], id:["someId0"] }, { "name":["someName1"], id:["someId1"] }, ...]
//
function fetchRemoteEvents() {
    var params = { url: 'https://api.example.com/events.json' };
    return Parse.Cloud.httpRequest(params).then(function(httpResponse) {
        var results = [];
        var remoteEvents = JSON.parse(httpResponse.text).data;
        for (var i = 0; i < remoteEvents.length; i++) {
            var remoteEvent = { "name": remoteEvents[i].name, "id": remoteEvents[i].id };
            results.push(remoteEvent);
        }
        return results;
    });
}
Please double check all of my work above regarding the format (e.g. response.text, JSON.parse().data, etc).
It's too easy to get confused when you mix callbacks and promises, and worse still when you're generating promises in a loop. So here again we break out a simple operation: creating a single parse.com object based on one of the remote events we got in the function above...
// return a promise to create a new native event based on a remoteEvent
function nativeEventFromRemoteEvent(remoteEvent) {
    var url = 'https://api.example.com/events/' + remoteEvent.name + '/' + remoteEvent.id + '.json';
    return Parse.Cloud.httpRequest({ url: url }).then(function(response) {
        var eventDetail = JSON.parse(response.text).data;
        var Event = Parse.Object.extend("event");
        var event = new Event();
        event.set("eventId", eventDetail.id);
        event.set("eventName", eventDetail.title);
        return event.save();
    });
}
Finally, we can bring it together in a job that is simple to read, certain to do things in the desired order, and certain to call success() when (and only when) it finishes successfully...
// the parse job removes all events, fetches remote data that describe events,
// then builds events from those descriptions
Parse.Cloud.job("getevent", function(request, status) {
    destroyEvents().then(function() {
        return fetchRemoteEvents();
    }).then(function(remoteEvents) {
        var newEventPromises = [];
        for (var i = 0; i < remoteEvents.length; i++) {
            var remoteEvent = remoteEvents[i];
            newEventPromises.push(nativeEventFromRemoteEvent(remoteEvent));
        }
        return Parse.Promise.when(newEventPromises);
    }).then(function() {
        status.success("run job");
    }, function(error) {
        status.error(error);
    });
});

The posted code makes just one http request, so there's no need for an array of promises or a call to Promise.when(). Whatever else might be going on is obscured by mixing the callback parameters to httpRequest with promises, and by the assignment inside the push.
Here's a clarified rewrite:
Parse.Cloud.job("getevent", function(request, status) {
var promises = [];
var params = { url: 'https://api.example.com'};
Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var jsonobj = JSON.parse(httpResponse.text);
for (var i = 0; i < jsonobj.data.length; i++) {
// some code
}
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});
But there's a very strong caveat here: this works only if the ("// some code") that appears in your original post doesn't itself try to do any async work, database or otherwise.
Let's say you do need to do async work in that loop. Move that work into a promise-returning function, collect those promises in an array, and then use Promise.when(). e.g....
// return a promise to look up some object, change it and save it...
function findChangeSave(someJSON) {
    var query = new Parse.Query("SomeClass");
    query.equalTo("someAttribute", someJSON.lookupAttribute);
    return query.first().then(function(object) {
        object.set("someOtherAttribute", someJSON.otherAttribute);
        return object.save();
    });
}
Then, in your loop...
var jsonobj = JSON.parse(httpResponse.text);
var promises = [];
for (var i = 0; i < jsonobj.data.length; i++) {
    // some code, which is really:
    var someJSON = jsonobj.data[i];
    promises.push(findChangeSave(someJSON));
}
return Parse.Promise.when(promises);

Related

Parse database job is not getting all results from http request

I have the following code set up as a job in the Parse Cloud Code for my application.
Parse.Cloud.job("requestLocations", function (request, response) {Parse.Cloud.httpRequest({
url: 'https://maps.googleapis.com/maps/api/place/nearbysearch/json?location=29.7030428,-98.1364808&radius=900&types=restaurant&key=AIzaSyCTg0x68Q6lrCAo6-A37zkxge81jDEKpvo'
}).then(function (httpResponse) {
// Success
response.success("Success");
var parsedData = JSON.parse(httpResponse.text);
var Location = Parse.Object.extend("Locations");
for (var i = 0; i < parsedData.results.length; i++) {
var restaurant = new Location();
var placeId = parsedData.results[i].place_id;
var name = parsedData.results[i].name;
var vicinity = parsedData.results[i].vicinity;
var point = new Parse.GeoPoint({
latitude: parsedData.results[i].geometry.location.lat,
longitude: parsedData.results[i].geometry.location.lng
});
restaurant.set("placeId", placeId);
restaurant.set("name", name);
restaurant.set("vicinity", vicinity);
restaurant.set("location", point);
restaurant.save(null, {
success: function (location) {
console.log("Object ID: " + location.id);
},
error: function (location, error) {
console.log("Failed to create object, with error code: " + error.message);
}
});
}
}, function (httpResponse) {
// Error
response.error('request failed with response code ' + httpResponse)
});});
As you can see, this HTTP request should return a total of 14 places. Unfortunately, it only returns 9 places, and it would also seem that which 9 are returned can change. I assume there is a problem with the way my function is put together. Can anyone help me remedy this issue? I would like to return as many places as I want based on the radius of the HTTP request.
Thank you
The http request is done right, with a promise that's fulfilled when the request is complete. But your then() block creates several objects in a loop without waiting for them all to finish, and it calls response.success before the saves are done. Fix it like this...
// break it into understandable chunks, too, so, here's a function
// to build a Locations object from the http data
function locationFromResult(result) {
    var Location = Parse.Object.extend("Locations");
    var restaurant = new Location();
    var placeId = result.place_id;
    var name = result.name;
    var vicinity = result.vicinity;
    var point = new Parse.GeoPoint({
        latitude: result.geometry.location.lat,
        longitude: result.geometry.location.lng
    });
    restaurant.set("placeId", placeId);
    restaurant.set("name", name);
    restaurant.set("vicinity", vicinity);
    restaurant.set("location", point);
    return restaurant;
}

Parse.Cloud.job("requestLocations", function (request, response) {
    var url = 'https://maps.googleapis.com/maps/api/place/nearbysearch/json?location=29.7030428,-98.1364808&radius=900&types=restaurant&key=AIzaSyCTg0x68Q6lrCAo6-A37zkxge81jDEKpvo';
    Parse.Cloud.httpRequest({url: url}).then(function (httpResponse) {
        var parsedData = JSON.parse(httpResponse.text);
        var locations = parsedData.results.map(function(result) {
            return locationFromResult(result);
        });
        // this is important, saveAll of the new objects before returning
        // this can also be accomplished by saving the objects individually and using Parse.Promise.when()
        return Parse.Object.saveAll(locations);
    }).then(function(result) {
        response.success(JSON.stringify(result));
    }, function(error) {
        response.error(JSON.stringify(error));
    });
});
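As an aside, here is a minimal sketch of the alternative mentioned in the comment above: saving the objects individually and collecting the save promises with Parse.Promise.when(). It reuses url, response and locationFromResult from the job above and is illustrative only:
// variant of the job body above: save each Location individually instead of saveAll
Parse.Cloud.httpRequest({url: url}).then(function (httpResponse) {
    var parsedData = JSON.parse(httpResponse.text);
    var savePromises = parsedData.results.map(function (result) {
        return locationFromResult(result).save(); // each save() returns a promise
    });
    // resolves only once every individual save has finished
    return Parse.Promise.when(savePromises);
}).then(function () {
    response.success("locations saved");
}, function (error) {
    response.error(JSON.stringify(error));
});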

Send bulk email using Parse.com Cloud Code

In the app I'm creating, a user will be able to send a description of an object to a number of recipients (from 1 to 200). Using Parse Cloud Code, I'll have to use a promise to wait for the email server's response for every email (Mailgun).
Is there any other way where I store all those created emails in some kind of array and send them in one shot to the email server? Otherwise I hit the maximum of 15 seconds a function can run.
Right now I use this:
Parse.Cloud.define("emailobject", function(request, response) {
// ... some company info parameters
var theTraders = request.params.traders;
var objectQ = new Parse.Query("objects");
objectQ.equalTo("objectId", theObjectId);
objectQ.first({
success:function(theObject) {
// ... more code
searchObjectPictures(theObject, {
success:function(pictureObjects) {
for (var a = 0; a < theTraders.length; a++) {
// create the parameters to create the email
var mailingParameters = {};
// ... create the parameters
// when the email html has been compiled
mailgun.sendWelcomeEmail({
to: traderEmail,
from: toString,
subject: subjectString,
html: mailing.getMailingHTML(mailingParameters)
}, {
success:function(httpResponse) {
console.log(httpResponse);
},
error:function(httpResponse) {
console.log(httpResponse);
}
});
}
// emailedObjectsToSave is an array of analytics objects
Parse.Object.saveAll(emailedObjectsToSave, {
success:function(list) {
response.success(true);
},
error:function(error) {
response.error(error);
}
});
},
error:function(error) {
response.error(error);
}
});
},
error:function(error){
response.error(error);
}
});
});
I know promises would be better for nested queries, but I'm still wrapping my head around this.
Thank you
After a lot of searching and trial and error, I have finally found the solution. As #danh said, it's better to add these requests to a job queue. The objects are now saved to a "notifications" class that has a bool "notified" to track whether an object has already been sent.
I'll post the code here for people who might also search for it.
First you add your objects to the new "notifications" class (queue).
Parse.Cloud.define("addToNotificationsQueue", function(request, response) {
var roleName = request.params.role;
var objects = request.params.objects;
var newEmailObjectsToSave = [];
var EmailedObjectClass = Parse.Object.extend("notifications");
var emailObjectACL = new Parse.ACL();
emailObjectACL.setPublicReadAccess(false);
emailObjectACL.setRoleReadAccess(roleName, true);
emailObjectACL.setRoleWriteAccess(roleName, true);
for (var i = 0; i < objects.length; i++) {
// define the parameters for the new objects based on the request params
var emailObject = new EmailedObjectClass();
// set all the details to your new emailObject
emailObject.setACL(emailObjectACL);
newEmailObjectsToSave.push(emailObject);
}
Parse.Object.saveAll(newEmailObjectsToSave, {
success:function(list) {
response.success(true);
},
error:function(error) {
response.error(error);
}
});
});
Then define your job. I run it every 15 minutes for now to test. When the mailserver starts acting faster, I'll set the limit and frequency higher.
Parse.Cloud.job("sendNotifications", function(request, status) {
Parse.Cloud.useMasterKey();
var numberOfNotificationsHandled;
var query = new Parse.Query("notifications");
query.limit(10); // limit because max 15 minutes, slow email server...
query.notEqualTo("notified", true);
query.find().then(function(notifications) {
numberOfNotificationsHandled = notifications.length;
var promise = Parse.Promise.as();
_.each(notifications, function(notification) {
var parameterDict = {};
// add all parameters the emailObject function needs
promise = promise.then(function(){
return Parse.Cloud.run("emailObject", parameterDict).then(function(result) {
notification.set("notified", true);
notification.save();
}, function(error) {
notification.set("notified", false);
notification.save();
});
});
});
return promise;
}).then(function() {
console.log("JOB COMPLETED");
status.success("Sent " + numberOfNotificationsHandled + " emails");
}, function(error) {
console.log("JOB ERROR " + error);
status.error("Job error " + error);
});
});
The emailObject function is just an httpRequest like any other; the job above waits for the responses to those httpRequests.
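For completeness, a minimal hypothetical sketch of what that emailObject function might look like; the endpoint, parameter names, and the omitted authentication are assumptions, not the poster's actual code:
Parse.Cloud.define("emailObject", function(request, response) {
    Parse.Cloud.httpRequest({
        method: 'POST',
        url: 'https://api.mailgun.net/v3/YOUR_DOMAIN/messages', // assumed Mailgun endpoint
        // authentication headers omitted in this sketch
        body: {
            to: request.params.to,
            from: request.params.from,
            subject: request.params.subject,
            html: request.params.html
        }
    }).then(function(httpResponse) {
        // only resolve once the mail server has answered
        response.success(httpResponse.status);
    }, function(error) {
        response.error('mail request failed with status ' + error.status);
    });
});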

Populating array with response from nested AJAX calls

I have an array that I would like to fill with responses from AJAX calls like so:
var dict = [];
function fillArray(){
$.post('getUsersOnline.php', function(phpReturnVal){
// ...
for(var i = 0; i < phpReturnVal.length; i++){
$.get("https://api.twitch.tv/kraken/streams" , function(data){
dict[data.key] = data;
});
});
}
function doStuff(){
// dict is empty or undefined here
}
How would I fill dict with objects so that I can retrieve them inside doStuff()? Currently I am able to insert items into dict, but when I try accessing dict outside the fillArray() function it is empty; I'm assuming that's because the GET calls are asynchronous and don't complete until after all the other JS code has executed...
So, dict as an object ({}) has no push method; you'd need dict = []. If you had to have {}, then you'd need key:value pairs to populate it, such as:
dict[key] = value;
You are going to have to keep track of the number of calls you make in that for loop and fire a callback function once they are all complete. I'm not totally confident in your current approach of issuing an indefinite number of ajax requests, but I also don't fully understand the scope of your problem or the server you're talking to.
So basically you will have to do something like this with what you have currently:
var dict = [],
    requestsCompleted = 0;

function dictFilled() {
    // do something with your dict variable;
}

function fillArray() {
    $.post('getUsersOnline.php', function(phpReturnVal) {
        // ...
        for (var i = 0; i < phpReturnVal.length; i++) {
            $.get("https://api.twitch.tv/kraken/streams", function(data) {
                dict[data.key] = data;
                requestsCompleted++;
                if (requestsCompleted === phpReturnVal.length) {
                    dictFilled();
                }
            });
        } // close the for loop (missing in the original)
    });
}
This hasn't been tested, but basically you will have to define a function that has access to the array you are filling and call it once all your asynchronous requests finish successfully. For tasks like this, though, I recommend you take a look at jQuery's Deferred API; there is always a chance that one of those requests will fail, and your application should know what to do if that happens.
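A rough sketch of that Deferred-based approach, assuming phpReturnVal parses to an array and using the same endpoints as above (untested, illustrative only):
var dict = {}; // used as a key/value map

function fillArray() {
    // $.post returns a jqXHR, which is a promise; chain the per-user GETs off it
    return $.post('getUsersOnline.php').then(function(phpReturnVal) {
        var requests = $.map(phpReturnVal, function(user) {
            return $.get("https://api.twitch.tv/kraken/streams").then(function(data) {
                dict[data.key] = data;
            });
        });
        // resolves only when every GET has finished
        return $.when.apply($, requests);
    });
}

fillArray().done(function() {
    doStuff(); // dict is fully populated here
}).fail(function(jqxhr, textStatus, errorThrown) {
    console.log(textStatus, errorThrown);
});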
I'm assuming the GET call is asynchronous and doesn't happen until
after all the JS code has executed...
Appears correct.
Try
var dict = [];
function fillArray() {
// return jQuery promise object
return $.post('getUsersOnline.php', function(phpReturnVal){
// ...
for(var i = 0; i < phpReturnVal.length; i++) {
// call same `url` `phpReturnVal.length` times here ?,
// returning same `data` response at each request ?,
// populating, overwriting `dict` with same `data.key` property ?
$.get("https://api.twitch.tv/kraken/streams" , function(data) {
dict[data.key] = data;
});
}; // note closing `}` at `for` loop
// return value
return dict
});
}
function doStuff(data) {
// `data`:`dict`, promise value returned from `fillArray`
console.log(data);
}
fillArray().then(doStuff, function error(jqxhr, textStatus, errorThrown) {
console.log(textStatus, errorThrown)
});
var arr = ["a", "b", "c"];
var response = {
"a": 1,
"b": 2,
"c": 3
};
var obj = {};
var dict = [];
function fillArray() {
return $.when(arr).then(function(phpReturnVal) {
for (var i = 0; i < phpReturnVal.length; i++) {
// return same `response` here ?
$.when(response).then(function(data) {
dict[arr[i]] = data;
});
};
return dict
});
}
function doStuff(data) {
console.log(data)
}
fillArray().then(doStuff, function error(err) {
console.log(err)
});
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js">
</script>

status.success being called before my function is done

In my Parse background job, I want both the processUser(user); and matchCenterComparison(eBayResults); functions to finish before status.success is called.
I currently have matchCenterComparison(eBayResults); being returned at the end of processUser(user);, as you can see below.
I figured that since it's returned in processUser(user);, and status.success isn't called until after that function is done, it would therefore wait until matchCenterComparison(eBayResults); is finished before calling it. This doesn't seem to be the case.
In the Parse forums, I was told that I need to have the matchCenterComparison function return a Promise, and make sure that the function call is also part of my main promise chain so that the background job waits till it completes before status.success is called. I'm having trouble figuring out the correct syntax to accomplish this, since I can't return two functions in a row in userQuery.each.
Main Promise Chain:
Parse.Cloud.job("MatchCenterBackground", function(request, status) {
var usersQuery = new Parse.Query(Parse.User);
usersQuery.each(function (user) {
return processUser(user);
}).then(function() {
status.success("background job worked brah!");
}, function(error) {
status.error(error);
});
});
processUser function:
function processUser(user) {
// ... code to setup per-user query ...
var matchCenterItem = Parse.Object.extend("matchCenterItem");
var query = new Parse.Query(matchCenterItem);
// easy way to share multiple arrays
var shared = {
promises: [],
searchTerms: [],
};
return query.find().then(function(results) {
// process results, populate shared data (promises and searchTerms)
console.log('matchCenterItem query results:' + results);
if (results.length > 0) {
console.log('we have entered the matchcenteritem query');
for (i = 0; i < results.length; i++) {
console.log('we have also entered the loop inside the matchCenterItem query');
// later in your loop where you populate promises:
var searchTerm = results[i].get('searchTerm');
// add it to the array just like you add the promises:
shared.searchTerms.push(searchTerm);
url = 'http://svcs.ebay.com/services/search/FindingService/v1';
//push function containing criteria for every matchCenterItem into promises array
shared.promises.push((function() {
if (results[i].get('itemLocation') == 'US')
{
console.log('americuh!');
var httpRequestPromise = Parse.Cloud.httpRequest({
url: url,
params: {// httprequest params in here}
});
}
else if (results[i].get('itemLocation') == 'WorldWide')
{
console.log('Mr worlwide!');
var httpRequestPromise = Parse.Cloud.httpRequest({
url: url,
params: {// httprequest params in here}
});
}
return httpRequestPromise;
})());
}
}
//buildEbayRequestPromises(results, shared);
}).then(function() {
// process promises, return query promise
return Parse.Promise.when(shared.promises).then(function() {
// process the results of the promises, returning a query promise
// ... code here ...
console.log('were in the when.then of promise');
var eBayResults = [];
for (var i = 0; i < arguments.length; i++) {
var httpResponse = arguments[i];
// since they're in the same order, this is OK:
var searchTerm = shared.searchTerms[i];
// pass it as a param:
var top3 = buildEbayRequestPromises(httpResponse.text, searchTerm);
eBayResults.push(top3);
}
return matchCenterComparison(eBayResults);
});
});
}
matchCenterComparison function:
function matchCenterComparison(eBayResults) {
if (eBayResults.length > 0) {
console.log('yes the ebay results be longer than 0');
//Query users MComparisonArray with these criteria
var mComparisonArray = Parse.Object.extend("MComparisonArray");
var mComparisonQuery = new Parse.Query(mComparisonArray);
mComparisonQuery.contains('Name', 'MatchCenter');
//mComparisonQuery.contains("MCItems", eBayResults);
console.log('setup query criteria, about to run it');
mComparisonQuery.find({
success: function(results) {
console.log('MatchCenter comparison results :' + results);
// No new items
if (results.length > 0) {
console.log("No new items, you're good to go!");
}
// New items found
else if (results.length === 0) {
console.log('no matching mComparisonArray, lets push some new shit');
//replace MCItems array with contents of eBayResults
Parse.Object.destroyAll(mComparisonArray);
var newMComparisonArray = new mComparisonArray();
newMComparisonArray.set('Name', 'MatchCenter');
newMComparisonArray.set('MCItems', eBayResults);
//newMComparisonArray.set("parent", Parse.User());
console.log('yala han save il hagat');
// Save updated MComparisonArray
newMComparisonArray.save().then({
success: function() {
console.log('MComparisonArray successfully created!');
//status.success('MComparisonArray successfully created!');
},
error: function() {
console.log('nah no MComparisonArray saving for you bro:' + error);
//status.error('Request failed');
}
});
//send push notification
}
console.log('MatchCenter Comparison Success!');
},
error: function(error) {
console.log('nah no results for you bro:' + error);
}
});
}
}
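For reference, a minimal sketch of the promise-returning shape described above; it compresses the comparison logic and is not a full rewrite of the function, the goal is only to show that returning the query/save chain lets processUser, and therefore the job, wait on it:
function matchCenterComparison(eBayResults) {
    if (eBayResults.length === 0) {
        return Parse.Promise.as([]); // nothing to compare; resolve immediately
    }
    var MComparisonArray = Parse.Object.extend("MComparisonArray");
    var query = new Parse.Query(MComparisonArray);
    query.contains('Name', 'MatchCenter');
    return query.find().then(function(results) {
        if (results.length > 0) {
            return results; // no new items; the chain still resolves
        }
        var newMComparisonArray = new MComparisonArray();
        newMComparisonArray.set('Name', 'MatchCenter');
        newMComparisonArray.set('MCItems', eBayResults);
        // returning the save promise keeps the job waiting until it completes
        return newMComparisonArray.save();
    });
}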

Calling only once / caching the data from a $http get in an AngularJS service

This may sound like a really simple/stupid question, but I need to ask it as I haven't come across this scenario before. I have a service in my AngularJS app. This service currently contains 4 methods that share about 80% of the same functionality/code, and I wish to make this more efficient. Here is what my service looks like (with a lot of code removed):
.factory('townDataService', function ($http) {
var townList = {};
townList.getTownList = function () {
return $http({method: 'GET', url: '/api/country/cities'})
.then(function (response) {
// HERE WE FORMAT THE response as desired... that creates a returnArray
var returnArray = [];
// loop through the countries
var JsonData = response.data;
for (key in JsonData['countries']) {
// formatting code...
}
// end of repeated CODE
return returnArray; // this is array, we don't do any formatting here
});
};
townList.getCurrentTown = function (place) {
return $http({method: 'GET', url: '/api/country/cities'})
.then(function (response) {
// HERE WE FORMAT THE response as desired... that creates a returnArray
var returnArray = [];
// loop through the countries
var JsonData = response.data;
for (key in JsonData['countries']) {
// formatting code...
}
// end of repeated code
// now the format further / work with the returnArray...
for (var i = 0; i < returnArray.length; i++) {
// do stuff
}
return currentTown; // this is a string
});
};
townList.getCurrentCountry = function (place) {
return $http({method: 'GET', url: '/api/country/cities'})
.then(function (response) {
// HERE WE FORMAT THE response as desired... that creates a returnArray
var returnArray = [];
// loop through the countries
var JsonData = response.data;
for (key in JsonData['countries']) {
// formatting code...
}
// end of repeated code
// now the format further / work with the returnArray...
for (var i = 0; i < returnArray.length; i++) {
// do stuff
}
return currentCountry; // this is a string
});
};
return townList;
}
)
;
Now I repeat the same $http 'GET' in each method, and the same formatting code (which is a lot of nested loops), before returning an object array or a string. This is far from efficient! What is the best way to put this functionality into its own function so we only call the GET url once but still return a promise from each method? Should I store the result of $http({method: 'GET', url: '/api/country/cities'}) in a var and inject/pass it into each method before formatting the data if necessary? Should I use some sort of $cacheFactory?
Sorry if this is a dumb question; if I haven't explained myself well, I shall rephrase the question.
Thanks in advance.
It is just as you say; this code can (and should) be refactored in many ways. One example:
Let us factor the HTTP stuff into a separate service that will also take care of caching. (Another idea would be to have one service for the HTTP/remote calls and another - maybe a general-use decorator - to handle caching. Let us not go into so much detail for now.) And let us put the formatting code in another method:
The remote call service:
.service('townHttpService', function($http, $q) {
    var cache;

    function getCities() {
        var d = $q.defer();
        if (cache) {
            d.resolve(cache);
        }
        else {
            $http({method: 'GET', url: '/api/country/cities'}).then(
                function success(response) {
                    cache = response.data;
                    d.resolve(cache);
                },
                function failure(reason) {
                    d.reject(reason);
                }
            );
        }
        return d.promise;
    }

    function clearCache() {
        cache = null;
    }

    return {
        getCities: getCities,
        clearCache: clearCache
    };
})
The formatter:
.service('townFormatter', function() {
    return function townFormatter(jsonData) {
        // HERE WE FORMAT THE response as desired... that creates a returnArray
        var returnArray = [], key;
        // loop through the countries
        for (key in jsonData['countries']) {
            // formatting code...
        }
        // end of repeated CODE
        return returnArray; // this is an array, we don't do any formatting here
    };
})
Your townDataService, written in terms of the above:
.factory('townDataService', function (townHttpService, townFormatter) {
    var townList = {};

    townList.getTownList = function () {
        return townHttpService.getCities().then(townFormatter);
    };

    townList.getCurrentTown = function (place) {
        return townHttpService.getCities().then(townFormatter).then(function(cityList) {
            var currentTown;
            for (var i = 0; i < cityList.length; i++) {
                // do stuff
            }
            return currentTown; // this is a string
        });
    };

    townList.getCurrentCountry = function (place) {
        return townHttpService.getCities().then(townFormatter).then(function(cityList) {
            var currentCountry;
            for (var i = 0; i < cityList.length; i++) {
                // do stuff
            }
            return currentCountry; // this is a string
        });
    };

    return townList;
})
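A hypothetical usage from a controller (the controller and scope property names are illustrative, not taken from the question); only the first call actually hits the network, thanks to the cache:
.controller('TownCtrl', function ($scope, townDataService) {
    townDataService.getTownList().then(function (towns) {
        $scope.towns = towns;
    });
    townDataService.getCurrentTown('some place').then(function (currentTown) {
        $scope.currentTown = currentTown;
    });
})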
I guess you've got two questions there: removing repeated logic, and the best way to cache results.
First - Removing duplicate code:
It looks like townList.getTownList is the common method; the other two methods are extensions of it.
So,
townList.getCurrentTown = function(place) {
    var towns = townList.getTownList();
    for (var i = 0; i < towns.length; i++) { // additional stuff
    }
    return currentTown;
};

townList.getCurrentCountry = function(place) {
    var towns = townList.getTownList();
    for (var i = 0; i < towns.length; i++) { // additional stuff
    }
    return currentCountry;
};
Second - Caching values:
Now that the http call is made only in townList.getTownList, the caching logic can easily be implemented there. But this depends on whether the data is a one-time fetch or can be refreshed.
One-time fetch: just enable the cache in the http call: $http({method: 'GET', url: '/api/country/cities', cache:true});
Refresh based on request: I would pass a refresh flag to indicate whether the data has to be refreshed or not. So if refresh is true, or the town list is empty, the data will be fetched.
var townList = {};
var towns; // cached promise of the town list

townList.getTownList = function(refresh) {
    if (refresh || !towns) {
        towns = $http({
            method: 'GET',
            url: '/api/country/cities'
        })
        .then(function(response) {
            var returnArray = [];
            var JsonData = response.data;
            for (var key in JsonData.countries) {}
            return returnArray; // this is an array, we don't do any formatting here
        });
    }
    return towns;
};
There is nothing special you can do here that will bring considerable benefit beyond the basics: cache your GET response and refactor a bit to avoid code duplication and improve readability:
.factory('townDataService', function ($http) {
var getCitiesAsync = function(){
return $http({method: 'GET', url: '/api/country/cities', cache:true});
};
var townList = {};
townList.getTownList = function () {
return getCitiesAsync().then(prepareTownList);
};
var prepareTownList = function(response){
//extract towns and do whatever you need
return result;
};
...
As for using $cacheFactory - it seems like overhead for such a simple scenario; just use the built-in cache option.
To avoid timing issues, it is perhaps good to extend the solution a little bit:
function getCities() {
    var d = $q.defer();
    if (cache) {
        d.resolve(cache);
    }
    else {
        $http({method: 'GET', url: '/api/country/cities'}).then(
            function success(response) {
                if (!cache) {
                    cache = response.data;
                }
                d.resolve(cache);
            },
            function failure(reason) {
                d.reject(reason);
            }
        );
    }
    return d.promise;
}
After a (perhaps second or third) call to the webservice succeeds, we check whether the cache variable was already set while we were waiting for the server's response. If so, we resolve with the already assigned value. That way, there is no new assignment to cache if multiple calls were issued concurrently:
function success(response) {
if (!cache) {
cache = response.data;
It does not necessarily cause problems, but if you rely on having identical objects (for example when working with data binding), it is good to be sure the data is only fetched once!
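For illustration (hypothetical controller code, not from the answer), both calls below end up resolving with the very same cached object, so bindings in different controllers point at one instance:
townHttpService.getCities().then(function (cities) { $scope.citiesA = cities; });
townHttpService.getCities().then(function (cities) { $scope.citiesB = cities; });
// once both promises resolve: $scope.citiesA === $scope.citiesB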
