I'm not well-versed in JS but have to write some Cloud Code.
I have a few httpRequest operations returning JSON that then create Parse objects. After all those operations complete I want to run a saveAll.
Here's a shortened version of the function for readability. There are *** pointing to the async operations that need to be dependent on each other:
Parse.Cloud.job("importPlaylists", function(request, status) {
// Array to store new objects
var playlistsToSave = [];
// Iterate the countries (Sheets)
for (var j = 0; j < urls.length; j++) {
// *** -- 1. HERE IS WHERE I LOOP THROUGH HTTPREQUESTS, GET JSON, AND CREATE A NUMBER OF PARSE OBJECTS
// Cloud function - http request
Parse.Cloud.httpRequest({
url: url
}).then(function(httpResponse) {
// Parse response
var json = JSON.parse(httpResponse.buffer);
// Create playlist objects
for (var i = 0; i < json.feed.entry.length; i++) {
// Create Playlist objects from json
var Playlist = Parse.Object.extend("Playlist");
var playlist = new Playlist();
// ...
// Add to caching array
playlistsToSave.push(playlist);
}
},function(httpResponse) {
// error
console.error('Request failed with response code ' + httpResponse.status);
status.error("Scheduled messages error: " + error);
});
}
// *** -- 2. THIS IS THE SAVEALL OPERATION THAT NEEDS TO BE DEPENDENT ON ALL OF THOSE HTTPRESPONSES COMPLETING
// Parse - Save objects
Parse.Object.saveAll(playlistsToSave, {
success: function(saveList) {
status.success("Objects created successfully.");
},
error: function(error) {
status.error("Unable to save objects.");
}
});
});
And here's the full code:
Parse.Cloud.job("importPlaylists", function(request, status) {
// ID of the Google Spreadsheet
var spreadsheetID = "someId";
// List of country codes and their Google Sheets Ids
var territories = {
"us" : "od6",
"gb" : "olbnsti",
"it" : "oa6haa5",
"es" : "oxibv7k",
"fr" : "obisdv5",
"nl" : "ohrgz0b",
"de" : "ocqrxlj",
"pl" : "oi0umg5"
}
// Array to store new objects
var playlistsToSave = [];
// Iterate the countries (Sheets)
for (var j = 0; j < territories.length; j++) {
var countryCode = territories[j];
var sheetID = territories[countryCode];
var url = "https://spreadsheets.google.com/feeds/list/" + spreadsheetID + "/" + sheetID + "/public/values?alt=json"; // Make sure it is public or set to Anyone with link can view
// *** - 1. This loops through each Google Sheet and creates Parse objects from the JSON
// Cloud function - http request
Parse.Cloud.httpRequest({
url: url
}).then(function(httpResponse) {
// Parse response
var json = JSON.parse(httpResponse.buffer);
// Create playlist objects
for (var i = 0; i < json.feed.entry.length; i++) {
var each = json.feed.entry[i];
// TODO: This needs error handling - if "gsx$" doesn't exist will crash
var name = each["gsx$name"]["$t"];
var playlistid = each["gsx$playlistid"]["$t"];
var description = each["gsx$description"]["$t"];
var imageurl = each["gsx$imageurl"]["$t"];
var categories = each["gsx$categories"]["$t"].split(','); // json returns a string from Sheet - split on commas
// Create Parse objects
var Playlist = Parse.Object.extend("Playlist");
var playlist = new Playlist();
playlist.set("name", name);
playlist.set("playlistId", playlistid);
playlist.set("description", description);
playlist.set("imageUrl", imageurl);
playlist.set("categories", categories);
playlist.add("territories", countryCode);
// Add to caching array
playlistsToSave.push(playlist);
}
},function(httpResponse) {
// error
console.error('Request failed with response code ' + httpResponse.status);
status.error("Scheduled messages error: " + error);
});
}
// *** - 2. This is the saveAll operation that needs to be dependent on all the httpRequests completing
// Parse - Save objects
Parse.Object.saveAll(playlistsToSave, {
success: function(saveList) {
status.success("Objects created successfully.");
},
error: function(error) {
status.error("Unable to save objects.");
}
});
});
Use async.js's each. It lets you make sure all of the httpRequests have completed before the saveAll runs.
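A minimal sketch of that approach, assuming async.js can be loaded in your Cloud Code environment (the require line is an assumption); iterating Object.keys(territories) also sidesteps the for loop over an object in the original, and each iteration gets its own countryCode so there is no closure problem:
var async = require('async');   // assumption: async.js is available in your environment

Parse.Cloud.job("importPlaylists", function(request, status) {
    var spreadsheetID = "someId";
    var territories = { "us" : "od6", "gb" : "olbnsti" /* ... etc, as in the question */ };
    var playlistsToSave = [];
    async.each(Object.keys(territories), function(countryCode, done) {
        var sheetID = territories[countryCode];
        var url = "https://spreadsheets.google.com/feeds/list/" + spreadsheetID + "/" + sheetID + "/public/values?alt=json";
        Parse.Cloud.httpRequest({ url: url }).then(function(httpResponse) {
            var json = JSON.parse(httpResponse.buffer);
            for (var i = 0; i < json.feed.entry.length; i++) {
                var Playlist = Parse.Object.extend("Playlist");
                var playlist = new Playlist();
                // ... set the fields from json.feed.entry[i] as in the question
                playlist.add("territories", countryCode);
                playlistsToSave.push(playlist);
            }
            done();   // this sheet is finished
        }, function(httpResponse) {
            done(new Error('Request failed with response code ' + httpResponse.status));
        });
    }, function(err) {
        // runs once every iterator has called done()
        if (err) {
            return status.error(err.message);
        }
        Parse.Object.saveAll(playlistsToSave).then(function() {
            status.success("Objects created successfully.");
        }, function(error) {
            status.error("Unable to save objects.");
        });
    });
});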
I have the following code set up as a job in the Parse Cloud Code for my application.
Parse.Cloud.job("requestLocations", function (request, response) {Parse.Cloud.httpRequest({
url: 'https://maps.googleapis.com/maps/api/place/nearbysearch/json?location=29.7030428,-98.1364808&radius=900&types=restaurant&key=AIzaSyCTg0x68Q6lrCAo6-A37zkxge81jDEKpvo'
}).then(function (httpResponse) {
// Success
response.success("Success");
var parsedData = JSON.parse(httpResponse.text);
var Location = Parse.Object.extend("Locations");
for (var i = 0; i < parsedData.results.length; i++) {
var restaurant = new Location();
var placeId = parsedData.results[i].place_id;
var name = parsedData.results[i].name;
var vicinity = parsedData.results[i].vicinity;
var point = new Parse.GeoPoint({
latitude: parsedData.results[i].geometry.location.lat,
longitude: parsedData.results[i].geometry.location.lng
});
restaurant.set("placeId", placeId);
restaurant.set("name", name);
restaurant.set("vicinity", vicinity);
restaurant.set("location", point);
restaurant.save(null, {
success: function (location) {
console.log("Object ID: " + location.id);
},
error: function (location, error) {
console.log("Failed to create object, with error code: " + error.message);
}
});
}
}, function (httpResponse) {
// Error
response.error('request failed with response code ' + httpResponse)
});
});
As you can see, this HTTP request should return a total of 14 places. Unfortunately, it only returns 9 places, and which 9 are returned can change. I am assuming there is a problem with the way my function is put together. Can anyone help me remedy this issue? I would like to return as many places as I want based on the radius of the HTTP request.
Thank You
The http request is done right, with a promise that's fulfilled when the request is complete. But your then() block calls response.success right away and then tries to create and save several objects in a loop without waiting for those saves to finish, so the function can end before all 14 places are stored. Fix it like this...
// break it into understandable chunks, too, so, here's a function
// to build a Locations object from the http data
function locationFromResult(result) {
var Location = Parse.Object.extend("Locations");
var restaurant = new Location();
var placeId = result.place_id;
var name = result.name;
var vicinity = result.vicinity;
var point = new Parse.GeoPoint({
latitude: result.geometry.location.lat,
longitude: result.geometry.location.lng
});
restaurant.set("placeId", placeId);
restaurant.set("name", name);
restaurant.set("vicinity", vicinity);
restaurant.set("location", point);
return restaurant;
}
Parse.Cloud.job("requestLocations", function (request, response) {
var url = 'https://maps.googleapis.com/maps/api/place/nearbysearch/json?location=29.7030428,-98.1364808&radius=900&types=restaurant&key=AIzaSyCTg0x68Q6lrCAo6-A37zkxge81jDEKpvo';
Parse.Cloud.httpRequest({url: url}).then(function (httpResponse) {
var parsedData = JSON.parse(httpResponse.text);
var locations = parsedData.results.map(function(result) {
return locationFromResult(result);
});
// this is important, saveAll of the new objects before returning
// this can also be accomplished by saving the objects individually and using Parse.Promise.when()
return Parse.Object.saveAll(locations);
}).then(function(result) {
response.success(JSON.stringify(result));
}, function(error) {
response.error(JSON.stringify(error));
});
});
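The comment in the code above mentions an alternative: save each object individually and combine the save promises with Parse.Promise.when(). A rough sketch of just that portion of the then() handler, replacing the saveAll line:
// Sketch of the alternative: instead of Parse.Object.saveAll(locations),
// save each object and wait on all of the individual save promises.
var savePromises = locations.map(function(location) {
    return location.save();
});
return Parse.Promise.when(savePromises);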
In the app I'm creating, a user will be able to send a description of an object to a number of recipients (from 1 to 200). Using Parse Cloud Code, I'll have to use a promise to wait for the email server's response for every email (Mailgun).
Is there any other way where I can store all those created emails in some kind of array and send them to the email server in one shot? Otherwise I hit the 15-second maximum a Cloud function can run.
Right now I use this:
Parse.Cloud.define("emailobject", function(request, response) {
// ... some company info parameters
var theTraders = request.params.traders;
var objectQ = new Parse.Query("objects");
objectQ.equalTo("objectId", theObjectId);
objectQ.first({
success:function(theObject) {
// ... more code
searchObjectPictures(theObject, {
success:function(pictureObjects) {
for (var a = 0; a < theTraders.length; a++) {
// create the parameters to create the email
var mailingParameters = {};
// ... create the parameters
// when the email html has been compiled
mailgun.sendWelcomeEmail({
to: traderEmail,
from: toString,
subject: subjectString,
html: mailing.getMailingHTML(mailingParameters)
}, {
success:function(httpResponse) {
console.log(httpResponse);
},
error:function(httpResponse) {
console.log(httpResponse);
}
});
}
// emailedObjectsToSave is an array of analytics objects
Parse.Object.saveAll(emailedObjectsToSave, {
success:function(list) {
response.success(true);
},
error:function(error) {
response.error(error);
}
});
},
error:function(error) {
response.error(error);
}
});
},
error:function(error){
response.error(error);
}
});
});
I know promises would be better for nested queries, but I'm still wrapping my head around this.
Thank you
After a lot of searching and trial and error, I have finally found a solution. As #danh said, it's better to add these requests to a job queue. The objects are now saved to a "notifications" class with a boolean "notified" that tracks whether the object has already been sent.
I'll post the code here for people who might also search for it.
First you add your objects to the new "notifications" class (queue).
Parse.Cloud.define("addToNotificationsQueue", function(request, response) {
var roleName = request.params.role;
var objects = request.params.objects;
var newEmailObjectsToSave = [];
var EmailedObjectClass = Parse.Object.extend("notifications");
var emailObjectACL = new Parse.ACL();
emailObjectACL.setPublicReadAccess(false);
emailObjectACL.setRoleReadAccess(roleName, true);
emailObjectACL.setRoleWriteAccess(roleName, true);
for (var i = 0; i < objects.length; i++) {
// define the parameters for the new objects based on the request params
var emailObject = new EmailedObjectClass();
// set all the details to your new emailObject
emailObject.setACL(emailObjectACL);
newEmailObjectsToSave.push(emailObject);
}
Parse.Object.saveAll(newEmailObjectsToSave, {
success:function(list) {
response.success(true);
},
error:function(error) {
response.error(error);
}
});
});
Then define your job. I run it every 15 minutes for now to test. When the mailserver starts acting faster, I'll set the limit and frequency higher.
Parse.Cloud.job("sendNotifications", function(request, status) {
Parse.Cloud.useMasterKey();
var numberOfNotificationsHandled;
var query = new Parse.Query("notifications");
query.limit(10); // limit because max 15 minutes, slow email server...
query.notEqualTo("notified", true);
query.find().then(function(notifications) {
numberOfNotificationsHandled = notifications.length;
var promise = Parse.Promise.as();
_.each(notifications, function(notification) {
var parameterDict = {};
// add all parameters the emailObject function needs
promise = promise.then(function(){
return Parse.Cloud.run("emailObject", parameterDict).then(function(result) {
notification.set("notified", true);
notification.save();
}, function(error) {
notification.set("notified", false);
notification.save();
});
});
});
return promise;
}).then(function() {
console.log("JOB COMPLETED");
status.success("Sent " + numberOfNotificationsHandled + " emails");
}, function(error) {
console.log("JOB ERROR " + error);
status.error("Job error " + error);
});
});
The emailObject function is just an httpRequest like any other, and the job above waits for each of those httpRequests to answer before moving on.
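For reference, a rough sketch of what such a promise-based emailObject function could look like, posting one message to Mailgun's HTTP API through Parse.Cloud.httpRequest. The key, domain, and parameter names below are placeholders and assumptions, not taken from the original code:
var MAILGUN_API_KEY = "key-placeholder";      // placeholder, use your own key
var MAILGUN_DOMAIN = "example.mailgun.org";   // placeholder, use your own domain

Parse.Cloud.define("emailObject", function(request, response) {
    var p = request.params;
    Parse.Cloud.httpRequest({
        method: 'POST',
        url: 'https://api:' + MAILGUN_API_KEY + '@api.mailgun.net/v3/' + MAILGUN_DOMAIN + '/messages',
        body: {
            from: p.from,
            to: p.to,
            subject: p.subject,
            html: p.html
        }
    }).then(function(httpResponse) {
        response.success(httpResponse.text);
    }, function(httpResponse) {
        response.error('Mailgun request failed with status ' + httpResponse.status);
    });
});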
When I try to run the count function, I get:
Error: Parse Objects not allowed here
E2015-11-09T12:36:10.778Z]v184 Ran cloud function count with:
Input: {}
Result: Error: Parse Objects not allowed here
at n (Parse.js:16:1063)
at Parse.js:16:1927
at Array.map (native)
at n (Parse.js:16:1904)
at n (Parse.js:16:1995)
at r.default (Parse.js:16:2422)
at Object.o.default.setCloudController.run (Parse.js:13:2159)
at Object.n [as run] (Parse.js:13:1730)
at e.query.find.success (main.js:10:19)
at e.<anonymous> (Parse.js:14:28224)
My search results led me to this question, but all the tutorials show sending parameters this way, and this code used to work fine.
Count Function:
Parse.Cloud.define('count', function(request, response) {
var query = new Parse.Query('MyS');
query.equalTo("Notify", true);
query.notEqualTo ("MainEventCode", '5');
query.find({
success: function(results) {
Parse.Cloud.run('http', {params : results}).then(
function(result) {
console.log('httpResponse is : ' + result);
response.success('Done !');
}, function(error) {
console.log('Error while RUN !' + error);
});
},
error: function(error) {
response.error(error);
}
});
});
http Function:
Parse.Cloud.define('http', function(request, response) {
var query = new Parse.Query(Parse.Installation);
.
.
.
});
I'm assuming results is an array of PFObjects. Unfortunately, you cannot send PFObjects or an array containing PFObjects as cloud function parameters. Instead, you'll need to send an array of their object ids and retrieve the actual objects in your http function.
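A sketch of that idea, assuming the same 'MyS' class as in the question; the ids parameter name is only an illustration, and the body of the http function would carry on with your existing Installation logic:
Parse.Cloud.define('count', function(request, response) {
    var query = new Parse.Query('MyS');
    query.equalTo("Notify", true);
    query.notEqualTo("MainEventCode", '5');
    query.find().then(function(results) {
        // send plain strings (object ids), not the Parse objects themselves
        var ids = results.map(function(obj) { return obj.id; });
        return Parse.Cloud.run('http', { ids: ids });
    }).then(function(result) {
        console.log('httpResponse is : ' + result);
        response.success('Done !');
    }, function(error) {
        response.error(error);
    });
});

Parse.Cloud.define('http', function(request, response) {
    var ids = request.params.ids || [];
    var query = new Parse.Query('MyS');
    query.containedIn("objectId", ids);
    query.find().then(function(objects) {
        // ... continue with the Installation query / push logic using `objects`
        response.success(objects.length);
    }, function(error) {
        response.error(error);
    });
});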
I recommend passing the object id to the function. Then, in the cloud function:
var Movie = Parse.Object.extend("MovieClass");
Parse.Cloud.define("averageStars", async (request) => {
//Creating the ParseObject
var pointer = Movie.createWithoutData(request.params.obj_id);
//Creating a query
const query = new Parse.Query("ReviewClass");
query.equalTo("field_pointer_movie",pointer);
const results = await query.find();
let sum = 0;
for (let i = 0; i < results.length; ++i) {
sum += results[i].get("stars");
}
return sum / results.length;
});
This doc helped me out.
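For completeness, calling that function might look roughly like this; the objectId here is only a placeholder:
// Hypothetical usage; "xWMyZ4YEGZ" is just a placeholder objectId for a Movie.
Parse.Cloud.run("averageStars", { obj_id: "xWMyZ4YEGZ" }).then(function(average) {
    console.log("average stars: " + average);
});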
What I am trying to do here is:
Remove all contents in a class first, because every day the events.json file will be updated. I have my first question here: is there a better way to remove all contents from a database class on Parse?
Then I will send a request to get the events.json and store "name" and "id" of the result into a 2D array.
Then I will send multiple requests to get json files of each "name" and "id" pairs.
Finally, I will store the event details in the database (one event per row). But right now my code terminates before it has downloaded the json files.
Code:
function newLst(results) {
var event = Parse.Object.extend("event");
for (var i = 0; i < results.length; i++){
Parse.Cloud.httpRequest({
url: 'https://api.example.com/events/'+ results[i].name +'/'+ results[i].id +'.json',
success: function(newLst) {
var newJson = JSON.parse(newLst.text);
var newEvent = new event();
newEvent.set("eventId",newJson.data.id);
newEvent.set("eventName",newJson.data.title);
newEvent.save(null, {
success: function(newEvent) {
alert('New object created with objectId: ' + newEvent.id);
},
error: function(newEvent, error) {
alert('Failed to create new object, with error code: ' + error.message);
}
});
},
error: function(newLst) {
}
});
}
};
Parse.Cloud.job("getevent", function(request, status) {
var event = Parse.Object.extend("event");
var query = new Parse.Query(event);
query.notEqualTo("objectId", "lol");
query.limit(1000);
query.find({
success: function(results) {
for (var i = 0; i < results.length; i++) {
var myObject = results[i];
myObject.destroy({
success: function(myObject) {
},
error: function(myObject, error) {
}
});
}
},
error: function(error) {
alert("Error: " + error.code + " " + error.message);
}
});
var params = { url: 'https://api.example.com/events.json'};
Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var jsonobj = JSON.parse(httpResponse.text);
for (var i = 0; i < jsonobj.data.length; i++) {
var tmp2D = {"name":"id"}
tmp2D.name = [jsonobj.data[i].name];
tmp2D.id = [jsonobj.data[i].id];
results.push(tmp2D);
}
newLst(results);
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});
I think my original answer is correct as a standalone. Rather than make it unreadable with the additional code, here it is made very specific to your edit.
The key is to eliminate passed callback functions. Everything below uses promises. Another key idea is to decompose the activities into logical chunks.
A couple of caveats: (1) There's a lot of code here, and the chances that either your code or mine contains a mistake are still high, but this should communicate the gist of a better design. (2) We're doing enough work in these functions that we might bump into a parse-imposed timeout. Start out by testing all this with small counts.
Start with your question about destroying all instances of class...
// return a promise to destroy all instances of the "event" class
function destroyEvents() {
// is your event class really named with lowercase? uppercase is conventional
var query = new Parse.Query("event");
query.notEqualTo("objectId", "lol"); // doing this because the OP code did it. not sure why
query.limit(1000);
return query.find().then(function(results) {
return Parse.Object.destroyAll(results);
});
}
Next, get remote events and format them as simple JSON. See the comment. I'm pretty sure your idea of a "2D array" was ill-advised, but I may be misunderstanding your data...
// return a promise to fetch remote events and format them as an array of objects
//
// note - this differs from the OP data. this will evaluate to:
// [ { "name":"someName0", id:"someId0" }, { "name":"someName1", id:"someId1" }, ...]
//
// original code was producing:
// [ { "name":["someName0"], id:["someId0"] }, { "name":["someName1"], id:["someId1"] }, ...]
//
function fetchRemoteEvents() {
var params = { url: 'https://api.example.com/events.json'};
return Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var remoteEvents = JSON.parse(httpResponse.text).data;
for (var i = 0; i < remoteEvents.length; i++) {
var remoteEvent = { "name": remoteEvents[i].name, "id": remoteEvents[i].id };
results.push(remoteEvent);
}
return results;
});
}
Please double check all of my work above regarding the format (e.g. response.text, JSON.parse().data, etc).
It's too easy to get confused when you mix callbacks and promises, and even worse when you're generating promises in a loop. Here again, we break out a simple operation: creating a single parse.com object based on one of the remote events we got in the function above...
// return a promise to create a new native event based on a remoteEvent
function nativeEventFromRemoteEvent(remoteEvent) {
var url = 'https://api.example.com/events/'+ remoteEvent.name +'/'+ remoteEvent.id +'.json';
return Parse.Cloud.httpRequest({ url:url }).then(function(response) {
var eventDetail = JSON.parse(response.text).data;
var Event = Parse.Object.extend("event");
var event = new Event();
event.set("eventId", eventDetail.id);
event.set("eventName", eventDetail.title);
return event.save();
});
}
Finally, we can bring it together in a job that is simple to read, certain to do things in the desired order, and certain to call success() when (and only when) it finishes successfully...
// the parse job removes all events, fetches remote data that describe events
// then builds events from those descriptions
Parse.Cloud.job("getevent", function(request, status) {
destroyEvents().then(function() {
return fetchRemoteEvents();
}).then(function(remoteEvents) {
var newEventPromises = [];
for (var i = 0; i < remoteEvents.length; i++) {
var remoteEvent = remoteEvents[i];
newEventPromises.push(nativeEventFromRemoteEvent(remoteEvent));
}
return Parse.Promise.when(newEventPromises);
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});
The posted code does just one http request so there's no need for an array of promises or the invocation of Promise.when(). The rest of what might be happening is obscured by mixing the callback parameters to httpRequest with the promises and the assignment inside the push.
Here's a clarified rewrite:
Parse.Cloud.job("getevent", function(request, status) {
var promises = [];
var params = { url: 'https://api.example.com'};
Parse.Cloud.httpRequest(params).then(function(httpResponse) {
var results = [];
var jsonobj = JSON.parse(httpResponse.text);
for (var i = 0; i < jsonobj.data.length; i++) {
// some code
}
}).then(function() {
status.success("run job");
}, function(error) {
status.error(error);
});
});
But there's a very strong caveat here: this works only if the "// some code" that appears in your original post doesn't itself try to do any async work, database or otherwise.
Let's say you do need to do async work in that loop. Move that work to a promise-returning function, collect the returned promises in an array, and then use Promise.when(). For example...
// return a promise to look up some object, change it and save it...
function findChangeSave(someJSON) {
var query = new Parse.Query("SomeClass");
query.equalTo("someAttribute", someJSON.lookupAttribute);
return query.first().then(function(object) {
object.set("someOtherAttribute", someJSON.otherAttribute);
return object.save();
});
}
Then, in your loop...
var jsonobj = JSON.parse(httpResponse.text);
var promises = [];
for (var i = 0; i < jsonobj.data.length; i++) {
// some code, which is really:
var someJSON = jsonobj.data[i];
promises.push(findChangeSave(someJSON));
}
return Parse.Promise.when(promises);
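Putting the skeleton and the loop together, the whole job might look roughly like this sketch, with findChangeSave defined as above:
Parse.Cloud.job("getevent", function(request, status) {
    var params = { url: 'https://api.example.com'};
    Parse.Cloud.httpRequest(params).then(function(httpResponse) {
        var jsonobj = JSON.parse(httpResponse.text);
        var promises = [];
        for (var i = 0; i < jsonobj.data.length; i++) {
            promises.push(findChangeSave(jsonobj.data[i]));
        }
        // resolves only when every findChangeSave promise has resolved
        return Parse.Promise.when(promises);
    }).then(function() {
        status.success("run job");
    }, function(error) {
        status.error(error);
    });
});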
I need to pull data from a series of .csv files off the server. I am converting the csvs into arrays and I am trying to keep them all in an object. The ajax requests are all successful, but for some reason only the data from the last request ends up in the object. Here is my code:
var populate_chart_data = function(){
"use strict";
var genders = ["Boys","Girls"];
var charts = {
WHO: ["HCFA", "IWFA", "LFA", "WFA", "WFL"],
CDC: ["BMIAGE", "HCA", "IWFA", "LFA", "SFA", "WFA", "WFL", "WFS"]
};
var fileName, fileString;
var chart_data = {};
for (var i=0; i < genders.length; i++){
for (var item in charts){
if (charts.hasOwnProperty(item)){
for (var j=0; j<charts[item].length; j++) {
fileName = genders[i] + '_' + item + '_' + charts[item][j];
fileString = pathString + fileName + '.csv';
$.ajax(fileString, {
success: function(data) {
chart_data[fileName] = csvToArray(data);
},
error: function() {
console.log("Failed to retrieve csv");
},
timeout: 300000
});
}
}
}
}
return chart_data;
};
var chart_data = populate_chart_data();
The console in Firebug shows every ajax request successful, but when I step through the loops, my chart_data object is empty until the final loop. This is my first foray into ajax. Is it a timing issue?
There are two things you need to consider here:
The AJAX calls are asynchronous; this means your callback will only be called once the data has been received. Meanwhile your loop keeps going and queueing new requests.
Since the loop keeps going, the value of fileName will change before your callback is executed.
So you need to do two things:
Push the requests into an array and only continue once all of them have completed
Create a closure so your fileName doesn't change
var chart_data = {};
var requests = [];
for (var j=0; j<charts[item].length; j++) {
fileName = genders[i] + '_' + item + '_' + charts[item][j];
fileString = pathString + fileName + '.csv';
var onSuccess = (function(filenameinclosure){ // closure for your filename
return function(data){
chart_data[filenameinclosure] = csvToArray(data);
};
})(fileName);
requests.push( // saving requests
$.ajax(fileString, {
success: onSuccess,
error: function() {
console.log("Failed to retrieve csv");
},
timeout: 300000
})
);
}
$.when.apply(undefined, requests).done(function () {
// chart_data is filled up
});
I'm surprised that any data ends up in the object. The thing about ajax is that you can't depend on ever knowing when a request will complete (or whether it will complete at all). Therefore any work that depends on the retrieved data must be done in the ajax callbacks. You could do something like this:
var requests = [];
var chart_data = {};
/* snip */
requests.push($.ajax(fileString, {
    /* snip */
}));
$.when.apply(undefined, requests).done(function () {
//chart_data should be full
});
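For a fuller picture, here is a sketch of populate_chart_data rewritten around these ideas, assuming csvToArray and pathString exist as in the question; the function now returns a promise rather than the (still empty) chart_data object:
var populate_chart_data = function () {
    "use strict";
    var genders = ["Boys", "Girls"];
    var charts = {
        WHO: ["HCFA", "IWFA", "LFA", "WFA", "WFL"],
        CDC: ["BMIAGE", "HCA", "IWFA", "LFA", "SFA", "WFA", "WFL", "WFS"]
    };
    var chart_data = {};
    var requests = [];
    genders.forEach(function (gender) {
        Object.keys(charts).forEach(function (item) {
            charts[item].forEach(function (chart) {
                // each iteration gets its own fileName, so there is no closure problem
                var fileName = gender + '_' + item + '_' + chart;
                var fileString = pathString + fileName + '.csv';
                requests.push(
                    $.ajax(fileString, { timeout: 300000 }).done(function (data) {
                        chart_data[fileName] = csvToArray(data);
                    }).fail(function () {
                        console.log("Failed to retrieve csv");
                    })
                );
            });
        });
    });
    // resolves once every request has completed
    return $.when.apply(undefined, requests).then(function () {
        return chart_data;
    });
};

// The caller can only use the data inside the callback:
populate_chart_data().done(function (chart_data) {
    // chart_data is filled up here
});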