Get data from 2nd level pointer in Parse - javascript

I have a setup with three relevant classes: _User, Article, and Profile. Article has a pointer named author to _User, and Profile likewise has a pointer to _User, but named user.
Now I want to retrieve data from Article together with the columns firstname and lastname from Profile, where the author pointer in Article and the user pointer in Profile both reference the same objectId in _User.
Basically, what I would solve with an inner join in SQL.
How do I go about this with just one Parse call?
This is what I have so far:
var Article = Parse.Object.extend("Article");
var query = new Parse.Query(Article);
query.include("category");
query.find({
    success: function(results) {
        console.log("Successfully retrieved " + results.length + " article(s):");
        // Do something with the returned Parse.Object values
        for (var i = 0; i < results.length; i++) {
            var object = results[i];
            console.log(object.get('title'));
            console.log(object.get('content'));
            console.log(object.get('category').get("categoryName"));
        }
    },
    error: function(error) {
        console.log("Error: " + error.code + " " + error.message);
    }
});

It's a pleasure answering questions where the OP took the trouble to include a complete (and minimal) description of the data and the desired result.
I think I understand that you want to get Articles, and for each one the matching Profile, with Articles and Profiles (logically) joined via their common pointer to _User.
This can be done with one additional query per article. For clarity and maintainability, I like to break these things up into short, logical, promise-returning functions, so...
// for handy array functions, like _.map, and turning var args into simple arrays
var _ = require('underscore');
// note: include underscore this way in cloud code (nodejs)
// for browser, see underscorejs.org to add to your project

// this will answer a promise that is fulfilled with an array of the form:
// [ { article:article_object, profile:profile_object }, {...}, ... ]
function articlesWithProfiles() {
    var query = new Parse.Query("Article");
    query.include("category");
    query.include("author");
    return query.find().then(function(articles) {
        var promises = _.map(articles, function(article) {
            return profileForArticle(article);
        });
        return Parse.Promise.when(promises);
    });
}
// return a promise that's fulfilled by associating the given article with its profile
function profileForArticle(article) {
    var author = article.get("author");
    var query = new Parse.Query("Profile");
    query.equalTo("user", author);
    return query.first().then(function(profile) {
        return { article: article, profile: profile };
    });
}
// call it like this
articlesWithProfiles().then(function() {
    // see edit below
    var result = _.toArray(arguments);
    console.log(JSON.stringify(result));
}, function(error) {
    // handle error
    console.log(JSON.stringify(error));
});
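If a single request is a hard requirement and you only need the profiles themselves (not article/profile pairs), Parse's inner-query support can express the join server-side. A minimal sketch using matchesKeyInQuery, assuming the class and field names from the question:
// keep profiles whose "user" pointer equals the "author" pointer of some Article
var articleQuery = new Parse.Query("Article");
var profileQuery = new Parse.Query("Profile");
profileQuery.matchesKeyInQuery("user", "author", articleQuery);
profileQuery.find().then(function(profiles) {
    _.each(profiles, function(profile) {
        console.log(profile.get("firstname") + " " + profile.get("lastname"));
    });
});
Note, though, that this loses the per-article pairing, which is why the per-article query above is the more general answer.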

Related

Insert document loop - RangeError: Maximum call stack size exceeded

I am literally taking my first steps with Node and MongoDB, and I have recently hit this RangeError wall.
Here's what I am trying to do: I have a file that contains a list of countries that I would like to add to my Mongo database. This is part of my "seed" mechanism to get the app running.
I load the JSON and then iterate through the collection of objects, adding them one by one to the 'Countries' collection.
However, every time I run the code, I get a "RangeError: Maximum call stack size exceeded".
I have googled around, but none of the suggested solutions seem to apply to me.
My guess is there is something wrong with my insertCountry function...
Anyways, here's my code:
var mongoose = require('mongoose');
var countries = require('./seed/countries.json');

// mongodb
var Extra = mongoose.Schema({
    exampleField: Boolean,
    anotherField: Boolean
});
var Country = mongoose.Schema({
    name: String,
    code: String,
    extra: [Extra]
});
var mCountry = mongoose.model('Countries', Country);
var mExtra = mongoose.model('Extras', Extra);

// do connection
mongoose.connect('...');
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error'));
db.once('open', function callback() {
});

// async function
var insertCountry = function(document, callback) {
    db.model('Countries').count({code: document.code}, function (err, count) {
        if (count < 1) {
            db.collection('Countries').insert(document, function (err, result) {
                if (!err) {
                    console.log('country ' + document.name + ' added');
                }
                else {
                    console.log('- [' + document.name + '] ' + err);
                }
            });
        }
        callback(null, document);
    });
};

// doing countries
var Country = mongoose.model('Countries');
var Extras = mongoose.model('Extras');
for (i = 0; i < countries.length; i++)
{
    nCountry = new Country();
    nCountry.name = countries[i].name;
    nCountry.code = countries[i].code;
    nCountry.benefits = new Extras();
    nCountry.benefits.exampleField = false;
    nCountry.benefits.anotherField = false;
    insertCountry(nCountry, function (err, value) {
        console.log(value.name + ' added to collection (callback)');
    });
}
I have been using some guides I found to build this, so it might not be optimal code. Any best practices, standards, guides or tutorials you can share are most welcome!
Your callback is in the wrong place: it does not wait for the insert operation to complete before firing. Altering your code:
var insertCountry = function(document, callback) {
    db.model('Countries').count({code: document.code}, function (err, count) {
        if (count < 1) {
            db.collection('Countries').insert(document, function (err, result) {
                if (!err) {
                    console.log('country ' + document.name + ' added');
                }
                else {
                    console.log('- [' + document.name + '] ' + err);
                }
                callback(null, document);
            });
        }
    });
};
That is part of your problem, but it does not completely solve it. The other part is the loop, which also does not wait for the wrapping function to complete before moving on. You want something like async.eachSeries in order to wait for inserts to complete before performing the next iteration. This is mostly why you are exceeding the call stack:
async.eachSeries(
    countries,
    function(current, callback) {
        // make your nCountry object
        insertCountry(nCountry, function(err, value) {
            // do something, then
            callback(err);
        });
    },
    function(err) {
        // called when done; err contains the error if one was set
        console.log("done");
    }
);
There is really still an issue with the array, which must be reasonably large if you are exceeding the call stack limit. You should probably look at using event streams to process it rather than loading everything into memory as an array.
Personally, if you were just trying to avoid inserting duplicates for a field and had MongoDB 2.6 available, I would just use the Bulk Operations API with "unordered operations" and allow non-fatal failures on the duplicate keys. Coupled with the fact that bulk operations are sent in "batches" and not one at a time, this is much more efficient than checking for presence on every request:
var CountrySchema = mongoose.Schema({
    name: String,
    code: { type: String, unique: true }, // define a unique index
    extra: [Extra]
});
var Country = mongoose.model('Countries', CountrySchema); // the model gives access to the native collection

var insertCountries = function(countries, callback) {
    var bulk = Country.collection.initializeUnorderedBulkOp();
    var counter = 0;
    async.eachSeries(
        countries,
        function(current, callback) {
            // same object construction
            bulk.insert(nCountry);
            counter++;
            // only send once every 1000
            if ( counter % 1000 == 0 ) {
                bulk.execute(function(err, result) {
                    // err should generally not be set
                    // but result would contain any duplicate errors
                    // along with other insert responses
                    // reset the bulk operation and continue
                    bulk = Country.collection.initializeUnorderedBulkOp();
                    callback();
                });
            } else {
                callback();
            }
        },
        function(err) {
            // send anything still queued
            if ( counter % 1000 != 0 ) {
                bulk.execute(function(err, result) {
                    // same as before but no need to reset
                    callback(err);
                });
            } else {
                callback(err); // nothing queued; report completion directly
            }
        }
    );
};
mongoose.on("open",function(err,conn) {
insertCountries(countries,function(err) {
console.log("done");
});
});
Keep in mind that, unlike the methods implemented directly on the mongoose models, the native driver methods require that a connection is actually established before they can be called. Mongoose "queues" these up for you, but otherwise you need something to make sure the connection is actually open; hence the use of the "open" event here.
Take a look at event streams as well. If you are constructing an array large enough to cause a problem by missing callback execution, then you probably should not be loading it all into memory from whatever your source is. Stream processing of that source, combined with an approach as shown above, should provide efficient loading.
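To make the streaming suggestion concrete, here is a minimal sketch using the JSONStream package (an assumed dependency; any streaming JSON parser would do), pausing the stream while each insert is in flight:
var fs = require('fs');
var JSONStream = require('JSONStream'); // assumed: npm install JSONStream
var stream = fs.createReadStream('./seed/countries.json').pipe(JSONStream.parse('*'));
stream.on('data', function (country) {
    stream.pause(); // stop reading while the insert is in flight
    insertCountry(country, function (err) {
        if (err) console.log(err);
        stream.resume(); // move on to the next document
    });
});
stream.on('end', function () {
    console.log('all countries processed');
});
This keeps only one document in memory at a time, so neither the array size nor the pending-callback count grows with the input.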

In Parse.com's Cloud code, asynchronous code is giving variables in for-loop the wrong value

I'm trying to save different food names without duplicates on parse.com. However, when I run the code, the database consists of the same 2 or 3 foods over and over, instead of 200 or so unique names.
Below is my function. I tried logging the name of the food at two different points, and I get different values. The first point gives the correct name of the food, but the second point only shows either flaxseed muffins or raspberry pie. I think the problem has to do with the code running asynchronously, but I'm not sure how to resolve the issue.
Parse.Cloud.define("recordFavorite", function(request, response) {
var foodList = request.params.foodList; //string array of food names
var Food = Parse.Object.extend("Food");
var query = new Parse.Query(Food);
for (i = 0; i < foodList.length; i++ ) {
var name = foodList[i];
console.log("before name is " + name);
var query = new Parse.Query(Food);
query.exists("name", name);
query.find({
success: function(results) {
if(results.length == 0){
var food = new Food();
food.set("name", name);
food.save(null, {
success: function(food) {
console.log("saved with name " + name);
},
error: function(food, error) {
}
});
} else {
//don't create new food
}
},
error: function(error) {
}
});
}
});
EDIT:
I was able to make some progress by modifying it to the code pasted below. Now it saves all the objects, including duplicates. I noticed that the lines
var query = new Parse.Query(Food);
query.exists("name", name);
return an array of all the foods rather than filtering to the objects whose name matches. (To be clear, this was probably still occurring in the original code, but I hadn't noticed.)
Parse.Cloud.define("recordFavorite", function(request, response) {
var foodList = request.params.foodList; //string array of food names
var foodListCorrected = new Array();
var Food = Parse.Object.extend("Food");
// Wrap your logic in a function
function process_food(i) {
// Are we done?
if (i == foodList.length) {
Parse.Object.saveAll(foodListCorrected, {
success: function(foodListCorrected) {
},
error: function(foodListCorrected) {
}
});
return;
}
var name = foodList[i];
var query = new Parse.Query(Food);
query.exists("name", name);
query.find({
success: function(results) {
console.log(results.length);
if(results.length == 0){
//console.log("before " + foodListCorrected.length);
var food = new Food();
food.set("name", name);
foodListCorrected.push(food);
// console.log(foodListCorrected.length);
} else {
//don't create new food
}
process_food(i+1)
},
error: function(error) {
console.log("error");
}
});
}
// Go! Call the function with the first food.
process_food(0);
});
I think you're right that the problem is the async logic. The outer loop completes as quickly as it can, firing off the various, slower async calls for your food lookup queries as it goes. The outer loop doesn't wait, and because of what's known as 'variable hoisting', when you access 'name' inside your success function its value will be the latest value of 'name' from the outer loop. So by the time the success function is called, the value of name has moved on to a different food from when you first initiated the exists/save query sequence.
Here's a really simple example:
Say your foodList looked like ['Muffin', 'Cheesecake']. When you enter the loop for the first time, you have name='Muffin'. You fire off your exists query for name='Muffin' and that now happens asynchronously. Meanwhile, the outer loop happily moves on and sets name='Cheesecake' and fires off another exists query. Meanwhile, your first exists query completes and you are now ready to save the first food. But, because of hoisting, the value of name within your success function is now 'Cheesecake'. So it saves 'Cheesecake' when it should have saved 'Muffin'. Then the second set of async queries completes, and this one also saves 'Cheesecake'. So you get two foods, representing your two unique foods, but both are called 'Cheesecake'!
Here's the classic article on variable hoisting, it is well worth a read:
http://www.adequatelygood.com/JavaScript-Scoping-and-Hoisting.html
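The effect is easy to reproduce in isolation; here is a minimal sketch of the same capture problem, independent of Parse:
for (var i = 0; i < 3; i++) {
    setTimeout(function () {
        console.log(i); // logs 3, 3, 3 -- every callback sees the final value of i
    }, 0);
}
// Each callback closes over the variable `i` itself, not its value at the time
// the callback was created -- exactly what happens to `name` in the loop above.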
A way of solving this would be to only trigger the processing of the next food once all the async calls for the current food have completed. You can do this like this:
Parse.Cloud.define("recordFavorite", function(request, response) {
var foodList = request.params.foodList; //string array of food names
var Food = Parse.Object.extend("Food");
var query = new Parse.Query(Food);
// Wrap your logic in a function
function process_food(i) {
// Are we done?
if (i == foodList.length) return;
var name = foodList[i];
console.log("before name is " + name);
var query = new Parse.Query(Food);
query.exists("name", name);
query.find({
success: function(results) {
if(results.length == 0){
var food = new Food();
food.set("name", name);
food.save(null, {
success: function(food) {
console.log("saved with name " + name);
// Move onto the next food, only after all the async operations
// have completed.
process_food(i+1)
},
error: function(food, error) {
}
});
} else {
//don't create new food
}
},
error: function(error) {
}
});
}
// Go! Call the function with the first food.
process_food(0);
});
(Note, I've not tested this code, so there might be syntax errors).
I've not come across Parse before... I saw your question, went off to read about it, and thought it looked very interesting! I will remember it for my next PHP API project. I think there are some smarter things you can try to do. For example, your approach requires 2 async calls per food, one to see if it exists, and one to save it. For 200 foods, that's 400 async calls. However, the Parse API looks very helpful, and I think it will offer tools to help you cut this down. You could probably try something along the following lines:
You already have an array of strings of the names you want to save:
var foodList = request.params.foodList; //string array of food names
Say it looks like ["Cupcakes", "Muffins", "Cake"].
Now build a Parse query that gets all food names already on the server. (I don't know how to do this!). But you should get back an array, let's say ["Cupcakes", "Cheesecake"].
Now you can strip the duplicates in JavaScript. There'll be some nice questions here on StackOverflow to help with this! The result will be that "Cupcakes" is a duplicate, so we are left with the array ["Muffins", "Cake"].
Now it looks like in Parse you can Batch some operations:
https://parse.com/docs/rest#objects-batch
so your goal is to save this array of ["Muffins", "Cake"] with one API call.
This approach will scale well with the number of foods, so even with 200 foods, you should be able to do it in one query, and one batch update per 50 foods (I think 50 is a batch limit, from the Parse docs), so at most you will need 5 API calls.
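A sketch of that approach, assuming underscore is available in Cloud Code (as in other answers here); containedIn finds the already-saved names in one query, and saveAll persists the rest in batches:
var _ = require('underscore');
var query = new Parse.Query(Food);
query.containedIn("name", foodList); // one call: which of these names already exist?
query.find().then(function (existing) {
    var existingNames = _.map(existing, function (f) { return f.get("name"); });
    var newFoods = _.chain(foodList)
        .uniq()                     // drop duplicates within the request itself
        .difference(existingNames)  // drop names already on the server
        .map(function (name) {
            var food = new Food();
            food.set("name", name);
            return food;
        })
        .value();
    return Parse.Object.saveAll(newFoods); // the SDK batches these for you
});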
I believe this (https://www.parse.com/docs/js_guide#promises-series) is the solution you're looking for. You need to utilize promises to force synchronicity.

Fetching Object's values from a returned Array of a Query (Parse.com + Node.js)

I've used the Parse SDK before, but for native mobile dev - both Objective-C & Java.
However, I am by no means, a JavaScript developer.
I'm experimenting with Node and am using Parse to store some stuff for an API I'm making.
This is probably as much a JavaScript incompetency as it is a Parse issue.
App Context
I do a specific twitter scrape each day - a TweetDay - which is basically just a Parse row with an Array value; this is an Array of TweetPairs.
A TweetPair is just a pairing of the tweet text and its associated id.
{ "id": "NCnjSDnjScn",
"text" : "Be cool Yolanda, be cool!" }
//Look for a Day in the database against the day we supply
query.equalTo("createdAt", req.params.date);
query.find({
    success: function(results) {
        console.log("Successfully retrieved " + results.length + " item");
        // assume there is only ever one result
        var object = results[0];
        console.log(object.id);
        var tweetsPointer = object.get("Pairs");
        //## IT'LL BLOW UP HERE ##
        // tweetsPointer isn't a Parse object, so it doesn't know about .fetch
        tweetsPointer.fetch({
            success: function(tweets) {
                // The object was refreshed successfully.
                console.log(tweets);
                var arr = new Array();
                for (var i = 0; i < tweets.length; i++) {
                    arr[i] = tweets[i]["text"];
                }
                // just return an array of the tweet messages for that day.
                res.send(arr);
            },
            error: function(myObject, error) {
                // The object was not refreshed successfully.
                // error is a Parse.Error with an error code and description.
            }
        });
    },
    error: function(error) {
        console.log("Error: " + error.code + " " + error.message);
        res.send("Error: " + error.code + " " + error.message);
    }
});
This is what I'm trying to do. Usually in the iOS SDK I could do a query, then call a fetch on the pointers that the query returned to get the actual values (because the query returned PFObject pointers). It's worth noting that I get the correct Parse object id back, and my Pairs array is just a collection of the correct pointers.
What I'm Asking
Once a Parse query returns me a result (lazily loaded, no values), how do I then fetch the values for that result?
i.e.: My query returns an Object with an array of Parse IDs, as opposed to the actual values.
How do I now populate my Object's array with the values?
What I currently receive
If, in the above code, for the success case, I simply return the results object, i.e.:
success: function(results) {
    // assuming only ever one result
    var object = results[0];
    res.send(object);
    ...
}
I happily see in my browser
{
    Pairs: [
        {
            __type: "Pointer",
            className: "TweetPair",
            objectId: "wzDNeKJO2n"
        },
        {
            __type: "Pointer",
            className: "TweetPair",
            objectId: "cwXMSPTYEb"
        },
        {
            __type: "Pointer",
            className: "TweetPair",
            objectId: "0FEPlokeIo"
        },
        ..
        ..
    ],
    objectId: "5TX1Do98jY",
    createdAt: "2014-04-27T07:30:51.658Z",
    updatedAt: "2014-04-27T07:30:51.658Z"
}
This is what I expect from my experience with Parse. What I want, however, is not just the pointers to the tweets, but the tweets themselves, i.e.:
{
    Pairs: [
        {
            id: "<twitter id>",
            text: "Be cool yolanda, be cool!"
        },
        {
            id: "sdjvbesjvhBJH",
            text: "Zeds dead baby, Zeds dead."
        },
        ..
        ..
    ],
    objectId: "5TX1Do98jY",
    createdAt: "2014-04-27T07:30:51.658Z",
    updatedAt: "2014-04-27T07:30:51.658Z"
}
This is what .fetch is usually used for, i.e.: I would call .fetch on this array to replace the pointers with their actual values (the tweet messages).
I have been able to do this in the past with the iOS SDK; I'm really just asking about the JS SDK specifically: how do I go about calling fetch on a result? (In iOS the result would already be of type PFObject, so it was easy.)
The Parse Console (for completeness)
Cheers.
So I found this handy little method called .include():
Code as above
query.equalTo("createdAt", req.params.date );
query.include("Pairs"); // <-----------------------------win
query.find({
success: function(results) {
console.log("Successfully retrieved " + results.length + " item");
//Assume one result
var object = results[0];
var pairs = object.get("Pairs");
var arr = new Array();
for (var i = 0; i < pairs.length; i++) {
arr[i] = pairs[i].get("text");
}
res.send(arr);
...
}
Returns a list of tweets, not just the pointers to the tweets.
[
    "I can feel the collingwood jealousy haha",
    "#Real_Liam_Payne Now you know how i feel",
    "Oh good, I feel much better, after wasting what was meant to be a study day. Classic Charle.",
    "RT #NatalieTosh: Almost feel as nervous as if I was watching my own team. #ALeagueFinals #GoRoar",
    "#BronB28 Feel the warmth of the ground All roads lead to us around Through endless sunsets and towns I can feel it sitting down here"
]
see: https://www.parse.com/questions/javascriptjquery-pointer
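As an aside, include also accepts dot notation, so if TweetPair itself held pointers you needed, something like the following would fetch them in the same query (the author field here is hypothetical, just to illustrate):
query.include("Pairs");
query.include("Pairs.author"); // hypothetical nested pointer on TweetPair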

Serial Promises and Response.Success in Cloud Code

I'm a little confused about where to place a response.success() when using serial Promises.
Here's the situation: I've got a cloud function that accepts an array of email addresses and the current user. The function does the following:
Finds the current user based upon its user object id.
Iterates over the array of emails addresses
Find if there is an existing user for each given email address
If there is an existing user, we check to see if the existing user and the current user are friends
If they are not friends, it creates a friendship.
Now when I run this function without a response.success(), it does exactly what I expect and the friendship entries are created. But no matter where I place the response in the code, I get the resulting response.success message and none of the serialized promises execute.
Why the resulting success/failure matters: I'm executing this function from an iOS app, and I'd like to handle the success or failure cases correctly on the iOS side.
Here is the cloud function:
Parse.Cloud.define("friendExistingUsers", function(request, response) {
// Get our parameters
var addresses = request.params.emailAddresses;
var userId = request.params.user;
// Query for our user
var userQuery = new Parse.Query("User");
userQuery.equalTo("objectId", userId)
userQuery.first().then(function(currentUser) {
// if we find the user, walk the addresses
var promise = Parse.Promise.as("success");
_.each(addresses, function(address) {
console.log(address);
// add a then to our promise to handle whether a relationship is
// being created.
promise = promise.then(function() {
// find if there is a user for that address
var emailQuery = new Parse.Query("User");
emailQuery.equalTo("email", address);
emailQuery.first().then(function(addressUser) {
if (typeof addressUser != 'undefined') {
// found one.
console.log(addressUser);
// figure out if our current user and this user are
// friends.
var friendQuery = new Parse.Query("FVFriendship");
friendQuery.equalTo("from", currentUser);
friendQuery.equalTo("to", addressUser);
friendQuery.first().then(function(relationship) {
if (typeof relationship != 'undefined') {
// if they are, we need to pass.
console.log("Found a relationship: " = relationship)
} else {
// They are not. Add the friendship
var Friendship = Parse.Object.extend("FVFriendship");
var friendship = new Friendship();
friendship.set("from", currentUser);
friendship.set("to", addressUser);
friendship.save().then(function(result) {
console.log("Created a friendship: " + result)
});
};
});
} else {
// we did not find a user for that address
console.log("No user for " + address);
};
});
});
});
console.log(promise);
return promise;
}).then(function() {
response.success("success");
});
});
Thanks in Advance. Let me know if there's anything else I can add.
Your .then callback functions attached to promise should return a promise; missing this is a common mistake when using promises. In your code, none of the inner operations (emailQuery.first(), friendQuery.first(), friendship.save()) are returned from their enclosing callbacks, so the chained promise resolves without waiting for any of them, and response.success() fires immediately.
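For illustration, a minimal sketch of the inner chain with those returns added; each nested .then now returns the next operation's promise, so the outer chain actually waits:
promise = promise.then(function() {
    var emailQuery = new Parse.Query("User");
    emailQuery.equalTo("email", address);
    return emailQuery.first().then(function(addressUser) {   // <-- return added
        if (typeof addressUser == 'undefined') {
            console.log("No user for " + address);
            return;
        }
        var friendQuery = new Parse.Query("FVFriendship");
        friendQuery.equalTo("from", currentUser);
        friendQuery.equalTo("to", addressUser);
        return friendQuery.first().then(function(relationship) {   // <-- return added
            if (typeof relationship != 'undefined') {
                console.log("Found a relationship: " + relationship);
                return;
            }
            var Friendship = Parse.Object.extend("FVFriendship");
            var friendship = new Friendship();
            friendship.set("from", currentUser);
            friendship.set("to", addressUser);
            return friendship.save();   // <-- return added
        });
    });
});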
Also, Parse doesn't seem to show objects with console.log the way browsers do, so I wrap them in JSON.stringify().

How to use "q" module for refactoring mongoose code?

I'm using mongoose to insert some data into mongodb. The code looks like:
var mongoose = require('mongoose');
mongoose.connect('mongo://localhost/test');
var conn = mongoose.connection;

// insert users
conn.collection('users').insert([{/*user1*/},{/*user2*/}], function(err, docs) {
    var user1 = docs[0], user2 = docs[1];
    // insert channels
    conn.collection('channels').insert([{userId:user1._id},{userId:user2._id}], function(err, docs) {
        var channel1 = docs[0], channel2 = docs[1];
        // insert articles
        conn.collection('articles').insert([{userId:user1._id,channelId:channel1._id},{}], function(err, docs) {
            var article1 = docs[0], article2 = docs[1];
        });
    });
});
You can see there are a lot of nested callbacks there, so I'm trying to use q to refactor it.
I hope the code will look like:
Q.fcall(step1)
.then(step2)
.then(step3)
.then(step4)
.then(function (value4) {
    // Do something with value4
}, function (error) {
    // Handle any error from step1 through step4
})
.end();
But I don't know how to do it.
You'll want to use Q.nfcall, documented in the README and the Wiki. All Mongoose methods are Node-style. I'll also use .spread instead of manually destructuring .then.
var Q = require('q');
var mongoose = require('mongoose');
mongoose.connect('mongo://localhost/test');
var conn = mongoose.connection;

var users = conn.collection('users');
var channels = conn.collection('channels');
var articles = conn.collection('articles');

function getInsertedArticles() {
    return Q.nfcall(users.insert.bind(users), [{/*user1*/},{/*user2*/}]).spread(function (user1, user2) {
        return Q.nfcall(channels.insert.bind(channels), [{userId:user1._id},{userId:user2._id}]).spread(function (channel1, channel2) {
            return Q.nfcall(articles.insert.bind(articles), [{userId:user1._id,channelId:channel1._id},{}]);
        });
    });
}

getInsertedArticles()
    .spread(function (article1, article2) {
        // you only get here if all three of the above steps succeeded
    })
    .fail(function (error) {
        // you get here if any of the above three steps failed
    });
In practice, you will rarely want to use .spread, since you usually are inserting an array that you don't know the size of. In that case the code can look more like this (here I also illustrate Q.nbind).
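Since the Q.nbind illustration is not reproduced here, a rough sketch of what it looks like: nbind wraps a Node-style method once, keeping `this` bound, so the earlier calls could read:
var Q = require('q');
// bind each Node-style method once; each wrapper returns a promise
var insertUsers = Q.nbind(users.insert, users);
var insertChannels = Q.nbind(channels.insert, channels);
insertUsers([{/*user1*/}, {/*user2*/}]).then(function (docs) {
    // docs is the array of inserted users; chain the next insert from here
    return insertChannels([{userId: docs[0]._id}, {userId: docs[1]._id}]);
});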
Comparing with the original is not quite fair, because your original has no error handling. A corrected Node-style version of the original would be like so:
var mongoose = require('mongoose');
mongoose.connect('mongo://localhost/test');
var conn = mongoose.connection;

function getInsertedArticles(cb) {
    // insert users
    conn.collection('users').insert([{/*user1*/},{/*user2*/}], function(err, docs) {
        if (err) {
            cb(err);
            return;
        }
        var user1 = docs[0], user2 = docs[1];
        // insert channels
        conn.collection('channels').insert([{userId:user1._id},{userId:user2._id}], function(err, docs) {
            if (err) {
                cb(err);
                return;
            }
            var channel1 = docs[0], channel2 = docs[1];
            // insert articles
            conn.collection('articles').insert([{userId:user1._id,channelId:channel1._id},{}], function(err, docs) {
                if (err) {
                    cb(err);
                    return;
                }
                var article1 = docs[0], article2 = docs[1];
                cb(null, [article1, article2]);
            });
        });
    });
}

getInsertedArticles(function (err, articles) {
    if (err) {
        // you get here if any of the three steps failed.
        // `articles` is `undefined`.
    } else {
        // you get here if all three succeeded.
        // `err` is null.
    }
});
With an alternative deferred promise implementation, you may do it as follows:
var mongoose = require('mongoose');
mongoose.connect('mongo://localhost/test');
var conn = mongoose.connection;

// Setup 'pinsert', promise version of 'insert' method
var promisify = require('deferred').promisify;
mongoose.Collection.prototype.pinsert = promisify(mongoose.Collection.prototype.insert);

var user1, user2;
// insert users
conn.collection('users').pinsert([{/*user1*/},{/*user2*/}])
// insert channels
.then(function (users) {
    user1 = users[0]; user2 = users[1];
    return conn.collection('channels').pinsert([{userId:user1._id},{userId:user2._id}]);
})
// insert articles
.match(function (channel1, channel2) {
    return conn.collection('articles').pinsert([{userId:user1._id,channelId:channel1._id},{}]);
})
.done(function (articles) {
    // Do something with articles
}, function (err) {
    // Handle any error that might have occurred on the way
});
Consider Model.save instead of Collection.insert (quite the same in our case).
You don't need to use Q; you can wrap the save method yourself and return a Mongoose Promise directly.
First create a utility method to wrap the save function - it's not very clean, but something like:
//Utility function (put it in a better place)
var saveInPromise = function (model) {
    var promise = new mongoose.Promise();
    model.save(function (err, result) {
        promise.resolve(err, result);
    });
    return promise;
};
Then you can use it instead of save to chain your promises
var User = mongoose.model('User');
var Channel = mongoose.model('Channel');
var Article = mongoose.model('Article');

//Step 1
var user = new User({data: 'value'});
saveInPromise(user).then(function () {
    //Step 2
    var channel = new Channel({user: user.id});
    return saveInPromise(channel);
}).then(function (channel) {
    //Step 3
    var article = new Article({channel: channel.id});
    return saveInPromise(article);
}, function (err) {
    //A single place to handle your errors
});
I guess that's the kind of simplicity we are looking for.. right? Of course the utility function can be implemented with better integration with Mongoose.
Let me know what you think about that.
By the way, there is an issue about that exact problem on the Mongoose GitHub:
Add 'promise' return value to model save operation
I hope it's going to be solved soon. I think it's taking some time because they are thinking of switching from mpromise to Q: see here and then here.
Two years later, this question just popped up in my RSS client ...
Things have moved on somewhat since May 2012 and we might choose to solve this one in a different way now. More specifically, the JavaScript community has become "reduce-aware" since the decision to include Array.prototype.reduce (and other Array methods) in ECMAScript5. Array.prototype.reduce was always (and still is) available as a polyfill but was little appreciated by many of us at that time. Those who were running ahead of the curve may demur on this point, of course.
The problem posed in the question appears to be formulaic, with rules as follows:
The objects in the array passed as the first param to conn.collection(table).insert() build as follows (where N corresponds to the object's index in an array):
[ {}, ... ]
[ {userId:userN._id}, ... ]
[ {userId:userN._id, channelId:channelN._id}, ... ]
table names (in order) are: users, channels, articles.
the corresponding object properties are: user, channel, article (i.e. the table names without the pluralizing 's').
A general pattern, from this article by Taoofcode, for making asynchronous calls in series is:
function workMyCollection(arr) {
    return arr.reduce(function(promise, item) {
        return promise.then(function(result) {
            return doSomethingAsyncWithResult(item, result);
        });
    }, q());
}
With quite light adaptation, this pattern can be made to orchestrate the required sequencing:
function cascadeInsert(tables, n) {
    /*
     * tables: array of unpluralised table names
     * n: number of users to insert.
     * returns promise of completion|error
     */
    var ids = []; // this outer array is available to the inner functions (to be read and written to).
    for (var i = 0; i < n; i++) { ids.push({}); } // initialize the ids array with n plain objects.
    return tables.reduce(function (promise, t) {
        return promise.then(function () {
            return insert(ids, t + 's').then(function (docs) {
                for (var i = 0; i < ids.length; i++) {
                    if (!docs[i]) throw (new Error(t + ": returned documents list does not match the request")); // or simply `continue;` to be error tolerant (if acceptable server-side).
                    ids[i][t + 'Id'] = docs[i]._id; // progressively add properties to the `ids` objects
                    delete ids[i]._id; // the driver adds _id to inserted objects; drop it before reusing `ids` for the next table
                }
            });
        });
    }, Q());
}
Lastly, here's the promise-returning worker function, insert():
function insert(ids, t) {
    /*
     * ids: array of plain objects with properties as defined by the rules
     * t: table name.
     * returns promise of docs
     */
    var dfrd = Q.defer();
    conn.collection(t).insert(ids, function(err, docs) {
        (err) ? dfrd.reject(err) : dfrd.resolve(docs);
    });
    return dfrd.promise;
}
Thus, you can specify as parameters passed to cascadeInsert, the actual table/property names and the number of users to insert.
cascadeInsert(['user', 'channel', 'article'], 2).then(function () {
    // you get here if everything was successful
}).catch(function (err) {
    // you get here if anything failed
});
This works nicely because the tables in the question all have regular plurals (user => users, channel => channels). If any of them was irregular (eg stimulus => stimuli, child => children), then we would need to rethink - (and probably implement a lookup hash). In any case, the adaptation would be fairly trivial.
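The lookup-hash adaptation might look like this (a sketch; only irregular plurals need entries):
var irregularPlurals = { stimulus: 'stimuli', child: 'children' };
function pluralize(t) {
    return irregularPlurals[t] || (t + 's'); // fall back to the regular rule
}
// ...then in cascadeInsert, replace `t + 's'` with `pluralize(t)`.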
Today we have mongoose-q as well: a plugin for Mongoose that gives you things like execQ and saveQ, which return Q promises.
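Usage is roughly a one-line change at require time (a sketch based on the plugin's README):
var mongoose = require('mongoose-q')(require('mongoose'));
// model methods now have Q-returning twins, e.g.:
// new Country({ name: 'France', code: 'FR' }).saveQ().then(function (saved) { ... });
// Country.find().execQ().then(function (docs) { ... });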
