I have just started using Bluebird's Promise.coroutine, which is a promise-based version of ES6 generator functions.
Everything works fine when I create a function and assign it to a variable, like:
let success = Promise.coroutine(function* (_context) {
...
});
exports.success = Promise.coroutine(function* (_context) {
...
});
But when I try to create a standalone function, like:
Promise.coroutine(function *success() {
...
});
It never defines a function and I get the error:
success is not defined
How do I access a standalone generator function? Or, more to the point, how do I create one?
Edit:
I am using validatejs; it requires success and error functions for async validations:
exports.create = function (req, res) {
var constraints = {
...
}
validate.async(req, constraints).then(Promise.coroutine(success), Promise.coroutine(error));
function success() { //generator
}
function error(e) { //generator
}
}
You can define a generator function as shown below.
function* functionName([param[, param[, ... param]]]) {
statements..
}
Please note that the * symbol goes with the function keyword, not with the function name. The function keyword followed by an asterisk declares a generator function.
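For instance (a minimal illustration, not taken from the question's code):

function* counter() { // the asterisk follows the `function` keyword
    yield 1;
    yield 2;
    yield 3;
}

for (let n of counter()) {
    console.log(n); // 1, 2, 3
}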
Update 1: Usage with the Promise.coroutine method. In JavaScript, functions are first-class citizens and can therefore be passed as parameters, so you can replace the function expression with the function name.
Promise.coroutine(functionName);
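Applied to the question's code, a hedged sketch might look like this (the generator bodies are placeholders):

function* success(_context) {
    // ...async work goes here; `yield somePromise` waits for it
}

function* error(e) {
    // ...
}

validate.async(req, constraints)
    .then(Promise.coroutine(success), Promise.coroutine(error));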
Your success() function doesn't have to be named, because you are not actually calling it; you are calling the coroutine that wraps it. See the example below. You should assign your coroutine to whatever you are trying to call it from, and then yield a promise for your delayed processing (whatever that may be). Then you call the coroutine, which takes care of returning the promise.
var Promise = require("bluebird");
function Test() {
}
Test.prototype.foo = Promise.coroutine(function* success() {
console.log("Called success")
var i = 0;
while (i < 3) {
console.log("Waiting and Yield " + i++);
yield Promise.delay(1000);
}
console.log("Test " + i);
});
var a = new Test();
a.foo();
console.log("Done!");
Then you will get this output:
>node index.js
Called success
Waiting and Yield 0
Done!
Waiting and Yield 1
Waiting and Yield 2
Test 3
I have a function connectImpl that is referenced in multiple places. I am trying to invoke the promise it returns and hand its value back to the calling function synchronously, using a generator as an intermediary. If I call .next() on the generator, the yielded promise comes back still in a pending state:
{ value: { state: 'pending' }, done: false }
I would like to wait on the value of this generator until it is no longer pending. I have tried multiple versions of waitOn to accomplish this, but I cannot seem to make it work properly.
I am open to implementation suggestions; this is driving me a bit batty. Surprisingly, the final 'connectImpl:created' log from the promise fires later in the execution chain, after the generator is already done. I am obviously missing something:
let models = null ;
let connectImpl = function() {
console.log('connectImpl')
let orm = setupImpl()
let config = getConfigImpl()
let qInitialize = q.nbind(orm.initialize, orm)
if(models) {
console.log('connectImpl:cached')
return q(models)
} else {
console.log('connectImpl:create')
return qInitialize(config).then(function(m){
console.log('connectImpl:created')
models = m
return models
})
}
}
let waitOn = function(generator){
console.log('waitOn')
let done = false ;
let generatorValue = null
while(!done){
var generatorResult = generator.next()
console.log(generatorResult)
done = generatorResult.done
generatorValue = generatorResult.value
}
return generatorValue
}
let domainImpl = function() {
console.log('domainImpl')
let getConnection = function *() {
console.log('domainImpl:getConnection')
yield connectImpl()
}
var generator = getConnection()
return waitOn(generator)
}
console.log('START')
console.log(domainImpl())
console.log('END')
I am able to invoke it, and I get the following output:
START
domainImpl
waitOn
domainImpl:getConnection
connectImpl
connectImpl:create
{ value: { state: 'pending' }, done: false }
{ value: undefined, done: true }
undefined
END
connectImpl:created
I am able to execute the connectImpl promise from middleware via this function, but I can't seem to adapt it to my use case above:
let domainMiddlewareImpl = function () {
return function *(next) {
let models = yield connectImpl()
this.request.models = models.collections;
this.request.connections = models.connections;
yield next
};
};
This looks fun. Let's see how we can yield promises. Our end goal is to write something like:
waitOn(function*(){
console.log("hello");
yield Q.delay(2000); // a placeholder for the calls in your example
console.log("World"); // this should run two seconds late.
});
Your issue here is that your waitOn calls .next() in a synchronous loop and never waits for the yielded promises to settle. First of all, you can skip to the end for a 'ready' solution (don't!) and here is a fiddle of what we're making. Let's go through implementing waitOn with generators:
Let's start:
function waitOn(gen){
}
So, our function takes a generator, the first thing we'll have to do is invoke it since we need to execute the generator to get its results:
function waitOn(gen){
let sequence = gen(); // call the generator
}
Next, we'll want to wrap everything in a Promise, since our waitOn consumes yielded promises and itself returns a promise that settles when it is done:
function waitOn(gen){
let sequence = gen(); // call the generator
return Promise.resolve(); // this is Q.resolve with Q
}
Now, what cases do we have:
The generator is done and returned a value - that is a return
The generator yielded a regular value and we do not have to wait for it
The generator yielded a promise and we have to wait for it. We also have to deal with exceptions (what if we yield a promise that rejects?)
So our basic structure is something like:
function waitOn(gen){
let sequence = gen(); // call the generator
return Promise.resolve().then(function cont(prev){
let {value, done} = sequence.next(prev); // get the next item
// depending on the case do what's appropriate
});
}
Note the destructuring assignment - I assume that's OK since your code uses ES6 syntax too. Also note that since this is the first call, the value passed in (prev here) is undefined, but in general we want to pass on the value produced by the previous step. Now to handle the cases:
function waitOn(gen){
let sequence = gen(); // call the generator
return Promise.resolve().then(function cont(prev){
let {done, value} = sequence.next(prev); // get the next item
if(done) return value; // return case
if(!value || !value.then) return cont(value); // plain value case, recurse
return value.catch(e => sequence.throw(e)).then(cont); // promise case
});
}
Note the .catch clause - we throw rejections from the promise back into the generator for it to handle, so you can try/catch around yielded promises.
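For instance (a small illustrative sketch reusing the waitOn we just built), a rejected promise surfaces inside the generator as an ordinary exception:

waitOn(function* () {
    try {
        yield Promise.reject(new Error("boom")); // rejected on purpose
    } catch (e) {
        console.log("caught inside the generator:", e.message);
    }
});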
That's it! In 9 lines of JavaScript we've implemented promise-aware generators. Now, back to your code - you can yield any promise:
let conn = q.nbind(orm.initialize, orm);
waitOn(function*(){
console.log("Starting")
let handle = yield conn(config);
console.log("Handle created!", handle); // connected here
});
Happy coding, and enjoy the power of coroutines. After we've had our fun, it's worth mentioning that Q already ships with Q.async, and other newer promise libraries like Bluebird ship with their own (Bluebird has Promise.coroutine). If you're using a promise library, you can use those instead. This implementation works with native promises too.
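For example (a hedged sketch keeping the conn and config names from above), the hand-rolled waitOn could be swapped for Q's built-in runner:

var Q = require("q");

// Q.async wraps a generator and returns a function that produces a promise,
// waiting on every yielded promise along the way.
var getHandle = Q.async(function* () {
    console.log("Starting");
    let handle = yield conn(config);
    console.log("Handle created!", handle);
    return handle;
});

getHandle().then(function (handle) {
    console.log("All done", handle);
});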
I'm trying to work through this JS/async scenario and I'm trying to learn how the rest of the JS world handles it.
function doStuff(callback) {
    cursor.each(function(err, blahblah) {
        // ...doing stuff here takes some time
    });

    // ... Execute this code ONLY after the `cursor.each` loop is finished
    callback();
}
EDIT
Here's a more concrete example, updated using most of the suggestions below, which still doesn't work.
function doStuff(callback) {
MongoClient.connect(constants.mongoUrl, function(err, db) {
var collection = db.collection('cases2');
var cursor = collection.find();
var promises = []; // array for storing promises
cursor.each(function(err, item) {
console.log('inside each'); // NEVER GETS LOGGED UNLESS I COMMENT OUT THIS LINE: return Q.all(promises).then(callback(null, items));
var def = Q.defer(); // Create deferred object and store
promises.push(def.promise); // Its promise in the array
if(item == null) {
return def.resolve();
}
def.resolve(); // resolve the promise
});
console.log('items'); // ALWAYS GETS CALLED
console.log(items);
// IF I COMMENT THIS LINE OUT COMPLETELY,
// THE LOG STATEMENT INSIDE CURSOR.EACH ACTUALLY GETS LOGGED
return Q.all(promises).then(callback(null, items));
});
}
Without using promises or any other dependencies/libraries, you can simply add a counter:
function doStuff(callback) {
    var cursor = new Array(); // init with some array data
    var cursorTasks = cursor.length; // the counter

    function cursorTaskComplete() {
        cursorTasks--;
        if (cursorTasks <= 0) {
            // this gets called once every task has reported completion
            callback();
        }
    }

    for (var i = 0; i < cursor.length; i++) {
        // ...doing stuff here takes some time and does some async stuff
        // ...check after each async request
        // ...when the async operation is complete, call
        cursorTaskComplete();
    }
}
Without knowing the details of the async calls you're making within the cursor.each loop, I shall assume that you have the ability to invoke a callback each time the functions invoked therein have completed their async task:
function doStuff() {
var promises = []; // array for storing promises
cursor.each(function(err, blahblah) {
var def = Q.defer(); // create deferred object and store
promises.push(def.promise); // its promise in the array
call_async_function(..., def.resolve); // resolve the promise in the async function's callback
});
// pass the array to Q.all, only when all are resolved will "callback" be called
return Q.all(promises);
}
and the usage then becomes:
doStuff().then(callback)
Note how the invocation of the callback now never touches the doStuff function - that function now also returns a promise. You can now register multiple callbacks, failure callbacks, etc, all without modifying doStuff. This is called "separation of concerns".
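For instance (an illustrative sketch), the same returned promise can now feed several independent consumers without touching doStuff:

var stuffDone = doStuff();
stuffDone.then(callback);
stuffDone.then(function (result) {
    console.log("another consumer ran", result);
}, function (err) {
    console.error("doStuff failed", err);
});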
[NB: all the above based on the Q promises library - https://github.com/kriskowal/q]
EDIT: further discussion and experimentation have determined that the .each call is itself async and gives no indication to the outside when the last row has been seen. I've created a Gist that demonstrates a resolution to this problem.
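For reference, a hedged sketch of one such resolution: with the node-mongodb driver, cursor.each calls back with item === null once the cursor is exhausted (the question's own code checks for this), so a single deferred can be resolved at that point:

function doStuff() {
    var done = Q.defer();
    cursor.each(function (err, item) {
        if (err) return done.reject(err);
        if (item === null) return done.resolve(); // the last row has been seen
        // ...kick off the per-row async work here
    });
    return done.promise;
}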
If you want to do it with the async module, you can make use of its forEachSeries function.
Code snippet:
function doStuff(callback) {
async.forEachSeries(cursor, function(cursorSingleObj,callbackFromForEach){
//...do stuff which takes time
//this callback is to tell when everything gets over execute the next function
callbackFromForEach();
},function(){
//over here the execution of forEach gets over and then the main callback is called
callback();
});
}
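One caveat, as a hedged usage sketch: async.forEachSeries iterates over an array, so with the MongoDB driver you would typically materialise the cursor first (cursor.toArray is assumed here):

cursor.toArray(function (err, items) {
    if (err) return callback(err);
    async.forEachSeries(items, function (item, callbackFromForEach) {
        // ...do stuff with item which takes time, then signal completion
        callbackFromForEach();
    }, function () {
        // execution of forEachSeries is over, call the main callback
        callback();
    });
});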
In my mind, an elegant/ideal solution would be to have something like:
cursor.each(........).then( function() { ....your stuff});
But without that, you can do this. UPDATED:
http://plnkr.co/edit/27l7t5VLszBIW9eFW4Ip?p=preview
The gist of it is shown below:
var doStuff = function(callback) {
    var allMyAsyncPromises = [];

    cursor.forEach(function(cursorStep) {
        var deferred = $q.defer();
        allMyAsyncPromises.push(deferred.promise);
        cursorStep.execFn(cursorStep.stepMeta);
        deferred.resolve(); // resolve once this step's work is done
    });

    $q.all(allMyAsyncPromises).then(callback);
}
After hitting the start button, wait a few seconds; the async tasks are simulated to finish in 5 seconds, so the status will update accordingly.
Not having access to a real cursor object, I had to resort to faking the cursor with an array.
Suppose you have the following JS function:
function YourProxy($orm, $usr) {
this.addToDB = function(obj) {
/* Do some validation on obj */
return function(callback){
var oo = $orm.createNew(obj);
oo.save(options, function(err, ok){
if(err) return callback(err);
callback(null, ok);
});
}
}
}
which you can use on node.js with ES6 generators to wait for that operation to happen with something like:
function *(){
var yourProxy = new YourProxy();
try {
var result = yield yourProxy.addToDB(anObject);
} catch(e) {
/* Something went wrong sync. Here you have err from save's callback */
}
/* result contains ok, the one from save's callback */
}
To test that I've done something like this, using mocha and sinon (and mocha-sinon):
describe('addToDB', function(){
it('adds the object to the db', function(){
var callback = sinon.spy();
myProxy.addToDB(anObject)(callback);
expect( callback ).to.be.calledOnce;
});
});
but all I got is that the callback is never called, because addToDB() returns before save's callback gets called.
How would you test that?
Try using co-mocha and yield the call inside a generator test, as you did in your example.
describe('addToDB', function(){
it('adds the object to the db', function* (){
var callback = sinon.spy();
yield myProxy.addToDB(anObject)(callback);
expect( callback ).to.be.calledOnce;
});
});
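As a hedged setup note: co-mocha needs to patch Mocha before generator tests run, typically by requiring it once at the top of the test file (or via mocha --require co-mocha):

// hypothetical test-file preamble; co-mocha must be loaded before the suite runs
require('co-mocha'); // monkey-patches Mocha so `it` can take a generator function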
I'm trying to figure out how to get the value of a promise via yield, possibly with "co":
function *(){
var someVar = yield functionThatReturnsAPromise();
}
The called function is not a generator, just a normal function. With the above, someVar ends up being a Promise, but I want the resolved value. Does co or some other library have a way of doing this?
Typically a yield hands the same value both to its own paused execution (the left-hand side of the yield expression) and to the caller of the generator. In this simple counting-from-1-to-5 example, the input fed into each yield is the same as its last output, both for the calling code and for the generator's own execution path:
function* inc() {
var g = 0;
while (true) {
g = yield g + 1;
}
}
var incGen = inc();
for (i = incGen.next().value; i <= 5; i = incGen.next(i).value) {
console.log(i); // ^ input of generator is last output
}
However, the calling code may also resume the generator with a replacement for the output of the last yield, or even throw an exception into the generator's execution. In the case of a function that returns a promise, the caller may feed the result of that promise back in instead of the promise itself. So in this case:
var someVar = yield functionThatReturnsAPromise();
^ output != ^ input
you want the yield to act like a function that takes a promise as input and hands the promise's resolved value back to the generator function.
It so happens co is able to do exactly this for you. All you need to do is feed your generator function to the co function:
co(function *(){
var someVar = yield functionThatReturnsAPromise();
})
To better understand how this works, here is an example of a function that does the same thing as co:
function async(makeGenerator){
return function (){
var generator = makeGenerator.apply(this, arguments)
function handle(result){
if (result.done) return result.value
return result.value.then(function (res){
return handle(generator.next(res)) // <- resume the generator with the promise's result
}, function (err){
return handle(generator.throw(err)) // <- throw the promise's rejection into the generator
})
}
return handle(generator.next()) // <- first time call, input of yield is a promise
}
}
The source is from Forbes Lindesay's now-famous presentation about this concept.
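For completeness, a small usage sketch of the async() helper above (functionThatReturnsAPromise is the same placeholder as in the question); the returned function produces a promise that resolves with the generator's return value:

var run = async(function* () {
    var someVar = yield functionThatReturnsAPromise();
    return someVar; // the resolved value, not the promise
});

run().then(function (value) {
    console.log(value); // logs the resolved value
});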
Yes, co (https://github.com/tj/co) can do that. You'll have to wrap the parent function inside a co call:
co(function *(){
var someVar = yield functionThatReturnsAPromise();
})()
someVar inside will become the resolved value. If the promise gets rejected, the error can be caught with a basic try {} catch (e) {} statement.
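For illustration (a sketch using the same placeholder function and the calling style shown above):

co(function *(){
    try {
        var someVar = yield functionThatReturnsAPromise();
        console.log(someVar); // the resolved value
    } catch (e) {
        console.error("promise was rejected:", e);
    }
})();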
I have a handler (callback), an object to handle, and four functions which collect the data into the object. In my case I wish to asynchronously call the four data retrievers and, when execution of all four is complete, handle the resulting object (something similar to the following):
var data = {};
function handle (jsObj) {}
// data retrieving
function getColorData () {}
function getSizeData () {}
function getWeightData () {}
function getExtraData () {}
data.color = getColorData();
data.size = getSizeData();
data.weight = getWeightData();
data.extra = getExtraData();
handle( data );
Of course, this code will not work properly. And if I chain data retrieving functions, they will be called one after another, right?
All four functions should be called asynchronously, because they take too long to be called one by one.
Updated:
Thanks to everybody for your suggestions! I preferred $.Deferred(), but I found it slightly difficult to make it work the way I need. What I need is to asynchronously build a view which requires four kinds of data (extraData, colorData, sizeData & weightData), and I have three objects: App, Utils & Tools.
Just a short description: the view is created by calling App.getStuff with App.handleStuff passed as a callback. The callback in the body of App.getStuff is called only after $.when(App.getExtraData(), App.getColorData(), App.getSizeData(), App.getWeightData()) resolves. Before that, Utils.asyncRequest is called with Tools.parseResponse passed as a callback.
So, now the question is: should I create a deferred object inside each of the four App.get*Data() methods and return deferred.promise() from each of them?
And should I call deferred.resolve() in the last function in my chain (Tools.parseResponse for App.getExtraData in my example)?
var view,
App,
Utils = {},
Tools = {};
// Utils
Utils.asyncRequest = function (path, callback) {
var data,
parseResponse = callback;
// do something with 'data'
parseResponse( data );
};
// Tools
Tools.parseResponse = function (data) {
var output = {};
// do something to make 'output' from 'data'
/* So, should the deferred.resolve() be done here? */
deferred.resolve(output);
/// OR deferred.resolve();
/// OR return output;
};
// App
App = {
// Only one method really works in my example
getExtraData : function () {
var deferred = new jQuery.Deferred();
Utils.asyncRequest("/dir/data.txt", Tools.parseResponse);
return deferred.promise();
},
// Others do nothing
getColorData : function () { /* ... */ },
getSizeData : function () { /* ... */ },
getWeightData : function () { /* ... */ }
};
App.getStuff = function (callback) {
$.when(
App.getExtraData(),
App.getColorData(),
App.getSizeData(),
App.getWeightData()
)
.then(function (extraData, colorData, sizeData, weightData) {
var context,
handleStuff = callback;
// do something to make all kinds of data become a single object
handleStuff( context );
});
};
App.handleStuff = function (stuff) { /* ... */ };
/// RUN
view = App.getStuff( App.handleStuff );
I did not expect the code in my example above to work; it is for illustrative purposes.
I've been trying to solve this for quite a long time and it still gives no result. The documentation for jQuery.Deferred() and discussions around it, unfortunately, did not help me. So, I would be very glad and grateful for any help or advice.
Conceptually, you would use a counter that gets incremented as each asynchronous call completes. The main caller should proceed after the counter has been incremented by all the asynchronous calls.
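A minimal sketch of that idea (names are illustrative; each retriever is assumed to take a completion callback):

var completedCount = 0; // incremented as each asynchronous call completes

function oneDone() {
    completedCount++;
    if (completedCount === 4) {
        handle(data); // proceed only after all four calls have completed
    }
}

// hypothetical async variant of one retriever: store the result, then report
getColorDataAsync(function (color) { data.color = color; oneDone(); });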
I think what you're looking for are Promises / Deferreds.
With promises you can write something like:
when(getColorData(), getSizeData(), getWeightData(), getExtraData()).then(
function (colorData, sizeData, weightData, extraData) {
handle(/*..*/);
}
)
The get*Data() functions will return a promise that they fulfill when their asynchronous call is complete.
Ex:
function getData() {
var promise = new Promise(); // pseudo-code: a concrete library would use e.g. $.Deferred() here
doAjax("getData", { "foo": "bar" }, function (result) {
promise.resolve(result);
});
return promise;
}
The when simply counts the number of arguments; once all of its promises are resolved, it calls then with the results from the promises.
jQuery has an OK implementation: http://api.jquery.com/jQuery.when/
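A hedged sketch of the same pattern with jQuery's implementation ($.Deferred / $.when); doAjax is the same placeholder used above, and the merging into handle() is illustrative:

function getColorData() {
    var dfd = $.Deferred();
    doAjax("getColorData", { "foo": "bar" }, function (result) {
        dfd.resolve(result); // fulfil the promise once the async call completes
    });
    return dfd.promise();
}
// getSizeData, getWeightData and getExtraData would follow the same shape

$.when(getColorData(), getSizeData(), getWeightData(), getExtraData())
    .then(function (color, size, weight, extra) {
        handle({ color: color, size: size, weight: weight, extra: extra });
    });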
What I could suggest for this scenario would be something like the following.
Write a function like this:
var completed = 0;
checkHandler = function() {
if(completed == 4) {
handle(data);
}
}
where 4 is the number of successful callbacks you must receive and completed counts how many have arrived so far.
As soon as each function's callback fires, you increment the "completed" counter and invoke the checkHandler function, and you're done!
For example:
function getColorData() {
$.get('ajax/test.html', function(data) {
completed++;
checkHandler();
});
}