Using Koa with bluebird and pg - javascript

I have a question about the best way to use Koa with Postgres. I also (really) like using Bluebird, so I've gone with this approach.
'use strict';
var db = require('./modules/db.js');
var koa = require('koa');
var app = koa();

app.use(function *() {
  yield db.withConnection(function *(client) {
    let id = this.request.query.id;
    let simple_data = yield client.queryAsync('select name from table1 where id = $1', [id]);
    this.response.body = simple_data;
  }.bind(this));
});

app.listen(3000);
This is the db.js file; it basically uses the approach described in the Bluebird docs.
// ... imports omitted
Promise.promisifyAll(pg);

function getSqlConnection() {
  var close;
  return pg.connectAsync(connectionString).spread(function (client, done) {
    close = done;
    return client;
  }).disposer(function () {
    if (close) close();
  });
}

function* withConnection(func) {
  yield Promise.using(getSqlConnection(), function (client) {
    return Promise.coroutine(func)(client);
  });
}

module.exports.withConnection = withConnection;
Do you have any suggestions for improving this? I really like it so far; I've tested it extensively (under load, with errors/exceptions, etc.), and it seems to work correctly. I'm pretty new to generators and the other ES6 features, so it's possible I'm missing something.
My question is basically: why do so few people use this approach? I find it hard to find examples of it online.
I'm also fine with using other libraries besides pg and Bluebird, but I like these two because of their download counts. I prefer popular libraries because it's easier to find blog posts, help, and documentation for them. Thanks!

Bluebird is a promise library, and a very good one at that, but it should not be used as guidance for how to choose or use a database library. All that Promise.promisifyAll(pg) business is actually quite poor next to the promise-based solutions that already exist out there: knex, massive.js, pg-promise, etc.
And if you want the best combination of pg + bluebird, then try pg-promise.
var promise = require('bluebird');

var options = {
  promiseLib: promise // Use Bluebird as the promise library
};

var pgp = require('pg-promise')(options);
var db = pgp('postgres://username:password@host:port/database');

db.query('select name from table1 where id = $1', [1])
  .then(function (data) {
    // success
  })
  .catch(function (error) {
    // error
  });
The library supports ES6 generators also, so you can write the code exactly like in your example.
From the Tasks example:

db.task(function * (t) {
  let user = yield t.one("select * from users where id=$1", 123);
  return yield t.any("select * from events where login=$1", user.name);
})
  .then(function (events) {
    // success
  })
  .catch(function (error) {
    // error
  });
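For instance, the original Koa middleware from the question could look roughly like this with pg-promise (a sketch only; db is the pgp database object created above, and db.any resolves with the result rows, which Koa/co can yield directly):

app.use(function *() {
  var id = this.request.query.id;
  // db.any returns a promise for an array of rows
  this.body = yield db.any('select name from table1 where id = $1', [id]);
});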

You could also try pg-native.
From pg module docs:
"node-postgres contains a pure JavaScript protocol implementation
which is quite fast, but you can optionally use native bindings for a
20-30% increase in parsing speed. Both versions are adequate for
production workloads. To use the native bindings, first install
pg-native. Once pg-native is installed, simply replace require('pg')
with require('pg').native."
https://github.com/brianc/node-postgres
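A minimal sketch of the switch, assuming pg-native has been installed and using the classic pg callback API (connectionString is a placeholder here):

var pg = require('pg').native; // same API, native libpq bindings underneath

pg.connect(connectionString, function (err, client, done) {
  if (err) return console.error('connection error', err);
  client.query('select name from table1 where id = $1', [1], function (err, result) {
    done(); // return the client to the pool
    if (err) return console.error('query error', err);
    console.log(result.rows);
  });
});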

Related

Getting value of generator function, koa

I have a function that pulls a random question out of the database from the Questions collection.
Game_Questions.js - the console.log below prints the correct value (the string I need), so I thought that return would let yield give me back the same value.
exports.random_Question = function *() {
  yield Questions.findRandom().limit(1).exec(function (err, question) {
    console.log("rand q: " + question[0].text);
    return question[0].text;
  });
}
Game.js:
var Game_Questions = require('./backend/Game_Questions');
And here I want to access the question[0].text value from the random_Question function in the snippet above (Game_Questions.js). What I've tried so far:
var found_Question = Game_Questions.random_Question();
var found_Question = Game_Questions.random_Question().next().value;
Those two return [Object object] which after using JSON.stringify() shows that the object is:
{"value":{"emitter":{"domain":null,"_events":{}},"emitted":{},"ended":true},"done":false}
I also tried using co(function*()), but that didn't let me get the value out either. How can I access it?
The answer by @remus is a callback approach, and Koa was designed explicitly to ditch callbacks. So while it's perfectly good code and would fit an Express application, it is completely at odds with the design philosophy behind Koa.
From the looks of it you are using Mongoose, which has supported promises for async operations since version 4.0 (released April 2015), so a yield approach can be taken. Note that I'm making an assumption that you are working with Mongoose - I hope I'm not wrong!
Here is some nice documentation on how Mongoose fits nicely with Koa.
So first of all, make sure you are using a version of Mongoose that supports yield. If not, you'll have to use @remus's approach or manually wrap each of your methods so they are yield compatible (i.e. wrap them in promises), as sketched below.
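For reference, manually wrapping a callback-style query so it becomes yieldable might look like this (a sketch reusing the names from the question, with a native Promise or Bluebird):

exports.random_Question = function *() {
  var docs = yield new Promise(function (resolve, reject) {
    Questions.findRandom().limit(1).exec(function (err, result) {
      if (err) return reject(err);
      resolve(result);
    });
  });
  return docs[0].text;
};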
But if you are using a compatible version (4.0 and upwards) then your code would look something like the following:
exports.random_Question = function *() {
  var result;
  try {
    result = yield Questions.findRandom().limit(1).exec();
  } catch (e) {
    console.log(e.stack);
    throw e;
  }
  console.log("rand q: " + result[0].text);
  return result[0].text;
}
Note that I'm assuming the result is an array based on the code you supplied.
The above example doesn't necessarily have to be a generator function. It could also be a normal function that returns a Promise. So alternatively something like this could also be done:
exports.random_Question = function () {
  return Questions.findRandom()
    .limit(1)
    .exec()
    .then(function (result) {
      // exec() resolves with the query result (an array here)
      var question = result[0];
      console.log("rand q: " + question.text);
      return question.text;
    })
    .catch(function (e) {
      console.log(e.stack);
      throw e;
    });
}
So for the random_Question function, all that is important is that it can be yielded by co, which handles Koa's application flow control – check tj/co on GitHub for the different objects you can yield.
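As a rough illustration (not from the question's code), these are the kinds of values co will happily resolve when yielded in a Koa 1.x middleware:

app.use(function *() {
  var fromPromise = yield Promise.resolve(1);                      // a promise
  var fromThunk = yield function (cb) { cb(null, 2); };            // a thunk
  var inParallel = yield [Promise.resolve(3), Promise.resolve(4)]; // an array of yieldables
  this.body = [fromPromise, fromThunk].concat(inParallel);
});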
So finally, getting back to the Koa middleware, we can yield either of the two random_Question versions above in exactly the same manner. So we'd do:
var koa = require("koa");
var app = module.exports = koa();
var Game_Questions = require('./backend/Game_Questions');
app.use(function*() {
var resultText;
try {
resultText = yield Game_Questions.random_Question();
} catch(e) {
this.throw(500);
}
this.body = resultText;
this.status = 200;
});
app.listen(3000);
Something else to note is that I'm a little unsure of the findRandom method in the Mongoose query, since I don't know whether it plays nicely with Mongoose's promise features. Personally I'd get a normal Mongoose query working with yield before reintroducing findRandom, just to make sure it's not causing an issue.
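That sanity check could be as small as the following (a sketch; the query itself is just a placeholder):

app.use(function *() {
  // A plain Mongoose query, without the findRandom plugin
  var docs = yield Questions.find({}).limit(1).exec();
  this.body = docs.length ? docs[0].text : 'no questions found';
});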
My answer is getting a bit long at this point so I'll leave it at that.
Your syntax is pretty strange, but I'm not sure whether that's specific to Koa or not.
Because Node.js is event based, use a callback instead:
exports.random_Question = function (callback) {
  Questions.findRandom().limit(1).exec(function (err, question) {
    callback(err, question);
  });
}
And use it:
var Game_Questions = require('./backend/Game_Questions');

Game_Questions.random_Question(function (err, question) {
  console.log(question);
});
Of some concern as well is that your question states you're trying to reference Game_Questions.randomQuestion() when your function is actually named random_Question.

Writing insert statements with knex.js on Node API confusion

I've a problem I can't really seem to wrap my head around. It's very specific to the Knex.JS implementation, and I'm sure it has nothing to do with PostgreSQL.
The following implementation works when inserting a moderate amount (~500 statements); with larger amounts it fails for other reasons. Regardless, the following will not work for my use case - I need something like the code in the next section.
import knex = require("knex");
(function (items) {
let db = knex.table("items");
db.truncate();
let foo = [];
items.forEach(function(item) {
foo.push({
id : item.id,
item_data : JSON.stringify(item)
});
});
db.insert(foo).then(function () { /*..*/ });
}(items))
But the following doesn't:
import knex = require("knex");
(function (items) {
let db = knex.table("items");
db.truncate();
let foo = [];
items.forEach(function(item) {
db.then(function() {
return db.insert(foo).into("items");
});
});
db.then(function () { console.log("Done"); });
}(items))
What doesn't work is this:
An inconsistent number of rows is inserted. In some runs it's a lot MORE than I have items (?!)
I get a lot of duplicate key errors in this implementation, since I have a unique constraint
Additional information:
The set contains no duplicate keys
I'm using PostgreSQL as backend
The question is mostly how to implement the desired behaviour. The ideal solution deals in chunks of, say, 500 "items". I've already posted an issue on the project (https://github.com/tgriesser/knex/issues/826), but I'm hoping some people from the Knex.JS community are more active here on SO.
Your solution (promise chaining) is correct; however, since you're using Knex, it already ships with Bluebird, which provides a utility method for this:
var Promise = require("bluebird"); // also used internally by Knex so free to require
Promise.each(items, db.insert.bind(db));
Would do the same thing as:
var chain = Promise.resolve();

items.forEach(function (item) {
  chain = chain.then(function () {
    return db.insert(item);
  });
});
I've found the solution. I'm not entirely convinced whether the problem is the fault of Knex.js or my own lack of experience with Promises in general.
I found inspiration in the work done by Tim Griesser here: https://github.com/tgriesser/knex/blob/batch-insert/lib/util/batch-insert.js
Basically what he did is add chunks to a promise chain. Perhaps this could be done directly in the Knex library, but for readability I've kept it separate.
import knex = require("knex");
(function (items) {
let db = knex.table("items");
// This is the basic operation to add a promise to the chain.
chain = chain.then(function() { return db.truncate(); });
let foo = [];
items.forEach(function(item) {
// Add db.insert() promises to our promise chain
// This can easily be changed to include chunks and/or streams
chain = chain.then(function () {
return db.insert(item);
});
});
// Start resolving the promises once our db.then() is invoked.
return db.then(function(){
return chain.then();
});
}(items));
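If you want the 500-item chunks mentioned in the question, the same chaining idea can be combined with Bluebird's Promise.each. This is only a sketch, assuming knex is an initialized instance:

var Promise = require("bluebird");

function chunk(arr, size) {
  var out = [];
  for (var i = 0; i < arr.length; i += size) out.push(arr.slice(i, i + size));
  return out;
}

function batchInsert(items) {
  var rows = items.map(function (item) {
    return { id: item.id, item_data: JSON.stringify(item) };
  });
  return knex("items").truncate().then(function () {
    // Insert one 500-row batch at a time, sequentially
    return Promise.each(chunk(rows, 500), function (batch) {
      return knex("items").insert(batch);
    });
  });
}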

A bit confused with q and promises in nodejs

I currently have a few JS files in Node.js which are loaded as modules and augment the app object (using Express).
So their signatures look like:
module.exports = function (app, callback) {
  // ...
  callback();
}
So currently as I have about 5 of them my code would look like:
require("./setup/a")(app, function() {
require("./setup/b")(app, function(){
require("./setup/c")(app, function(){
require("./setup/d")(app, function(){
require("./setup/e")(app, function(){
startApp();
})
})
})
})
});
Now that looks unsightly, as it's the "pyramid of doom". However I am not entirely sure how I need to change this pattern to use Q; I was assuming I would use Q.fcall(...a).then(...b).etc.done(), but I am unsure how I pass the app into it and whether I need to return the callback for it to be processed as a promise.
Ideally I do not want to start whacking Q all through my code; I only want it in the places where I want to remove the pyramid use cases. So, in the above example, how do I use Q with promises to pass the app into each required module and then start the app at the end?
Assuming your modules don't already use promises you can do something like this:
module.exports = function (app) {
  // do some stuff with app
  return new Promise(function (resolve, reject) {
    // when ready to resolve after some actions on app
    resolve(); // you can also resolve with a value here
  });
};

Promise.all(["./setup/a", "./setup/b", "./setup/c"].map(function (path) {
  return require(path)(app);
})).then(startApp);
You should however use promises at the lowest level possible, which would mean that you can simply return the promise which you used in the process:
module.exports = function (app) {
  return something(app).then(function () {
    return somethingElseAsyncWithApp(app);
  });
};
So the promise constructor isn't required. Note that this answer uses native promises, but it will also work with libraries that use the same syntax, such as Bluebird. For Q, change new Promise to new Q.Promise and Promise.all to Q.all.
Alternatively, you can change every require(x) to Q.fcall(require, x) and use Q.all on that directly, but that is both slower (although Q is slow anyway) and more error prone than promisifying the modules directly. It is best to promisify the lowest-level API possible.
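For completeness, a rough Q-flavoured version of the same idea, promisifying each module's (app, callback) signature at the call site (module paths taken from the question):

var Q = require('q');

Q.all(['./setup/a', './setup/b', './setup/c', './setup/d', './setup/e'].map(function (path) {
  var setup = require(path);   // each module has the (app, callback) signature
  return Q.nfcall(setup, app); // turn the node-style callback into a promise
}))
  .then(startApp)
  .done();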
Promises are not a silver bullet for the callback pyramid of doom; I have seen code that looked like a pyramid of doom even with promises.
You could get rid of the pyramid and stay with the callback style by doing something like this:
// var app;
// ...etc
var paths = ['./setup/a', './setup/b', './setup/c', './setup/d'];

setupNext();

function setupNext() {
  var p = paths.pop(); // or shift() to keep the original order
  var next = paths.length > 0 ? setupNext : startApp;
  require(p)(app, next);
}

function startApp() {}

How can I promisify the MongoDB native Javascript driver using bluebird?

I'd like to use the MongoDB native JS driver with bluebird promises. How can I use Promise.promisifyAll() on this library?
The 2.0 branch documentation contains a better promisification guide: https://github.com/petkaantonov/bluebird/blob/master/API.md#promisification
It actually has a MongoDB example, which is much simpler:
var Promise = require("bluebird");
var MongoDB = require("mongodb");
Promise.promisifyAll(MongoDB);
When using Promise.promisifyAll(), it helps to target a prototype if your target object must be instantiated. In the case of the MongoDB JS driver, the standard pattern is:
Get a Db object, using either a MongoClient static method or the Db constructor
Call Db#collection() to get a Collection object
So, borrowing from https://stackoverflow.com/a/21733446/741970, you can:
var Promise = require('bluebird');
var mongodb = require('mongodb');
var MongoClient = mongodb.MongoClient;
var Collection = mongodb.Collection;
Promise.promisifyAll(Collection.prototype);
Promise.promisifyAll(MongoClient);
Now you can:
var client = MongoClient.connectAsync('mongodb://localhost:27017/test')
  .then(function (db) {
    return db.collection("myCollection").findOneAsync({ id: 'someId' });
  })
  .then(function (item) {
    // Use `item`
  })
  .catch(function (err) {
    // An error occurred
  });
This gets you pretty far, except it'll also help to make sure the Cursor objects returned by Collection#find() are also promisified. In the MongoDB JS driver, the cursor returned by Collection#find() is not built from a prototype. So, you can wrap the method and promisify the cursor each time. This isn't necessary if you don't use cursors, or don't want to incur the overhead. Here's one approach:
Collection.prototype._find = Collection.prototype.find;

Collection.prototype.find = function () {
  var cursor = this._find.apply(this, arguments);
  cursor.toArrayAsync = Promise.promisify(cursor.toArray, cursor);
  cursor.countAsync = Promise.promisify(cursor.count, cursor);
  return cursor;
}
I know this has been answered several times, but I wanted to add a little more information on this topic. Per Bluebird's own documentation, you should use 'using' to clean up connections and prevent memory leaks.
Resource Management in Bluebird
I looked all over the place for how to do this correctly, and information was scarce, so I thought I'd share what I found after much trial and error. The data I used below (restaurants) came from the MongoDB sample data. You can get that here: MongoDB Import Data
// Using dotenv for environment / connection information
require('dotenv').load();

var Promise = require('bluebird'),
    mongodb = Promise.promisifyAll(require('mongodb')),
    using = Promise.using;

function getConnectionAsync() {
  // process.env.MongoDbUrl is stored in my .env file, loaded by the require above
  return mongodb.MongoClient.connectAsync(process.env.MongoDbUrl)
    // .disposer is what handles cleaning up the connection
    .disposer(function (connection) {
      connection.close();
    });
}
// The two methods below retrieve the same data and output the same data,
// but the first one uses the promisified (Async) driver methods throughout
// while the 2nd one uses the driver's plain (non-promisified) methods.
// NOTE: using limitAsync seems to go away to never-never land and never come back!

// Everything is done asynchronously here with promises
using(
  getConnectionAsync(),
  function (connection) {
    // Because we used promisifyAll(), most (if not all) of the
    // methods in what was promisified now have an Async sibling:
    //   collection : collectionAsync
    //   find       : findAsync
    //   etc.
    return connection.collectionAsync('restaurants')
      .then(function (collection) {
        return collection.findAsync();
      })
      .then(function (data) {
        return data.limit(10).toArrayAsync();
      });
  }
  // Before this ".then" is called, the using statement will call the
  // disposer that was set up in the getConnectionAsync method
).then(
  function (data) {
    console.log("end data", data);
  }
);
// Here, only the connection helper is promisified - the rest uses the
// driver's regular (non-promisified) methods
using(
  getConnectionAsync(),
  function (connection) {
    // Because none of the Async functions are used here, these are the
    // driver's plain methods rather than the promisified versions above
    return connection.collection('restaurants').find().limit(10).toArray();
  }
).then(
  function (data) {
    console.log("end data", data);
  }
);
I hope this helps someone else out who wanted to do things by the Bluebird book.
Version 1.4.9 of mongodb should now be easily promisifiable as such:
Promise.promisifyAll(mongo.Cursor.prototype);
See https://github.com/mongodb/node-mongodb-native/pull/1201 for more details.
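A small usage sketch under that assumption, promisifying the driver's prototypes up front and then using the Async variants (the connection string and collection name are placeholders):

var Promise = require('bluebird');
var mongodb = require('mongodb');

Promise.promisifyAll(mongodb.Cursor.prototype);
Promise.promisifyAll(mongodb.Collection.prototype);
Promise.promisifyAll(mongodb.MongoClient);

mongodb.MongoClient.connectAsync('mongodb://localhost:27017/test')
  .then(function (db) {
    return db.collection('myCollection').find().limit(10).toArrayAsync();
  })
  .then(function (docs) {
    console.log(docs);
  })
  .catch(function (err) {
    console.error(err);
  });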
We have been using the following driver in production for a while now. It's essentially a promise wrapper over the native Node.js driver, and it also adds some additional helper functions.
poseidon-mongo - https://github.com/playlyfe/poseidon-mongo

Using Callbacks With nodejs in KOA

I recently started work on a new project that uses JavaScript callbacks in Node.js. Now we use Koa, but the problem happens when we try to use ES6 generators and callbacks together.
// Callback function
function load(callback) {
  var result = null;
  // Do something with xmla4js and ajax
  callback(result);
  return result;
}
Now in Koa I need to call load and send the JSON response to the client, so I use the code below:
router = require('koa-router');
app = koa();

app.use(router(app));
app.get('load', loadjson);

function *loadJson() {
  var that = this;
  load(function (result) {
    that.body = result;
  });
}
but I get this error:
_http_outgoing.js:331
throw new Error('Can\'t set headers after they are sent.');
^
Error: Can't set headers after they are sent.
at ServerResponse.OutgoingMessage.setHeader (_http_outgoing.js:331:11)
at Object.module.exports.set (G:\NAP\node_modules\koa\lib\response.js:396:16)
at Object.length (G:\NAP\node_modules\koa\lib\response.js:178:10)
at Object.body (G:\NAP\node_modules\koa\lib\response.js:149:19)
at Object.body (G:\NAP\node_modules\koa\node_modules\delegates\index.js:91:31)
at G:\NAP\Server\OlapServer\index.js:40:19
at G:\NAP\Server\OlapServer\OLAPSchemaProvider.js:1599:9
at _LoadCubes.xmlaRequest.success (G:\NAP\Server\OlapServer\OLAPSchemaProvider.js:1107:13)
at Object.Xmla._requestSuccess (G:\NAP\node_modules\xmla4js\src\Xmla.js:2113:50)
at Object.ajaxOptions.complete (G:\NAP\node_modules\xmla4js\src\Xmla.js:2024:34)
Just to clarify things, let's write your callback as
// Callback function
function load(callback) {
  setTimeout(function () {
    var result = JSON.stringify({ 'my': 'json' });
    callback(/* error: */ null, result);
  }, 500);
}
In the Koa world, this is called a thunk, meaning an asynchronous function that takes only one argument: a callback with the signature (err, res). You can check https://github.com/visionmedia/node-thunkify for a better explanation.
Now you have to write your middleware as:
function *loadJson() {
  this.type = 'application/json';
  this.body = yield load;
}
This is mainly because Koa is generator based; at the top of the middleware it does not support plain callbacks, so it does not wait for your function to finish. The best solution would be to convert your function into a promise; promises work great with Koa.
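A sketch of that conversion, assuming the xmla4js work is hidden behind a hypothetical doAjaxCall(callback) helper with an (err, result) callback:

function load() {
  return new Promise(function (resolve, reject) {
    // Do something with xmla4js and ajax, then settle the promise
    doAjaxCall(function (err, result) { // hypothetical async helper
      if (err) return reject(err);
      resolve(result);
    });
  });
}

function *loadJson() {
  this.type = 'application/json';
  this.body = yield load(); // yield the promise returned by load()
}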
I had a very similar problem using Braintree (regular callbacks) and Koa. Based on your code, the only change I needed to make was to the load function and how it was called.
router = require('koa-router');
app = koa();

app.use(router(app));
app.get('/load', loadjson);

function *loadJson() {
  this.body = yield load;
}

// Callback function
function load(callback) {
  // Prepare some data with xmla4js and ajax
  whatever_inputs = {...};
  final_method(whatever_inputs, callback);
}
The explanation by Jerome and Evan above is absolutely correct, and thunkify looks like a suitable process for automatically doing it.
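For example, node-thunkify could wrap the (err, result)-style load shown above without any manual plumbing (a sketch, assuming that callback signature):

var thunkify = require('thunkify');
var loadThunk = thunkify(load); // load(callback) -> loadThunk() returns a thunk

function *loadJson() {
  this.type = 'application/json';
  this.body = yield loadThunk();
}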
While thunks were a nice idea, in my view a Promise is a better long-term approach. Many libraries are already moving to promises for async work instead of the old Node-standard callback(err, data), and they're dead simple to wrap around any async code to make a promise. Other devs will have experience with Promises and will naturally understand your code, while most would have to look up what a "thunk" is.
e.g. here I am wrapping the not-yet-promise-based jsdom up in a promise, so I can yield it in my koa generator.
const jsdom = require('node-jsdom');
const koa = require('koa');
const app = koa();

app.use(function *() {
  this.body = yield new Promise((resolve, reject) => jsdom.env({
    url: `http://example.org${this.url}`,
    done(errors, { document }) {
      if (errors) reject(errors.message);
      resolve(`<html>${document.body.outerHTML}</html>`);
    },
  }));
});

app.listen(2112);
Semantically, promises and generators go hand-in-hand to really clarify async code. A generator can be re-entered many times and yield several values, while a promise means "I promise I'll have some data for you later". Combined, you get one of the most useful things about Koa: the ability to yield both promises and synchronous values.
edit: here's your original example wrapped with a Promise to return:
const router = require('koa-router');
const { load } = require('some-other-lib');
const app = koa();

app.use(router(app));
app.get('load', loadjson);

function* loadJson() {
  this.body = yield new Promise(resolve => {
    load(result => resolve(result));
  });
}
To bypass Koa's built-in response handling, you may explicitly set this.respond = false;. Use this if you want to write to the raw res object instead of letting Koa handle the response for you.
The header has already been written by the built-in response handling before your callback is invoked, which is what produces the error above.
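A rough illustration of what that looks like, reusing the question's single-argument load callback (bypassing Koa means you become responsible for the headers and body yourself):

app.use(function *() {
  this.respond = false;  // tell Koa not to handle the response
  var res = this.res;    // the raw Node.js ServerResponse

  load(function (result) {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify(result));
  });
});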
