I am attempting to util.promisify the bonjour npm package. This is the original use case described in the docs:
var bonjour = require('bonjour')()
// advertise an HTTP server on port 3000
bonjour.publish({ name: 'My Web Server', type: 'http', port: 3000 })
// browse for all http services
bonjour.find({ type: 'http' }, function (service) {
console.log('Found an HTTP server:', service)
})
And this will not exit - bonjour.find() stays open looking for http servers.
I want to promisify this and successfully resolve the promise after scanning for servers. Something like:
var bonjour = require('bonjour')
const util = require('util');
// advertise an HTTP server on port 3000
bonjour().publish({ name: 'My Web Server', type: 'http', port: 3000 })
// promisify the 'find' function
const find = util.promisify(bonjour().find.bind(bonjour()));
(async () => {
try {
const content = await find({ type: 'http' });
console.log(content)
} catch (err) {
// It will always error, although, the value of 'err' is the correct content.
console.error('There was an error');
}
})();
As noted in the comments, it will always throw an error, even though the value of err is the desired output.
The promisify'd process expects an exit code 0, which I suspect isn't happening since the process doesn't return. Is this the right assumption? Does anyone have any other insights or solutions to get my promise to not throw an error but return the value that is currently being thrown?
Thanks!
That's because the bonjour callback does not follow the Node.js error-first callback signature (err, data) => {}: bonjour.find's callback receives the found service as its first (and only) argument. You could wrap it in a proxy function like this:
function findWrapper(options, func) {
  // Adapt bonjour's (service) callback to Node's (err, data) convention;
  // err is always undefined here because bonjour never reports one.
  function callback(data, err) {
    func(err, data);
  }
  return bonjour().find(options, callback);
}
const find = util.promisify(findWrapper);
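For completeness, a usage sketch under the same setup (note that the promise settles as soon as the first matching service is reported, since promisify resolves on the first callback invocation):

(async () => {
  // resolves with the first HTTP service found on the network
  const service = await find({ type: 'http' });
  console.log('Found an HTTP server:', service);
})();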
In a Node.js REST API call, Puppeteer's page.evaluate is not returning the response and throws an error.
How do I return the object after executing all the steps, without async?
app.get("/api/register", function (req, res) {
res = page.evaluate((res) => {
webex.meetings
.register()
.then(() => {
console.log("Authentication#register() :: successfully registered");
return res.status(200).json({ message: "Successfully Registered" });
})
.catch((error) => {
console.error( "Authentication#register() :: error registering", error);
return res.status(400).json({ message: "Successfully Registered" });
})
}, res);
});
The error:
C:\Users\sansubbu\git\webRTC\node_modules\puppeteer-core\lib\cjs\puppeteer\common\Connection.js:115
const stringifiedMessage = JSON.stringify(Object.assign({}, message, { id }));
^
TypeError: Converting circular structure to JSON
--> starting at object with constructor 'Socket'
| property 'parser' -> object with constructor 'HTTPParser'
--- property 'socket' closes the circle
Recursive objects are not allowed.
at JSON.stringify ()
at Connection._rawSend (C:\Users\sansubbu\git\webRTC\node_modules\puppeteer-core\lib\cjs\puppeteer\common\Connection.js:115:41)
at CDPSessionImpl.send (C:\Users\sansubbu\git\webRTC\node_modules\puppeteer-core\lib\cjs\puppeteer\common\Connection.js:320:82)
at ExecutionContext._ExecutionContext_evaluate (C:\Users\sansubbu\git\webRTC\node_modules\puppeteer-core\lib\cjs\puppeteer\common\ExecutionContext.js:211:46)
res is a complex, circular structure that only works in the Node environment. Even if you could, passing it to the browser console via page.evaluate() would take it out of Node, where it belongs, leaving it in an environment where it doesn't make any sense (browsers can't respond to requests as if they were a server).
Instead, try returning a boolean and branching on that on the Node side, where req/res are in their natural environment:
app.get("/api/register", async (req, res) => {
const success = await page.evaluate(async () => {
try {
await webex.meetings.register();
return true;
}
catch (err) {
return false;
}
});
if (success) {
console.log("Authentication#register() :: successfully registered");
return res.status(200).json({message: "Successfully Registered"});
}
console.error("Authentication#register() :: error registering", error);
// probably not the message you want but left as-is...
return res.status(400).json({message: "Successfully Registered"});
});
This is untested since you haven't provided a complete, reproducible example.
page.exposeFunction is another possible tool for triggering Node code based on a condition in the browser, but that seems like overkill here.
Finally, I'm not sure what page is, but typically you need a different page for each request. See this answer for suggested Express + Puppeteer boilerplate.
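A rough sketch of that per-request pattern, assuming a browser instance launched once at startup and a hypothetical URL that loads the webex SDK (both are assumptions, not from the question):

const puppeteer = require("puppeteer");

let browser;
(async () => {
  // launch one browser at startup (in real code, wait for this before accepting requests)
  browser = await puppeteer.launch();
})();

app.get("/api/register", async (req, res) => {
  const page = await browser.newPage();
  try {
    // hypothetical page that exposes the global `webex` object
    await page.goto("https://example.com/webex-app");
    const success = await page.evaluate(async () => {
      try {
        await webex.meetings.register();
        return true;
      } catch (err) {
        return false;
      }
    });
    const status = success ? 200 : 400;
    return res.status(status).json({ message: success ? "Successfully Registered" : "Registration failed" });
  } finally {
    await page.close(); // always release the page, even if something above throws
  }
});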
I'm working with MongoDB in Node.js:
const { MongoClient, ObjectId } = require("mongodb");
const MONGO_URI = `mongodb://xxx:xxx#xxx/?authSource=xxx`; // prettier-ignore
class MongoLib {
constructor() {
this.client = new MongoClient(MONGO_URI, {
useNewUrlParser: true,
});
this.dbName = DB_NAME;
}
connect() {
return new Promise((resolve, reject) => {
this.client.connect(error => {
if (error) {
reject(error);
}
resolve(this.client.db(this.dbName));
});
});
}
async getUser(collection, username) {
return this.connect().then(db => {
return db
.collection(collection)
.find({ username })
.toArray();
});
}
}
let c = new MongoLib();
c.getUser("users", "pepito").then(result => console.log(result));
c.getUser("users", "pepito").then(result => console.log(result));
and when the last c.getUser statement is executed (that is, when I make a SECOND connection), MongoDB outputs this warning:
the options [servers] is not supported
the options [caseTranslate] is not supported
the options [username] is not supported
the server/replset/mongos/db options are deprecated, all their options are supported at the top level of the options object [poolSize,ssl,sslValidate,sslCA,sslCert,sslKey,sslPass,sslCRL,autoReconnect,noDelay,keepAlive,keepAliveInitialDelay,connectTimeoutMS,family,socketTimeoutMS,reconnectTries,reconnectInterval,ha,haInterval,replicaSet,secondaryAcceptableLatencyMS,acceptableLatencyMS,connectWithNoPrimary,authSource,w,wtimeout,j,forceServerObjectId,serializeFunctions,ignoreUndefined,raw,bufferMaxEntries,readPreference,pkFactory,promiseLibrary,readConcern,maxStalenessSeconds,loggerLevel,logger,promoteValues,promoteBuffers,promoteLongs,domainsEnabled,checkServerIdentity,validateOptions,appname,auth,user,password,authMechanism,compression,fsync,readPreferenceTags,numberOfRetries,auto_reconnect,minSize,monitorCommands,retryWrites,useNewUrlParser]
But I'm not using any deprecated options. Any ideas?
EDIT
After a little discussion with molank in the comments, it looks like opening several connections from the same server is not good practice, so maybe that's what the warning is trying to say (badly, I think). So if you have the same problem, save the connection instead of the mongo client.
Reposting from https://jira.mongodb.org/browse/NODE-1868:
The deprecation messages are likely because client.connect is being called multiple times. Overall, calling client.connect multiple times currently (as of driver v3.1.13) has undefined behavior, and it is not recommended. It is important to note that once the promise returned from connect resolves, the client remains connected until you call client.close:
const client = new MongoClient(...);
client.connect().then(() => {
  // client is now connected.
  return client.db('foo').collection('bar').insertOne({
  }).then(() => {
    // client is still connected.
    return client.close();
  }).then(() => {
    // client is no longer connected. attempting to use it will result in undefined behavior.
  });
});
The client by default maintains multiple connections to each server it is connected to and can be used for multiple simultaneous operations*. You should be fine running client.connect once and then running your operations on the client object, as in the sketch after the note below.
* Note that the client is NOT thread-safe or fork-safe, so it cannot be shared across forks and is not compatible with Node's cluster or worker_threads modules.
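Applied to the MongoLib class from the question, a minimal sketch of the connect-once approach (it caches the connection promise; an illustration, not the driver's prescribed pattern):

class MongoLib {
  constructor() {
    this.client = new MongoClient(MONGO_URI, { useNewUrlParser: true });
    this.dbName = DB_NAME;
    this.connection = null; // cached promise of a connected db handle
  }

  connect() {
    if (!this.connection) {
      // connect only once; every later caller reuses the same promise
      this.connection = this.client.connect().then(() => this.client.db(this.dbName));
    }
    return this.connection;
  }

  async getUser(collection, username) {
    const db = await this.connect();
    return db.collection(collection).find({ username }).toArray();
  }
}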
The function .connect() takes three arguments and is defined as MongoClient.connect(url[, options], callback). So you need to provide a URL first, then the options, and only then the callback. Here is an example from the docs:
MongoClient.connect("mongodb://localhost:27017/integration_tests", { native_parser: true }, function (err, db) {
assert.equal(null, err);
db.collection('mongoclient_test').update({ a: 1 }, { b: 1 }, { upsert: true }, function (err, result) {
assert.equal(null, err);
assert.equal(1, result);
db.close();
});
});
Another way to go, since you already created your MongoClient, is to use .open instead. It only takes a callback, but you call it on the mongoClient you created (this.client). You can use it like this:
this.client.open(function(err, mongoclient) {
// Do stuff
});
Note
Make sure you check out the MongoClient docs; you'll find a lot of good examples that may guide you even better.
poolSize is deprecated, use maxPoolSize.
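For instance, with recent driver versions (4.x+) the pool size is passed like this (a small sketch, not from the original answer):

const { MongoClient } = require("mongodb");

// maxPoolSize replaces the deprecated poolSize option
const client = new MongoClient(MONGO_URI, { maxPoolSize: 10 });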
I am writing an integration test for a Node.js/Sails.js application, where I have an async controller method/route that throws an error when input parameters are not provided.
I am using supertest to write the integration test. Everything works fine from my perspective, but when the test runs, the error is written to the console.
describe("a controller method", () => {
it("should throw error message", () => {
server('127.0.0.1')
.get('url to getData')
.set('Cookie', 'cookie token')
.end((err, res) => {
if(err) return done(err);
//console.log(res);
expect(res.status).to.equal(500);
expect(res.body).to.have.property('error', 'Provide a jobName');
done();
});
});
});
The following piece of code works fine because I wrap the call in an anonymous function and expect that function to throw, but I am not sure how to assert against the actual error.
it("throws", () => {
expect( function () {
server('127.0.0.1')
.get('some url')
.set('Cookie', 'cookie token')
}).to.throw;
});
The controller code looks like the following. This is the method that is called when the URL endpoint is requested.
getData : async (req, res) => {
let jobName = req.params.jobName || '',
buildId = req.params.buildId || '';
if(!jobName.trim() ){
return res.negotiate({error:'Provide a jobName'});
}
if(isNaN(buildId)) {
return res.negotiate({error:'Invalid build id supplied.'});
}
try {
let rawResult = await getData(jobName, buildId);
let promotions = formatData(rawResult);
let result = {
total : promotions.length || 0,
items : promotions
};
return res.json(result);
} catch(error) {
sails.log.error(`Request Parameter: Job name = ${req.param('jobName')} & build id = ${req.param('buildId')}`);
sails.log.error(error);
return res.negotiate({error: sails.config.errorMessage});
}
}
Why is the error being written to the console? What am I doing wrong here?
Any help/pointer is highly appreciated!!
How are you sending that error from Express? In general, Express favors passing errors along rather than throwing them: for any error in the program, you can pass the error object to the next function. http://expressjs.com/en/guide/error-handling.html
e.g.
app.use(function (err, req, res, next) {
console.error(err.stack)
res.status(500).send('Something broke!')
})
An async function returns a promise, and although you have placed a try/catch block, my guess is the error does not occur inside that block. It is most likely due to a promise rejection not being handled.
The below should help you as it applies to the Sails context.
since async functions return a promise, you need to .catch() any
promise rejections and pass them along to next(). Express error
handlers are only triggered by errors passed to next(), not exceptions
that you throw. Source
The same goes for rejections produced via reject() inside a Promise.
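As an illustration of that advice, a sketch only (doWork is a hypothetical async helper, not from the question):

// Express error handlers only see errors passed to next()
app.get("/api/data", (req, res, next) => {
  doWork(req.params.jobName)        // hypothetical helper returning a promise
    .then(result => res.json(result))
    .catch(next);                   // forward the rejection to the error middleware
});

app.use((err, req, res, next) => {
  console.error(err.stack);
  res.status(500).json({ error: "Something broke!" });
});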
I'm creating an app that will use https://github.com/vpulim/node-soap to communicate with a SOAP server.
I would like to create a client component from which I will forward the necessary methods to the soap client created with this module.
I'm having trouble returning an object that uses this client, since the client is created asynchronously.
var soap = require('soap');
var url = 'http://someurl?wsdl';
soap.createClient(url, function(err, client) {
if (err) {
console.log(err);
return;
}
console.log(client.describe());
// I need to publish this client so that other functions in this file will be able to use it
});
module.exports = {
doSomething: function() {
//how can I access the client here?
}
};
How would I go about doing this?
One solution to this problem is to use promises:
var soap = require('soap');
var url = 'http://someurl?wsdl';
var clientPromise = new Promise(function(resolve, reject) {
soap.createClient(url, function(err, client) {
if (err) {
// reject the promise when an error occurs
reject(err);
return;
}
// resolve the promise with the client when it's ready
resolve(client);
});
});
module.exports = {
doSomething: function() {
// promise will wait asynchronously until client is ready
// then call the .then() callback with the resolved value (client)
return clientPromise.then(function(client) {
// do something with client here
}).catch(function(err) {
// handle errors here
console.error(err);
});
}
};
A few advantages to this:
Promises are native JavaScript objects (as of Node 4.0.0, with packages such as bluebird providing support for prior versions)
Promises can be "reused": if clientPromise has already resolved once, it will immediately resolve when doSomething is later called.
Some disadvantages:
doSomething and all other exported functions are inherently asynchronous.
It is not directly compatible with Node-style callbacks (the usage sketch below shows the promise-based calling style).
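For completeness, a consumer of this module would look roughly like this (a sketch, assuming the file above is saved as soapClient.js):

const soapClient = require('./soapClient'); // hypothetical filename

soapClient.doSomething()
  .then(function (result) {
    // result is whatever the .then() inside doSomething returns
    console.log(result);
  })
  .catch(function (err) {
    console.error(err);
  });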
Not sure if my response helps you, but this is how I do it. I call createClient every time, and then within the client I call the actual SOAP method (here GetAccumulators). It may not be a great way, but it works for me. Here is my code sample (it runs inside a new Promise((resolve, reject) => { ... }) executor, which is where resolve and reject come from):
soap.createClient(url, function (err, client) {
  if (err) {
    logger.error(err, 'Error creating SOAP client for %s', tranId);
    return reject('Could not create SOAP client');
  }
  client.addSoapHeader(headers);
  // envelope stuff
  client.wsdl.definitions.xmlns.soapenv = 'http://schemas.xmlsoap.org/soap/envelope/';
  client.wsdl.definitions.xmlns.acc = 'http://exampleurl/';
  client.wsdl.xmlnsInEnvelope = client.wsdl._xmlnsMap();
  client.GetAccumulators(args, function (err, result) {
    if (err) {
      if (isNotFoundError(err)) {
        logger.debug('Member not found for tranId %s', tranId);
        return reject({status: 404, description: 'No match found'});
      }
      return reject({status: 500, description: 'GetAccumulators error'});
    }
    return resolve({data: result, tranId: tranId});
  });
});
I have a simple Node module which connects to a database and has several functions to retrieve data, for example this function:
dbConnection.js:
import mysql from 'mysql';
const connection = mysql.createConnection({
host: 'localhost',
user: 'user',
password: 'password',
database: 'db'
});
export default {
getUsers(callback) {
connection.connect(() => {
connection.query('SELECT * FROM Users', (err, result) => {
if (!err){
callback(result);
}
});
});
}
};
The module would be called this way from a different node module:
app.js:
import dbCon from './dbConnection.js';
dbCon.getUsers(console.log);
I would like to use promises instead of callbacks in order to return the data.
So far I've read about nested promises in the following thread: Writing Clean Code With Nested Promises, but I couldn't find any solution that is simple enough for this use case.
What would be the correct way to return result using a promise?
Using the Promise class
I recommend taking a look at MDN's Promise docs, which offer a good starting point for using Promises. Alternatively, I am sure there are many tutorials available online. :)
Note: Modern browsers already support the ECMAScript 6 specification of Promises (see the MDN docs linked above), and I assume that you want to use the native implementation, without 3rd-party libraries.
As for an actual example...
The basic principle works like this:
Your API is called
You create a new Promise object, this object takes a single function as constructor parameter
Your provided function is called by the underlying implementation and the function is given two functions - resolve and reject
Once you do your logic, you call one of these to either fulfill the Promise or reject it with an error
This might seem like a lot so here is an actual example.
exports.getUsers = function getUsers () {
// Return the Promise right away, unless you really need to
// do something before you create a new Promise, but usually
// this can go into the function below
return new Promise((resolve, reject) => {
// reject and resolve are functions provided by the Promise
// implementation. Call only one of them.
// Do your logic here - you can do WTF you want.:)
connection.query('SELECT * FROM Users', (err, result) => {
// PS. Fail fast! Handle errors first, then move to the
// important stuff (that's a good practice at least)
if (err) {
// Reject the Promise with an error
return reject(err)
}
// Resolve (or fulfill) the promise with data
return resolve(result)
})
})
}
// Usage:
exports.getUsers() // Returns a Promise!
.then(users => {
// Do stuff with users
})
.catch(err => {
// handle errors
})
Using the async/await language feature (Node.js >=7.6)
In Node.js 7.6, the V8 JavaScript engine was upgraded with async/await support. You can now declare functions as async, which means they automatically return a Promise that is resolved when the async function completes execution. Inside such a function, you can use the await keyword to wait until another Promise resolves.
Here is an example:
exports.getUsers = async function getUsers() {
// We are in an async function - this will return Promise
// no matter what.
// We can interact with other functions which return a
// Promise very easily:
const result = await connection.query('select * from users')
// Interacting with callback-based APIs is a bit more
// complicated but still very easy:
const result2 = await new Promise((resolve, reject) => {
connection.query('select * from users', (err, res) => {
return void err ? reject(err) : resolve(res)
})
})
// Returning a value will cause the promise to be resolved
// with that value
return result
}
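Calling it is then straightforward (a usage note, not part of the original answer):

// inside another async function
const users = await exports.getUsers()

// or, from non-async code, via the returned Promise
exports.getUsers().then(users => console.log(users))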
With bluebird you can use Promise.promisifyAll (and Promise.promisify) to add Promise ready methods to any object.
var Promise = require('bluebird');
// Somewhere around here, the following line is called
Promise.promisifyAll(connection);
exports.getUsersAsync = function () {
return connection.connectAsync()
.then(function () {
return connection.queryAsync('SELECT * FROM Users')
});
};
And use like this:
getUsersAsync().then(console.log);
or
// Spread because MySQL queries actually return two resulting arguments,
// which Bluebird resolves as an array.
getUsersAsync().spread(function(rows, fields) {
// Do whatever you want with either rows or fields.
});
Adding disposers
Bluebird supports a lot of features; one of them is disposers, which allow you to safely dispose of a connection after it has ended, with the help of Promise.using and Promise.prototype.disposer. Here's an example from my app:
function getConnection(host, user, password, port) {
// connection was already promisified at this point
// The object literal syntax is ES6, it's the equivalent of
// {host: host, user: user, ... }
var connection = mysql.createConnection({host, user, password, port});
return connection.connectAsync()
// connect callback doesn't have arguments. return connection.
.return(connection)
.disposer(function(connection, promise) {
//Disposer is used when Promise.using is finished.
connection.end();
});
}
Then use it like this:
exports.getUsersAsync = function () {
  // Promise.using takes the disposer-wrapped resource and a handler function
  return Promise.using(getConnection(), function (connection) {
    return connection.queryAsync('SELECT * FROM Users');
  });
};
This will automatically end the connection once the promise resolves with the value (or rejects with an Error).
Node.js version 8.0.0+:
You don't have to use Bluebird to promisify the Node API methods anymore, because from version 8+ you can use the native util.promisify:
const util = require('util');
// Promisify the original callback-based methods; bind keeps `connection` as `this`
const connectAsync = util.promisify(connection.connect.bind(connection));
const queryAsync = util.promisify(connection.query.bind(connection));
exports.getUsersAsync = function () {
  return connectAsync()
    .then(function () {
      return queryAsync('SELECT * FROM Users');
    });
};
Now you don't have to use any 3rd-party lib for promisification.
Assuming your database adapter API doesn't output Promises itself you can do something like:
exports.getUsers = function () {
var promise;
promise = new Promise();
connection.connect(function () {
connection.query('SELECT * FROM Users', function (err, result) {
if(!err){
promise.resolve(result);
} else {
promise.reject(err);
}
});
});
return promise.promise();
};
If the database API does support Promises you could do something like: (here you see the power of Promises, your callback fluff pretty much disappears)
exports.getUsers = function () {
return connection.connect().then(function () {
return connection.query('SELECT * FROM Users');
});
};
Using .then() to return a new (nested) promise.
Call with:
module.getUsers().done(function (result) { /* your code here */ });
I used a mockup API for my Promises, your API might be different. If you show me your API I can tailor it.
2019:
Use the native module const {promisify} = require('util'); to convert the plain old callback pattern to the promise pattern, so you can benefit from async/await code:
const {promisify} = require('util');
const glob = promisify(require('glob'));
app.get('/', async function (req, res) {
const files = await glob('src/**/*-spec.js');
res.render('mocha-template-test', {files});
});
When setting up a promise you take two parameters, resolve and reject. In the case of success, call resolve with the result; in the case of failure, call reject with the error.
Then you can write:
getUsers().then(callback)
callback will be called with the result of the promise returned from getUsers, i.e. result
Using the Q library for example:
function getUsers(param){
  var d = Q.defer();
  connection.connect(function () {
    connection.query('SELECT * FROM Users', function (err, result) {
      if (!err){
        d.resolve(result);
      } else {
        d.reject(err); // reject the deferred so callers can handle the error
      }
    });
  });
  return d.promise;
}
The code below works only for Node version > 8.x.
I use this promisified MySQL middleware for Node.js.
Read this article: Create a MySQL Database Middleware with Node.js 8 and Async/Await.
database.js
var mysql = require('mysql');
// node -v must > 8.x
var util = require('util');
// !!!!! for node version < 8.x only !!!!!
// npm install util.promisify
//require('util.promisify').shim();
// -v < 8.x has problem with async await so upgrade -v to v9.6.1 for this to work.
// connection pool https://github.com/mysqljs/mysql [1]
var pool = mysql.createPool({
connectionLimit : process.env.mysql_connection_pool_Limit, // default:10
host : process.env.mysql_host,
user : process.env.mysql_user,
password : process.env.mysql_password,
database : process.env.mysql_database
})
// Ping database to check for common exception errors.
pool.getConnection((err, connection) => {
if (err) {
if (err.code === 'PROTOCOL_CONNECTION_LOST') {
console.error('Database connection was closed.')
}
if (err.code === 'ER_CON_COUNT_ERROR') {
console.error('Database has too many connections.')
}
if (err.code === 'ECONNREFUSED') {
console.error('Database connection was refused.')
}
}
if (connection) connection.release()
return
})
// Promisify for Node.js async/await.
pool.query = util.promisify(pool.query)
module.exports = pool
You must upgrade to Node version > 8.x, and you must use an async function to be able to use await.
Example:
var pool = require('./database')
// node -v must > 8.x, --> async / await
router.get('/:template', async function(req, res, next)
{
...
try {
var _sql_rest_url = 'SELECT * FROM arcgis_viewer.rest_url WHERE id='+ _url_id;
var rows = await pool.query(_sql_rest_url)
_url = rows[0].rest_url // first record, property name is 'rest_url'
if (_center_lat == null) {_center_lat = rows[0].center_lat }
if (_center_long == null) {_center_long= rows[0].center_long }
if (_center_zoom == null) {_center_zoom= rows[0].center_zoom }
_place = rows[0].place
} catch(err) {
throw new Error(err)
}