Better place to set up an initialisation in SailsJS - javascript

I'm new to Sails and don't know exactly where to put the initialisation of an object that needs to be unique across the whole app. After reading the docs I assumed I could keep it in the global sails object, but I'm not sure that's the best place.
I'm using the new Appcelerator ArrowDB to store my users and objects. The docs say to declare the appropriate vars and use them together with the APP_KEY:
var ArrowDB = require('arrowdb'),
    arrowDBApp = new ArrowDB('<App Key>');

function login(req, res) {
  var data = {
    login: req.body.username,
    password: req.body.password,
    // the req and res parameters are optional
    req: req,
    res: res
  };

  arrowDBApp.usersLogin(data, function(err, result) {
    if (err) {
      console.error("Login error:" + (err.message || result.reason));
    } else {
      console.log("Login successful!");
      console.log("UserInfo: " + JSON.stringify(result.body.response.users[0]));
    }
  });
}
But I will need to use that arrowDBApp var constantly to create, update, and delete objects in the database, so I think the best approach is to initialize it in the startup script app.js and share it across the app.
I tried that, but I was not able to store it in the sails var; it seems that this var is not available (or loses its config) until sails.lift() is executed.
This code (app.js file) shows nothing in the console:
// Ensure we're in the project directory, so relative paths work as expected
// no matter where we actually lift from.
process.chdir(__dirname);

// Ensure a "sails" can be located:
(function() {
  var sails;
  try {
    sails = require('sails');
  } catch (e) {
    console.error('To run an app using `node app.js`, you usually need to have a version of `sails` installed in the same directory as your app.');
    console.error('To do that, run `npm install sails`');
    console.error('');
    console.error('Alternatively, if you have sails installed globally (i.e. you did `npm install -g sails`), you can use `sails lift`.');
    console.error('When you run `sails lift`, your app will still use a local `./node_modules/sails` dependency if it exists,');
    console.error('but if it doesn\'t, the app will run with the global sails instead!');
    return;
  }

  // Try to get `rc` dependency
  var rc;
  try {
    rc = require('rc');
  } catch (e0) {
    try {
      rc = require('sails/node_modules/rc');
    } catch (e1) {
      console.error('Could not find dependency: `rc`.');
      console.error('Your `.sailsrc` file(s) will be ignored.');
      console.error('To resolve this, run:');
      console.error('npm install rc --save');
      rc = function () { return {}; };
    }
  }

  // My own code
  var APP_KEY = 'mykey';
  var ArrowDB = require('arrowdb');
  sails.arrowDBApp = new ArrowDB(APP_KEY);
  console.log("Hi" + JSON.stringify(sails));

  // Start server
  sails.lift(rc('sails'));
  console.log("Finish");
})();
No "HI" and no "Finish" are printed. If I try to use sails.arrowDBApp in another controller, it is undefined.
Tips are welcome.

It's not advisable to modify app.js unless you really need to.
The usual place to keep configuration information (e.g. the APP_KEY) is the config directory in your project root.
One-time initializations (e.g. ArrowDB initialization) can be added to config/bootstrap.js.
Update
In config/arrowdb.js (you need to create this file yourself):
module.exports.arrowdb = {
  APP_KEY: 'yourappkey',
  ArrowDBApp: null
};
In config/bootstrap.js:
var ArrowDB = require('arrowdb');

module.exports.bootstrap = function(next) {
  sails.config.arrowdb['ArrowDBApp'] = new ArrowDB(sails.config.arrowdb['APP_KEY']);
  next(); // Don't forget to add this
};
In your controller:
'task': function(req, res, next) {
  sails.config.arrowdb['ArrowDBApp'].usersLogin(...);
  // and so on.
  // You could also add something like
  // var ADB = sails.config.arrowdb['ArrowDBApp'];
  // at the top in case you need to use it on and on.
}

Use config/bootstrap.js to initialize something before Sails lifts. This approach is also good when you want to put something into a global variable, for example defining or overriding the native Promise with the Bluebird Promise (a minimal sketch follows after these points).
Use api/services for methods or other things that you will use regularly in your code (controllers, models, etc.), like a Mail Service that handles sending email within your application.
Use the config folder to predefine something at sails.config[something]. It can be an object, a function, or whatever needs to be configurable, like a Twitter API key for using the Twitter REST API.
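For instance, here is a minimal sketch of that bootstrap approach (it assumes the bluebird package is listed in package.json; the global name is just an illustration):
// config/bootstrap.js -- a sketch, assuming `bluebird` is installed
var Bluebird = require('bluebird');

module.exports.bootstrap = function(next) {
  // expose Bluebird app-wide (or override global.Promise if preferred)
  global.Promise = Bluebird;
  next(); // tell Sails the bootstrap is done
};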
To achieve what you want, I'd use a service together with bootstrap.js. Try this example.
Create a service file at api/services/ArrowDBService.js
and put this code in it:
var ArrowDB = require('arrowdb'),
    arrowDBApp = new ArrowDB('<App Key>');

module.exports = {
  arrowDBApp: arrowDBApp,

  login: function (req, res) {
    var data = {
      login: req.body.username,
      password: req.body.password,
      // the req and res parameters are optional
      req: req,
      res: res
    };

    arrowDBApp.usersLogin(data, function(err, result) {
      if (err) {
        console.error("Login error:" + (err.message || result.reason));
      } else {
        console.log("Login successful!");
        console.log("UserInfo: " + JSON.stringify(result.body.response.users[0]));
      }
    });
  }
};
Now you can use it via sails.services.arrowdbservice.login(req, res) or simply ArrowDBService.login(req, res) (note that the names are case sensitive). Since I don't know ArrowDB, you may need to explore the login method from your example yourself.
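For illustration, a controller action could delegate to the service like this (a sketch; the controller and action names are hypothetical, and it assumes Sails' default globally exposed services):
// api/controllers/UserController.js -- hypothetical controller showing the call
module.exports = {
  login: function (req, res) {
    // delegate to the service defined above
    ArrowDBService.login(req, res);
  }
};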

Related

Parsing XML to JSON in Amazon Lambda using external js libraries

I am trying to convert an XML String that I get from a server to JSON inside my Lambda function.
I have set up this rather simple example to simulate the XML answer that I get from the server, using DynamoDB. (Currently I'm just trying to get the conversion going.)
'use strict';
var AWS = require('aws-sdk');
var docClient = new AWS.DynamoDB.DocumentClient({region: 'eu-west-1'});

exports.handler = function (e, ctx, callback) {
  let table = "dsbTable";
  let bpNumber = 1337;
  var test;
  var x2js = new X2JS();

  let params = {
    TableName: table,
    Key: {
      "bpNumber": bpNumber
    },
  };

  docClient.get(params, function(err, data) {
    if (err) {
      console.error("Unable to read item. Error JSON:", JSON.stringify(err, null, 2));
      callback(err, null);
    } else {
      console.log("GetItem succeeded:", JSON.stringify(data, null, 2));
      console.log('test' + data.Item.getBp);
      //var jsonObj = x2js.xml_str2json(data.Item.getBp);
      //console.log(jsonObj);
      callback(null, data);
    }
  });
};
Getting the item works just fine; it is returned like this:
{
"Item": {
"getBp": "<message version=\"1.0\" system=\"AVS/3\"><header><client>553</client><avs3-sales-organization>7564</avs3-sales-organization><avs3-service-provider>DSD</avs3-service-provider></header><body><business-partner><salutation-code>01</salutation-code><titel-code-academic/><titel-academic/><titel-code-royal/><titel-royal/><job-titel/><last-name1>Pickle</last-name1><last-name2/><first-name>N</first-name><street/><street-suffix/><street-number/><street-number-suffix/><address-line-1>10 Waterside Way</address-line-1><address-line-2/><address-line-3/><zipcode>NN4 7XD</zipcode><country-code>GB</country-code><city>NORTHAMPTON</city><district/><region-code>NH</region-code><region-text>Northamptonshire</region-text><company1/><company2/><company3/><department/><po-box/><po-box-zipcode/><po-box-city/><po-box-country-code/><major-customer-zipcode/><address-source/><advertisement>Y</advertisement><category/><bp-number>1100000772</bp-number><bp-number-external/><bp-group>ABON</bp-group><eu-sales-tax-number/><bic-master-number/><sector/><communication><communication-type>WW</communication-type><communication-value>kate.southorn#dsbnet.co.uk</communication-value><communication-default>Y</communication-default></communication><attribute><attribute-type>ACC</attribute-type><attribute-value>Y</attribute-value></attribute><attribute><attribute-type>OIEMEX</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OINLIN</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OISMEX</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OISMIN</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OOEMIN</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OOFXEX</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OOFXIN</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OOPTEX</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OOPTIN</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OOTEEX</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>OOTEIN</attribute-type><attribute-value>N20121211</attribute-value></attribute><attribute><attribute-type>THEDSU</attribute-type><attribute-value/></attribute></business-partner></body></message>",
"bpNumber": 1337
}
}
My main issue now is that I cannot figure out how I can import any XML-to-JSON library files, like this one here.
I hope my code in this case is not completely worthless and there is a rather simple solution.
You're going down the path that many new Lambda users have gone down before.
With Lambda, it is absolutely easy: you just write your code and validate that it works as expected, meaning on your own computer.
Once you have validated it, do as follows:
Zip the entire folder's contents, including the node_modules directory and any dependencies you use.
Upload it to Lambda.
If you accidentally zipped the containing folder as well, that is fine; just make sure to update the Lambda handler to run the script from dir_name/file_name.function_name (and don't forget to export function_name from your module).
The handler name is always <filename>.<handler function name>; if the filename is specified incorrectly, a similar error shows up in the CloudWatch logs.
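As a sketch of how this fits together (assuming xml2js was installed locally with npm install xml2js so that node_modules ships inside the zip, the file is named index.js, and the XML string arrives on the event; the Lambda handler setting would then be index.handler):
// index.js -- a minimal sketch; the file name, event shape and the xml2js choice are assumptions
'use strict';
var parseString = require('xml2js').parseString;

exports.handler = function (event, context, callback) {
  var xml = event.xml; // hypothetical: the XML string is passed in on the event
  parseString(xml, function (err, result) {
    if (err) return callback(err);
    callback(null, result); // result is the parsed JSON object
  });
};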

Reuse parameter of callback / Reuse channel

I would like to reuse a RabbitMQ channel in different Node modules. Since the channel is created asynchronously, I am not sure what the best approach would be to "inject" this channel object into other modules.
If possible, I would like to avoid external dependencies like DI containers for this.
Below, you find my simplified code.
Thank you in advance for any advice.
web.js
require('./rabbitmq')(function (err, conn) {
  ...
  // Start web server
  var http = require('./http');
  var serverInstance = http.listen(process.env.PORT || 8000, function () {
    var host = serverInstance.address().address;
    var port = serverInstance.address().port;
  });
});
rabbitmq.js:
module.exports = function (done) {
  ...
  amqp.connect(rabbitMQUri, function (err, conn) {
    ...
    conn.createChannel(function(err, ch) {
      ...
      // I would like to reuse ch in other modules
    });
  });
}
someController.js
module.exports.post = function (req, res) {
  // Reuse/inject "ch" here, so I can send messages
}
You could always attach your channel handle to the global object so it can be accessed across modules:
// rabbitmq.js
global.ch = ch
// someController.js
global.ch // <- is the channel instance
I would prefer to inject the handle into each function, because I think it is more explicit and easier to reason about and unit test. You could do that with partial function application,
for example with lodash:
// lodash provides the partial application helper
var _ = require('lodash');

var post = function (req, res, rabbitMQchannel) {
  // Reuse/inject "ch" here, so I can send messages
}

var ch = getYourChannel();

// post is a function object that accepts two params, and the
// third is bound to your channel
module.exports.post = _.partialRight(post, ch);
A third option is to have rabbitmq.js export a function for getting a channel. The channel could be a singleton, so there would only ever be one instance of it. Then any method that wants a channel handle would request one from that function, which wouldn't require channels to be passed around through function arguments or globals.
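A minimal sketch of that third option, assuming the amqplib callback API and a hypothetical RABBITMQ_URI environment variable:
// rabbitmq.js -- sketch of a lazily created singleton channel (amqplib callback API assumed)
var amqp = require('amqplib/callback_api');

var channel = null; // cached singleton
var pending = [];   // callers waiting while the channel is being created

module.exports.getChannel = function (done) {
  if (channel) return done(null, channel); // already created: hand it out
  pending.push(done);
  if (pending.length > 1) return;          // a connection attempt is already in flight

  amqp.connect(process.env.RABBITMQ_URI, function (err, conn) {
    if (err) return flush(err, null);
    conn.createChannel(function (err, ch) {
      if (!err) channel = ch;
      flush(err, ch);
    });
  });

  function flush(err, ch) {
    pending.splice(0).forEach(function (cb) { cb(err, ch); });
  }
};

// someController.js -- usage sketch
// require('./rabbitmq').getChannel(function (err, ch) {
//   if (err) return res.status(500).end();
//   ch.sendToQueue('tasks', Buffer.from('hello'));
//   res.end();
// });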

Node.js respond with asynchronous data

Recently I started learning a little bit about Node.js and its capabilities and tried to use it for some web services.
I wanted to create a web service which will serve as a proxy for web requests.
I wanted my service to work that way:
User will access my service -> http://myproxyservice.com/api/getuserinfo/tom
My service will perform request to -> http://targetsite.com/user?name=tom
The response data would then be relayed back to the user.
To implement it I used the following code:
app.js:
var express = require('express');
var bodyParser = require('body-parser');

var app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());

var proxy = require('./proxy_query.js');

function makeProxyApiRequest(name) {
  return proxy.getUserData(name, parseProxyApiRequest);
}

function parseProxyApiRequest(data) {
  returned_data = JSON.parse(data);
  if (returned_data.error) {
    console.log('An error has occurred. details: ' + JSON.stringify(returned_data));
    returned_data = '';
  }
  return JSON.stringify(returned_data);
}

app.post('/api/getuserinfo/tom', function(request, response) {
  makeProxyApiRequest('tom', response);
  //response.end(result);
});

var port = 7331;
proxy_query.js:
var https = require('https');

var callback = undefined;
var options = {
  host: 'targetsite.com',
  port: 443,
  method: 'GET',
};

function resultHandlerCallback(result) {
  var buffer = '';
  result.setEncoding('utf8');

  result.on('data', function(chunk){
    buffer += chunk;
  });

  result.on('end', function(){
    if (callback) {
      callback(buffer);
    }
  });
}

exports.getUserData = function(name, user_callback) {
  callback = user_callback;
  options['path'] = user + '?name=' + name;

  var request = https.get(options, resultHandlerCallback);
  request.on('error', function(e){
    console.log('error from proxy_query:getUserData: ' + e.message);
  });

  request.end();
};
app.listen(port);
I hope I didn't screw up this code, because I replaced some stuff to fit my example.
Anyway, the problem is that I want to send the response to the user when the HTTP request is done, and I can't figure out how to do so, because I use Express and Express uses asynchronous calls, and so does the HTTP request.
I know that if I want to do so, I should pass makeProxyApiRequest the response object so it can pass it on to the callback, but that doesn't seem possible because of the async issues.
Any suggestions?
Help will be appreciated.
As you're using your functions to process requests inside your route handling, it's better to write them as express middleware functions, taking the specific request/response pair, and making use of express's next cascade model:
function makeProxyApiRequest(req, res, next) {
  var name = parseProxyApiRequest(req.name);
  res.locals.userdata = proxy.getUserData(name);
  next();
}
function parseProxyApiRequest(req, res, next) {
  try {
    // remember that JSON.parse will throw if it fails!
    var data = JSON.parse(res.locals.userdata);
    if (data.error) {
      return next('An error has occurred. details: ' + JSON.stringify(data));
    }
    res.locals.proxyData = data;
    next();
  }
  catch (e) { next("could not parse user data JSON."); }
}
app.post('/api/getuserinfo/tom',
  makeProxyApiRequest,
  parseProxyApiRequest,
  function(req, res) {
    // res.write or res.json or res.render or
    // something, with this specific request's
    // data that we stored in res.locals.proxyData
  }
);
Even better would be to move those middleware functions into their own file now, so you can simply do:
var middleware = require("./lib/proxy_middleware");

app.post('/api/getuserinfo/tom',
  middleware.makeProxyApiRequest,
  middleware.parseProxyApiRequest,
  function(req, res) {
    // res.write or res.json or res.render or
    // something, with this specific request's
    // data that we stored in res.locals.proxyData
  }
);
And keep your app.js as small as possible. Note that the client's browser will simply wait for a response from Express, which happens once res.write, res.json, res.render, etc. is used. Until then the connection is simply kept open between the browser and the server, so if your middleware calls take a long time, that's fine; the browser will happily wait a long time for a response to get sent back, and will be doing other things in the meantime.
Now, in order to get the name, we can use express's parameter construct:
app.param("name", function(req, res, next, value) {
req.params.name = value;
// do something if we need to here, like verify it's a legal name, etc.
// for instance:
var isvalidname = validator.checkValidName(name);
if(!isvalidname) { return next("Username not valid"); }
next();
});
...
app.post("/api/getuserinfo/:name", ..., ..., ...);
Using this system, the :name part of any route will be treated based on the name parameter we defined using app.param. Note that we don't need to define this more than once: we can do the following and it'll all just work:
app.post("/api/getuserinfo/:name", ..., ..., ...);
app.post("/register/:name", ..., ..., ... );
app.get("/api/account/:name", ..., ..., ... );
and for every route with :name, the code for the "name" parameter handler will kick in.
As for the proxy_query.js file, rewriting this to a proper module is probably safer than using individual exports:
// let's not do more work than we need: http://npmjs.org/package/request
// is way easier than rolling our own URL fetcher. In Node.js the idea is
// to write as little as possible, relying on npmjs.org to find you all
// the components that you need to glue together. If you're writing more
// than just the glue, you're *probably* doing more than you need to.
var request = require("request");

module.exports = {
  getURL: function(name, url, callback) {
    request.get(url, function(err, result) {
      if(err) return callback(err);
      // do whatever processing you need to do to result:
      var processedResult = ....
      callback(false, processedResult);
    });
  }
};
and then we can use it as proxy = require("./lib/proxy_query"); in the middleware that needs to actually do the URL data fetching.
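To tie that module into the middleware asynchronously, a sketch like this could work (the target URL format is an assumption taken from the question, and proxy_middleware.js is the hypothetical file mentioned earlier):
// lib/proxy_middleware.js -- sketch using the getURL module above
var proxy = require("./proxy_query");

function makeProxyApiRequest(req, res, next) {
  // assumed URL shape, based on the question's proxy target
  var url = "https://targetsite.com/user?name=" + encodeURIComponent(req.params.name);
  proxy.getURL(req.params.name, url, function (err, processedResult) {
    if (err) return next(err);              // hand errors to Express
    res.locals.proxyData = processedResult; // stash for the final handler
    next();
  });
}

module.exports = { makeProxyApiRequest: makeProxyApiRequest };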

Best approach for a modular node.js/socket.io/express application

I'm currently creating an app using Node.js that makes use of Express and Socket.io. As time progresses it's becoming increasingly difficult to deal with a single file; I'm in the process of moving out the parts I know how to, but I was wondering about the best approach to do this.
I have a private area constructor similar to:
privateArea.js
function privateArea(props) {
  this.id = props.id;
  this.name = props.name;
  this.users = [];
}

privateArea.prototype.addUser = function(socketId) {
  this.users.push(socketId);
};

module.exports = privateArea;
I'd like this to also have access to the socket.io variable that's been set up for use in a separate sockets.js file, which can be included via the main app.js, plus a separate file for express.js.
So I'd like the structure as follows:
project
| app.js - joins it all together
| express.js - initialises and manages all express routing
| privateArea.js - constructor for private areas - must be able to reference socket.io
| sockets.js - initialises and manages all socket.io sockets and events
Any help/examples would be very appreciated.
Thanks
I use socket.io and express quite often in my projects, and I've developed a template which makes things easy. I like to have a fail-over in case the socket connection drops for some reason, or if a socket connection cannot be established. So I create HTTP channels as well as socket channels. Here's a basic module template:
module.exports = function () {
  var exported = {};

  var someFunction = function (done) {
    //.. code here..//
    if (typeof done === "function") {
      done(null, true);
    }
  };

  // export the function
  exported.someFunction = someFunction;

  var apicalls = function (app) {
    app.get("/module/someFunction", function (req, res) {
      res.header("Content-Type", "application/json");
      someFunction(function (err, response) {
        if (err) return res.send(JSON.stringify(err));
        res.send(JSON.stringify(response));
      });
    });
  };
  exported.apicalls = apicalls;

  var socketcalls = function (io) {
    io.on("connection", function (socket) {
      socket.on('module-someFunction', function () {
        someFunction(function (err, response) {
          if (err) return socket.emit('module-someFunction', err);
          socket.emit('module-someFunction', response);
        });
      });
    });
  };
  exported.socketcalls = socketcalls;

  return exported;
}
So to use this, I'd first need to include the module in my app.js file like this:
var mymod = require('./myModule.js')(); // note the trailing () since the module exports a factory function
And then I can enable access to this service from HTTP and over the websocket like this:
mymod.apicalls(app); // passing express to the module
mymod.socketcalls(io); // passing socket.io to the module
Finally, from the front-end, I can check to see if I have a socket connection, and if so, I use the socket to emit "module-someFunction". If I don't have a socket connection, the front-end will do an AJAX call instead to "/module/someFunction" which will hit the same function on the server side that it would've had I used the socket connection.
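A front-end sketch of that fallback could look like the following (it assumes the socket.io client is loaded as socket and mirrors the endpoint and event names from the template above; the XHR part is just one way to do the AJAX call):
// front-end sketch -- falls back to the HTTP route when no socket is connected
function callSomeFunction(handleResponse) {
  if (socket && socket.connected) {
    socket.once('module-someFunction', handleResponse); // reply arrives on the same event
    socket.emit('module-someFunction');
  } else {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', '/module/someFunction');
    xhr.onload = function () { handleResponse(JSON.parse(xhr.responseText)); };
    xhr.send();
  }
}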
As an added bonus, if I need to utilize the function within the server, I could do that as well since the function is exported. That would look like this:
mymod.someFunction(function (err, response) {
  // ... handle result here ... //
});

"Meteor code must always run within a Fiber" when calling Collection.insert on server

I have the following code in server/statusboard.js:
var require = __meteor_bootstrap__.require,
    request = require("request");

function getServices(services) {
  services = [];
  request('http://some-server/vshell/index.php?type=services&mode=json', function (error, response, body) {
    var resJSON = JSON.parse(body);
    _.each(resJSON, function(data) {
      var host = data["host_name"];
      var service = data["service_description"];
      var hardState = data["last_hard_state"];
      var currState = data["current_state"];
      services+={host: host, service: service, hardState: hardState, currState: currState};
      Services.insert({host: host, service: service, hardState: hardState, currState: currState});
    });
  });
}

Meteor.startup(function () {
  var services = [];
  getServices(services);
  console.log(services);
});
Basically, it's pulling some data from a JSON feed and trying to push it into a collection.
When I start up Meteor I get the following exception;
app/packages/livedata/livedata_server.js:781
throw exception;
^
Error: Meteor code must always run within a Fiber
at [object Object].withValue (app/packages/meteor/dynamics_nodejs.js:22:15)
at [object Object].apply (app/packages/livedata/livedata_server.js:767:45)
at [object Object].insert (app/packages/mongo-livedata/collection.js:199:21)
at app/server/statusboard.js:15:16
at Array.forEach (native)
at Function.<anonymous> (app/packages/underscore/underscore.js:76:11)
at Request._callback (app/server/statusboard.js:9:7)
at Request.callback (/usr/local/meteor/lib/node_modules/request/main.js:108:22)
at Request.<anonymous> (/usr/local/meteor/lib/node_modules/request/main.js:468:18)
at Request.emit (events.js:67:17)
Exited with code: 1
I'm not too sure what that error means. Does anyone have any ideas, or can anyone suggest a different approach?
Just wrapping your function in a Fiber might not be enough and can lead to unexpected behavior.
The reason is that, along with the Fiber itself, Meteor requires a set of variables attached to the fiber. Meteor uses the data attached to a fiber as a dynamic scope, and the easiest way to use it with a third-party API is Meteor.bindEnvironment.
T.post('someurl', Meteor.bindEnvironment(function (err, res) {
  // do stuff
  // can access Meteor.userId
  // still have MongoDB write fence
}, function () { console.log('Failed to bind environment'); }));
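Applied to the getServices function from the question, a sketch could look like this (the parsing and insert logic are the question's own; only the callback wrapping is new):
// sketch: wrap the request callback with Meteor.bindEnvironment so that
// Services.insert runs with the fiber-bound Meteor environment
function getServices() {
  request(
    'http://some-server/vshell/index.php?type=services&mode=json',
    Meteor.bindEnvironment(function (error, response, body) {
      var resJSON = JSON.parse(body);
      _.each(resJSON, function (data) {
        Services.insert({
          host: data["host_name"],
          service: data["service_description"],
          hardState: data["last_hard_state"],
          currState: data["current_state"]
        });
      });
    }, function (e) { console.log('Failed to bind environment', e); })
  );
}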
Watch these videos on evented mind if you want to know more:
https://www.eventedmind.com/posts/meteor-dynamic-scoping-with-environment-variables
https://www.eventedmind.com/posts/meteor-what-is-meteor-bindenvironment
As mentioned above, it is because you're executing code within a callback.
Any code you're running on the server-side needs to be contained within a Fiber.
Try changing your getServices function to look like this:
function getServices(services) {
  Fiber(function() {
    services = [];
    request('http://some-server/vshell/index.php?type=services&mode=json', function (error, response, body) {
      var resJSON = JSON.parse(body);
      _.each(resJSON, function(data) {
        var host = data["host_name"];
        var service = data["service_description"];
        var hardState = data["last_hard_state"];
        var currState = data["current_state"];
        services+={host: host, service: service, hardState: hardState, currState: currState};
        Services.insert({host: host, service: service, hardState: hardState, currState: currState});
      });
    });
  }).run();
}
I just ran into a similar problem and this worked for me. What I have to say though is that I am very new to this and I do not know if this is how this should be done.
You probably could get away with only wrapping your insert statement in the Fiber, but I am not positive.
Based on my tests, you do have to wrap the insert itself; wrapping code similar to the example above was not enough for me.
For example, I did this and it still failed with the Fibers error.
function insertPost(args) {
  if(args) {
    Fiber(function() {
      post_text = args.text.slice(0,140);
      T.post('statuses/update', { status: post_text },
        function(err, reply) {
          if(reply){
            // TODO remove console output
            console.log('reply: ' + JSON.stringify(reply,0,4));
            console.log('incoming twitter string: ' + reply.id_str);
            // TODO insert record
            var ts = Date.now();
            id = Posts.insert({
              post: post_text,
              twitter_id_str: reply.id_str,
              created: ts
            });
          } else {
            console.log('error: ' + JSON.stringify(err,0,4));
            // TODO maybe store locally even though it failed on twitter
            // and run service in background to push them later?
          }
        }
      );
    }).run();
  }
}
I did this and it ran fine with no errors.
function insertPost(args) {
  if(args) {
    post_text = args.text.slice(0,140);
    T.post('statuses/update', { status: post_text },
      function(err, reply) {
        if(reply){
          // TODO remove console output
          console.log('reply: ' + JSON.stringify(reply,0,4));
          console.log('incoming twitter string: ' + reply.id_str);
          // TODO insert record
          var ts = Date.now();
          Fiber(function() {
            id = Posts.insert({
              post: post_text,
              twitter_id_str: reply.id_str,
              created: ts
            });
          }).run();
        } else {
          console.log('error: ' + JSON.stringify(err,0,4));
          // TODO maybe store locally even though it failed on twitter
          // and run service in background to push them later?
        }
      }
    );
  }
}
I thought this might help others encountering this issue. I have not yet tested calling the asynchronous external service after internal code and wrapping that in a Fiber; that might be worth testing as well. In my case I needed to know the remote action happened before doing my local action.
Hope this contributes to this question thread.
