Hapi.js - adding mechanism to check every route

I am trying to implement a mechanism that will be run before any route is hit. In that mechanism I want to take a value from the header and check for authentication.
I have come up with this:
server.js:
// Create a server with a host and port
'use strict';

var Hapi = require('hapi');
var mongojs = require('mongojs');

var plugins = [
    require('./routes/entities')
];

var server = new Hapi.Server();
server.connection({
    port: 3000
});

// Connect to db
server.app.db = mongojs('hapi-rest-mongo', ['entities']);

server.app.checkHeader = function (request) {
    var header = request.headers['x-authorization'];
    if (header === "letmein") {
        return true;
    }
    return false;
};

// Load plugins and start server
server.register(plugins, function (err) {
    if (err) {
        throw err;
    }

    // Start the server
    server.start(function (err) {
        console.log('Server running at:', server.info.uri);
    });
});
and in routes/entities.js:
'use strict';

var Boom = require('boom');
var uuid = require('node-uuid');
var Joi = require('joi');

exports.register = function (server, options, next) {

    var db = server.app.db;

    server.route({
        method: 'GET',
        path: '/entities',
        handler: function handler(request, reply) {

            if (!server.app.checkHeader(request)) {
                return reply(Boom.unauthorized());
            }

            // request.server.myFunc();
            db.entities.find(function (err, docs) {
                if (err) {
                    return reply(Boom.wrap(err, 'Internal MongoDB error'));
                }
                reply(docs);
            });
        }
    });
So, in short: while starting the server I register my function server.app.checkHeader, and in the routes I call it, passing it the request object (which contains the header information).
While this works, I have a feeling I am not following best practices with Hapi.
How could I do it more elegantly?

There are a few options.
You can, of course, tap into the request lifecycle - note the events that occur in the pipeline prior to the route handler.
However, I'd urge you to consider implementing an auth strategy instead, which can be set as the default for all routes or applied selectively to the appropriate routes.

The best way to require authentication for all or selected routes is to use hapi's integrated functionality.
You should set a default authentication strategy that is applied to every route handler. The sample below uses basic auth. You'd want to create a custom authentication strategy for hapi to check your x-authorization header.
const Hapi = require('hapi')
const BasicAuth = require('hapi-auth-basic')

const server = new Hapi.Server()

server.register(BasicAuth, function (err) {
  if (err) {
    console.log('error', 'failed to install plugins')
    throw err
  }

  // TODO: add authentication strategy & set as default
  server.auth.strategy('simple', 'basic', true, { validateFunc: basicValidationFn })

  // or register the strategy and set it as the default separately
  server.auth.strategy('simple', 'basic', { validateFunc: basicValidationFn })
  server.auth.default('simple')

  // TODO: add routes

  server.start(function (err) {
  })
})
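For the x-authorization check from the question specifically, a custom scheme could look roughly like the following sketch (hapi v16-style API; the scheme name 'header-token' and strategy name 'header' are arbitrary choices, and the hard-coded token is just the value from the question):
const Boom = require('boom')

// Sketch of a custom auth scheme that checks the x-authorization header.
// Register it before adding your routes.
server.auth.scheme('header-token', function (server, options) {
  return {
    authenticate: function (request, reply) {
      const header = request.headers['x-authorization']

      if (header !== 'letmein') {
        // Missing/invalid header: respond with 401 before any handler runs
        return reply(Boom.unauthorized(null, 'header-token'))
      }

      // Whatever you pass here ends up in request.auth.credentials
      return reply.continue({ credentials: { token: header } })
    }
  }
})

server.auth.strategy('header', 'header-token')
server.auth.default('header')
With the default strategy in place, every route goes through the scheme's authenticate step before its handler runs, so the per-route checkHeader calls can go away.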
You can also hook into hapi's request lifecycle and extend it at the provided extension points. Extending the request lifecycle should be done from a plugin:
register: function (server, options, next) {
  // do some processing before 'onPreAuth'
  // or pick another extension point
  server.ext('onPreAuth', (request, reply) => {
    // your functionality

    // hand the request back to the lifecycle
    reply.continue()
  })

  next()
}
Hope that helps!

Related

How to modularize an express app - with a function, a class and req.pipe?

Below are two servers and two gqlServers. All combinations of them work.
The challenge is to extend express with some additional predefined code patterns shared across several apps, exposed through additional methods.
Which combination of a server and gqlServer is considered best practice and best for performance?
server:
server_A is a function that returns a class
server_B is a function that returns a function
gqlServer:
gqlServer_01 uses req.pipe
gqlServer_02 has the original express() passed into it
function gqlServer_01(options) {
  let gqlApp = express();

  gqlApp.use(options.route, function(req, res, next) {
    res.send('gqlServer 01');
    // next();
  });

  gqlApp.listen(8001, err => {
    if (err) throw err;
    console.log(`>> GQL Server running on 8001`);
  });
}

function gqlServer_02(app, options) {
  app.use(options.route, function(req, res, next) {
    res.send('gqlServer 02');
    // next();
  });
}

// THIS SERVER ?
function server_A(config = {}) {
  config = deepmerge(def_opt, config);

  let app = express();

  app.get('/', function(req, res, next) {
    res.send('root');
    // next();
  });

  class Server {
    constructor(opt) {
      this.opt = opt;
    }
    gql(props = {}) {
      // THIS GQL SERVER ?
      gqlServer_01({ route: '/gql-01' });
      app.use('/gql-01', function(req, res) {
        req.pipe(request(`http://localhost:8001/gql-01`)).pipe(res);
      });

      // OR THIS GQL SERVER ?
      gqlServer_02(app, { route: '/gql-02' });
    }
  }

  app.listen(8000, err => {
    if (err) throw err;
    console.log(`>> Server running on 8000`);
  });

  return new Server(app, config);
}

// OR THIS SERVER ?
function server_B(config = {}) {
  config = deepmerge(def_opt, config);

  let app = express();

  app.get('/', function(req, res, next) {
    res.send('root');
    // next();
  });

  app.gql = function(props = {}) {
    // THIS GQL SERVER ?
    gqlServer_01({ route: '/gql-01' });
    app.use('/gql-01', function(req, res) {
      req.pipe(request(`http://localhost:8001/gql-01`)).pipe(res);
    });

    // OR THIS GQL SERVER ?
    gqlServer_02(app, { route: '/gql-02' });
  };

  app.listen(8000, err => {
    if (err) throw err;
    console.log(`>> Server running on 8000`);
  });

  return app;
}
The goal is to find the best approach so I can turn this into an npm package and easily reuse the methods across several projects. The project has been highly simplified for the sake of clarity.
I don't think you will have performance issues with any of these examples, so the question is really which of them is more modular.
If you want to make an npm package out of this, you shouldn't be calling express() inside your server code. Instead, you should pass the app in as a parameter, which lets you reuse existing express apps initialized elsewhere. For this reason I would go for gqlServer_02.
You also want to create a new server each time you call the module function, so I'd go with server_A. However, it needs to receive the express app as a parameter in order to reuse existing express objects. I would also put the app.listen call inside a method of the Server class.
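To make that concrete, here is a minimal sketch of the suggested shape, reusing the names from the question (gqlServer_02, Server); the createServer wrapper is a hypothetical name, and error handling is omitted:
// Minimal sketch: the package never creates or listens on its own express app.
const express = require('express');

// gqlServer_02-style: mount onto an app that is passed in, never call express() here
function gqlServer_02(app, options) {
  app.use(options.route, function (req, res) {
    res.send('gqlServer 02');
  });
}

class Server {
  // receive an existing express app instead of creating one
  constructor(app, opt = {}) {
    this.app = app;
    this.opt = opt;
  }

  gql() {
    gqlServer_02(this.app, { route: '/gql-02' });
    return this;
  }

  // listen is a method, so the consuming project decides when to bind a port
  listen(port, cb) {
    return this.app.listen(port, cb);
  }
}

// hypothetical factory exported by the npm package
function createServer(app, config = {}) {
  return new Server(app, config);
}

// Usage: the consuming project owns the express app
const app = express();
createServer(app).gql().listen(8000, () => console.log('>> Server running on 8000'));
This keeps the package free of global state: each call to createServer produces a fresh Server bound to whatever app the caller supplies.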

No logs after Passport.js authenticate middleware

I am trying to debug a failing JWT auth setup, which always returns a 401.
My passport setup (middleware/auth.js)
import passport from 'passport'
import { Strategy as JwtStrategy, ExtractJwt } from 'passport-jwt'

module.exports = function() {
  var options = {};
  options.jwtFromRequest = ExtractJwt.fromAuthHeader()
  options.secretOrKey = 'superdupersecret'

  var strategy = new JwtStrategy(options, function(payload, done) {
    console.log('this is not printing')   // <---------------
    var user = payload.sub || null;
    if (user) {
      return done(null, { id: user._id });
    } else {
      return done(new Error("User not found"), null);
    }
  });

  passport.use(strategy);

  return {
    initialize: () => {
      console.log('this only prints on boot');   // <---------------
      return passport.initialize();
    },
    authenticate: () => {
      console.log('this too')   // <---------------
      return passport.authenticate("jwt", { session: false });
    }
  };
};
My server.js file where I initialize passport:
import express from 'express'
(...)
var auth = require("./middleware/auth.js")();
// Instantiate app
const app = express();
// Initialize passport for auth use
app.use(auth.initialize())
And my protected route that always returns a 401:
import express from 'express'
var auth = require("../middleware/auth.js")();
const userRouter = express.Router()
userRouter.get('/dashboard', auth.authenticate(), (req, res) => {
res.send('It worked! User id is: ' + req.user + '.')
})
export default userRouter
I have tried to add print statements within the actual passport.js module itself, as well as passport-jwt, with no success.
After the authentication middleware on the protected route, nothing logs.
I have tried a ton of setup permutations over the past 3 days now. Any help would be greatly appreciated.
OK, I followed the tutorial you mentioned and it seems to work.
Here are some notes (some may be obvious, no offense):
Copy the code exactly as in the tutorial.
Once you have everything in place, you need to "log in": make a POST request to /token. The content type has to be application/json, and in the body of the request you need to send an object with the email and password (from the tutorial).
After you log in, the server returns a token.
Take that token and make a GET request to /user. In the headers of the request add: Authorization: JWT [your token here]. You have to write "JWT" and the token separated by a single space.
The server returns status 200. I modified it so it returns the user:
app.get("/user", auth.authenticate(), function(req, res) {
res.json({user: req.user});
});
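As a rough illustration of that flow (host/port and the credentials are placeholders; the /token and /user routes and the "JWT" header scheme come from the setup described above):
# 1. "Log in": POST JSON credentials to /token and read the token from the response
curl -X POST http://localhost:3000/token \
  -H "Content-Type: application/json" \
  -d '{"email": "user@example.com", "password": "password"}'

# 2. Call the protected route, sending the token as "JWT <token>"
curl http://localhost:3000/user \
  -H "Authorization: JWT <token-from-step-1>"
If the strategy's verify callback still never logs with this flow, the token is not reaching passport-jwt in the format the extractor expects.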

Securing Loopback with third party OpenID Connect

I'm trying to secure my loopback service with my third party OpenID Connect service (Keycloak) but it doesn't seem to be validating requests have accesstokens at all.
My server.js:
var loopback = require('loopback');
var boot = require('loopback-boot');

var app = module.exports = loopback();

// Passport configurators..
var loopbackPassport = require('loopback-component-passport');
var PassportConfigurator = loopbackPassport.PassportConfigurator;
var passportConfigurator = new PassportConfigurator(app);

var cont = function(req, res) {
  next();
};

/**
 * Flash messages for passport
 *
 * Setting the failureFlash option to true instructs Passport to flash an
 * error message using the message given by the strategy's verify callback,
 * if any. This is often the best approach, because the verify callback
 * can make the most accurate determination of why authentication failed.
 */
var flash = require('express-flash');

// attempt to build the providers/passport config
var config = {};
try {
  config = require('../providers.json');
} catch (err) {
  console.trace(err);
  process.exit(1); // fatal
}

// -- Add your pre-processing middleware here --

// boot scripts mount components like REST API
boot(app, __dirname);

// The access token is only available after boot
app.middleware('auth', loopback.token({
  model: app.models.accessToken
}));

app.middleware('session:before', loopback.cookieParser(app.get('cookieSecret')));
app.middleware('session', loopback.session({
  secret: 'kitty',
  saveUninitialized: true,
  resave: true
}));

passportConfigurator.init();

// We need flash messages to see passport errors
app.use(flash());

passportConfigurator.setupModels({
  userModel: app.models.user,
  userIdentityModel: app.models.userIdentity,
  userCredentialModel: app.models.userCredential
});

for (var s in config) {
  var c = config[s];
  c.session = c.session !== false;
  passportConfigurator.configureProvider(s, c);
}

var ensureLoggedIn = require('connect-ensure-login').ensureLoggedIn;

app.start = function () {
  // start the web server
  return app.listen(function () {
    app.emit('started');
    var baseUrl = app.get('url').replace(/\/$/, '');
    console.log('Web server listening at: %s', baseUrl);
    if (app.get('loopback-component-explorer')) {
      var explorerPath = app.get('loopback-component-explorer').mountPath;
      console.log('Browse your REST API at %s%s', baseUrl, explorerPath);
    }
  });
};

// Bootstrap the application, configure models, datasources and middleware.
// Sub-apps like REST API are mounted via boot scripts.
boot(app, __dirname, function (err) {
  if (err) throw err;

  // start the server if `$ node server.js`
  if (require.main === module)
    app.start();
});
providers.json:
{
  "oAuth2": {
    "provider": "keycloak",
    "module": "passport-openidconnect",
    "authorizationURL": "https://xxx",
    "tokenURL": "https://xxxx",
    "clientID": "xxx",
    "clientSecret": "-",
    "failureFlash": true
  }
}
I've been trying to follow this example:
https://github.com/strongloop/loopback-example-passport
But that doesn't explain how to connect to an OpenID Connect service and secure my APIs.
I've also tried this for specific APIs:
app.get('/api/Clients', ensureLoggedIn('/login'), cont);
I want to really lock down all APIs and check if a valid token is presented in the query which should be validated by my third party authentication service.
Thanks in advance!

Call a hapi route from another route

I am pretty new to HapiJS. I am building a service where I have two routes, /route1 and /route2, both using the plugin architecture. I have registered both as plugins in my manifest file.
I want to call /route1 from /route2, so /route2 depends on the payload replied by /route1. I've been looking at putting the logic of /route2 on /route1 in a pre-handler, but I want to keep them separate.
I don't know how to call a registered plugin from another; the thing is that both plugins (routes) make network requests.
Thanks for reading.
As you specify that you don't want to use a shared handler/route prerequisite (which would be my first choice), you could make an actual request using an HTTP client (Wreck, request, http or the like).
Another, more efficient way that doesn't involve making a real network request is to use hapi's built-in server.inject() method, provided by Shot. This injects a request into your server and gives you the response to work with. Here's an example:
var Hapi = require('hapi');
var server = new Hapi.Server();
server.connection({ port: 4000 });

var plugin1 = function (server, options, next) {

    server.route({
        method: 'GET',
        path: '/route1',
        handler: function (request, reply) {
            reply('Hello');
        }
    });

    next();
};

plugin1.attributes = { name: 'plugin1' };

var plugin2 = function (server, options, next) {

    server.route({
        method: 'GET',
        path: '/route2',
        handler: function (request, reply) {
            server.inject('/route1', function (res) {
                reply(res.payload + ' World!');
            });
        }
    });

    next();
};

plugin2.attributes = { name: 'plugin2' };

server.register([plugin1, plugin2], function (err) {

    if (err) throw err;

    server.start(function (err) {
        if (err) throw err;
        console.log('Started');
    });
});
Note that the fact the routes are in plugins here is irrelevant. I've merely included it so it's close to your situation.
Shot and server.inject() are primarily used for testing but there are legitimate runtime uses like this too.
If you make a request to /route2, it will invoke the /route1 handler and use its payload:
$ curl localhost:4000/route2
Hello World!

Unable to receive upstream GCM messages with Node.js + XMPP

I'm new to node.js and XMPP, but not JavaScript or GCM. I'm unable to receive upstream messages using node-xmpp, and none of the callbacks are called, not even error. I've looked through the other SO threads but none of the solutions have worked. Here is my entire route:
var express = require('express');
var router = express.Router();
var xmpp = require('node-xmpp');

router.get('/', function(req, res, next) {
    var options = {
        type: 'client',
        jid: 'project-12345@gcm.googleapis.com',
        password: 'apiKey12345',
        port: 5235,
        host: 'gcm.googleapis.com',
        legacySSL: true,
        preferredSaslMechanism: 'PLAIN'
    };

    // this prints correctly
    console.log('Creating xmpp app');

    var cl = new xmpp.Client(options);
    cl.connection.socket.setTimeout(0);
    cl.connection.socket.setKeepAlive(true, 10000);

    // None of these callbacks are called
    cl.on('online', function() {
        console.log('online');
    });
    cl.on('connection', function() {
        console.log('online');
    });
    cl.on('authenticate', function(opts, cb) {
        console.log('authenticated');
    });
    cl.on('error', function(e) {
        console.error(e);
    });
    cl.on('stanza', function(stanza) {
        console.log(stanza);
    });

    res.render('index', { title: 'GCM upstream test' });
});

module.exports = router;
Thanks
OP here: the issue was that the route was terminating the XMPP action upon reaching res.render. After removing the XMPP code from the route, I get an XMPP authentication failure, which is most likely due to an incorrect jid/password. The project requirements have changed and I no longer need upstream messaging, so I will not attempt to fix the auth failure.
Thanks for the responses.
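For anyone hitting the same symptom, a rough sketch of the restructuring described above - creating the XMPP client once at module scope instead of inside the route handler, so its lifetime is not tied to a single request/response cycle (the jid and password are placeholders, as in the question):
var express = require('express');
var router = express.Router();
var xmpp = require('node-xmpp');

// Create the client once, at module load, so it is not torn down per request
var cl = new xmpp.Client({
  type: 'client',
  jid: 'project-12345@gcm.googleapis.com',   // placeholder sender id, as in the question
  password: 'apiKey12345',                   // placeholder API key, as in the question
  port: 5235,
  host: 'gcm.googleapis.com',
  legacySSL: true,
  preferredSaslMechanism: 'PLAIN'
});

cl.on('online', function () {
  console.log('online');
});
cl.on('stanza', function (stanza) {
  console.log(stanza.toString());
});
cl.on('error', function (e) {
  console.error(e);
});

// The route only renders the page; it no longer owns the XMPP connection
router.get('/', function (req, res) {
  res.render('index', { title: 'GCM upstream test' });
});

module.exports = router;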
