I'm trying to secure my LoopBack service with my third-party OpenID Connect service (Keycloak), but it doesn't seem to be validating that requests carry access tokens at all.
My server.js:
var loopback = require('loopback');
var boot = require('loopback-boot');
// The app is exported so boot scripts and tests can require() this module.
var app = module.exports = loopback();
// Passport configurators..
var loopbackPassport = require('loopback-component-passport');
var PassportConfigurator = loopbackPassport.PassportConfigurator;
// Wires loopback-component-passport onto this app instance.
var passportConfigurator = new PassportConfigurator(app);
// Pass-through route handler used behind auth checks.
// FIX: the original called `next()` without declaring it as a parameter,
// which throws a ReferenceError the moment the handler runs — Express/
// Connect middleware receives `next` as its third argument.
var cont = function (req, res, next) {
next();
};
/**
* Flash messages for passport
*
* Setting the failureFlash option to true instructs Passport to flash an
* error message using the message given by the strategy's verify callback,
* if any. This is often the best approach, because the verify callback
* can make the most accurate determination of why authentication failed.
*/
var flash = require('express-flash');
// attempt to build the providers/passport config
var config = {};
try {
config = require('../providers.json');
} catch (err) {
// Configuration is mandatory: without providers.json no auth strategy
// can be registered, so exit instead of running an unsecured server.
console.trace(err);
process.exit(1); // fatal
}
// -- Add your pre-processing middleware here --
// boot scripts mount components like REST API
boot(app, __dirname);
// The access token is only available after boot
// Parses access tokens from requests (query/header) for the given model.
app.middleware('auth', loopback.token({
model: app.models.accessToken
}));
app.middleware('session:before', loopback.cookieParser(app.get('cookieSecret')));
// NOTE(review): the session secret is hard-coded; move it to configuration
// before deploying.
app.middleware('session', loopback.session({
secret: 'kitty',
saveUninitialized: true,
resave: true
}));
passportConfigurator.init();
// We need flash messages to see passport errors
app.use(flash());
// Attach the passport component to the built-in user/identity/credential models.
passportConfigurator.setupModels({
userModel: app.models.user,
userIdentityModel: app.models.userIdentity,
userCredentialModel: app.models.userCredential
});
// Register every provider from providers.json; sessions default to enabled
// unless an entry explicitly sets `session: false`.
for (var s in config) {
var c = config[s];
c.session = c.session !== false;
passportConfigurator.configureProvider(s, c);
}
var ensureLoggedIn = require('connect-ensure-login').ensureLoggedIn;
app.start = function () {
// Boot the HTTP listener and announce the URLs once it is up.
return app.listen(function () {
app.emit('started');
var rootUrl = app.get('url').replace(/\/$/, '');
console.log('Web server listening at: %s', rootUrl);
var explorer = app.get('loopback-component-explorer');
if (explorer) {
console.log('Browse your REST API at %s%s', rootUrl, explorer.mountPath);
}
});
};
// Bootstrap the application, configure models, datasources and middleware.
// Sub-apps like REST API are mounted via boot scripts.
// NOTE(review): boot() is already invoked once earlier in this file;
// booting twice is almost certainly unintended — confirm and keep one call.
boot(app, __dirname, function (err) {
if (err) throw err;
// start the server if `$ node server.js`
if (require.main === module)
app.start();
});
providers.json (note: the server code above requires '../providers.json')
{
"oAuth2": {
"provider": "keycloak",
"module": "passport-openidconnect",
"authorizationURL": "https://xxx",
"tokenURL": "https://xxxx",
"clientID": "xxx",
"clientSecret": "-",
"failureFlash": true
}
}
I've been trying to follow this example:
https://github.com/strongloop/loopback-example-passport
But that doesn't explain how to connect to an OpenID Connect service and secure my APIs.
I've also tried this for specific APIs:
app.get('/api/Clients', ensureLoggedIn('/login'), cont);
I want to really lock down all APIs and check if a valid token is presented in the query which should be validated by my third party authentication service.
Thanks in advance!
Related
I was following a tutorial on Youtube on making a decentralized voting app on truffle: https://youtu.be/3681ZYbDSSk.
The source code is as follows: https://github.com/dappuniversity/election/tree/2019_update
The system has a backend with lite-server, the same as the official pet-shop tutorial and a simple frontend where users vote for their candidates.
This is fine until I wanted to add a login and sign up feature into the system. After some research, I found out that lite-server is not able to send Mysql queries like express.js or php, so I tried to implement this feature in express.js.
I created a new dir, init truffle, npm install express, then put everything from the original project to the new one. I created index.js, which I run as the server
var express = require("express");
var app = express();
const path = require("path");

// Parse JSON and form-encoded request bodies.
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
// Serve frontend assets and the compiled Truffle artifacts.
app.use(express.static(path.join(__dirname, "public")));
app.use(express.static(path.join(__dirname, "build")));

// http://localhost:3000/
app.get("/", function (request, response) {
// Render login template.
// FIX: pass path segments as separate arguments — `path.join(a + "/b")`
// concatenates first and defeats the purpose of path.join.
response.sendFile(path.join(__dirname, "index.html"));
});

app.listen(3000);
I hope to include the compiled contracts using app.use(express.static(path.join(__dirname, "build")));, however somehow it can't recognize it, so I was forced to move the compiled contract into the public dir. Then I nodemon index.js again.
The system failed to render the candidates' details, so the section is blank. This is how it should have looked.
In the app, the client-side app.js makes call to the web3.js api to get the details of candidates deployed on the blockchain. In my express app, it can only get the blockchain address but not the details. Here is the app.js (which can also be found on the github link above).
// Front-end controller for the Election dapp: wires up web3, loads the
// deployed Truffle contract, renders candidates and handles voting.
App = {
web3Provider: null,
contracts: {},
account: "0x0",
hasVoted: false,
init: function () {
return App.initWeb3();
},
initWeb3: function () {
// TODO: refactor conditional
if (typeof web3 !== "undefined") {
// If a web3 instance is already provided by Meta Mask.
App.web3Provider = web3.currentProvider;
web3 = new Web3(web3.currentProvider);
} else {
// Specify default instance if no web3 instance provided
App.web3Provider = new Web3.providers.HttpProvider(
"http://localhost:7545"
);
web3 = new Web3(App.web3Provider);
}
console.log("We are in initWeb3");
return App.initContract();
},
initContract: function () {
// Fetches the compiled artifact over HTTP — the server must expose
// Election.json, otherwise the candidate section stays blank.
$.getJSON("Election.json", function (election) {
// Instantiate a new truffle contract from the artifact
App.contracts.Election = TruffleContract(election);
// Connect provider to interact with contract
App.contracts.Election.setProvider(App.web3Provider);
App.listenForEvents();
return App.render();
});
},
// Listen for events emitted from the contract
listenForEvents: function () {
App.contracts.Election.deployed().then(function (instance) {
// Restart Chrome if you are unable to receive this event
// This is a known issue with Metamask
// https://github.com/MetaMask/metamask-extension/issues/2393
instance
.votedEvent(
{},
{
fromBlock: 0,
toBlock: "latest",
}
)
.watch(function (error, event) {
console.log("event triggered", event);
// Reload when a new vote is recorded
App.updateCounts();
});
});
},
render: function () {
var electionInstance;
var loader = $("#loader");
var content = $("#content");
console.log("In render");
loader.show();
content.hide();
// Load account data
web3.eth.getCoinbase(function (err, account) {
if (err === null) {
App.account = account;
$("#accountAddress").html("Your Account: " + account);
}
});
// Load contract data
App.contracts.Election.deployed()
.then(function (instance) {
electionInstance = instance;
console.log(electionInstance.candidatesCount());
return electionInstance.candidatesCount();
})
.then(function (candidatesCount) {
var candidatesResults = $("#candidatesResults");
candidatesResults.empty();
var candidatesSelect = $("#candidatesSelect");
candidatesSelect.empty();
// NOTE(review): the per-candidate promises may resolve out of order,
// so rows are not guaranteed to appear sorted by id — confirm.
for (var i = 1; i <= candidatesCount; i++) {
electionInstance.candidates(i).then(function (candidate) {
var id = candidate[0];
var name = candidate[1];
var voteCount = candidate[2];
// Render candidate Result
var candidateTemplate =
"<tr><th>" +
id +
"</th><td>" +
name +
"</td><td id='vc_" +
id +
"'>" +
voteCount +
"</td></tr>";
candidatesResults.append(candidateTemplate);
// Render candidate ballot option
var candidateOption =
"<option value='" + id + "' >" + name + "</ option>";
candidatesSelect.append(candidateOption);
});
}
return electionInstance.voters(App.account);
})
.then(function (hasVoted) {
// Do not allow a user to vote
if (hasVoted) {
$("form").hide();
}
loader.hide();
content.show();
})
.catch(function (error) {
console.warn(error);
});
},
updateCounts: function () {
$("#content").hide();
$("#loader").show();
App.contracts.Election.deployed()
.then(function (instance) {
// NOTE(review): `electionInstance` is not declared with var in this
// method, so this assignment creates/overwrites a global — confirm.
electionInstance = instance;
return electionInstance.candidatesCount();
})
.then(function (candidatesCount) {
for (var i = 1; i <= candidatesCount; i++) {
electionInstance.candidates(i).then(function (candidate) {
var id = candidate[0];
var voteCount = candidate[2];
var cell = document.getElementById("vc_" + id);
if (cell != null) {
cell.innerHTML = voteCount;
}
});
}
return electionInstance.voters(App.account);
})
.then(function (hasVoted) {
// Do not allow a user to vote
if (hasVoted) {
$("form").hide();
}
});
// NOTE(review): these two lines run synchronously, BEFORE the promise
// chain above resolves, so the loader is hidden immediately rather than
// after the counts refresh. Move them into the final .then() if a wait
// is intended.
$("#content").show();
$("#loader").hide();
},
castVote: function () {
var candidateId = $("#candidatesSelect").val();
App.contracts.Election.deployed()
.then(function (instance) {
return instance.vote(candidateId, { from: App.account });
})
.then(function (result) {
// Wait for votes to update
$("#content").hide();
$("#loader").show();
})
.catch(function (err) {
console.error(err);
});
},
};
$(function () {
// FIX: the `$(window).load(fn)` shorthand was deprecated in jQuery 1.8
// and removed in jQuery 3.x; `.on("load", ...)` is the equivalent form
// that works across jQuery versions.
$(window).on("load", function () {
App.init();
console.log("Init success");
});
});
I noticed that web3.eth.getCoinbase works perfectly because the address is correctly shown, but somehow at the line App.contracts.Election.deployed(), it cannot get the details from the contract.
My question is, how do I fix my code so it can correctly show the details of the candidates? Because the sign up and login system can only work if the basic voting component is in place. Plus, how do I put it correctly so express.js can recognize my contracts in build/contracts?
Or are there other ways to make it work such as configuring lite-server to send mysql queries, or implementing this on xampp then use php to connect to mysql? Thank you very much.
P.S. I've tried to use express-box for this project. For whatever reasons, it cannot work. Whenever I tried to truffle compile, the vs code pops up and truffle did nothing, so I turned to creating a new express project instead.
After a series of painstaking trial-and-error experiments, I've finally found the solution. On the line `$.getJSON("Election.json", function (election)`, the client sends an HTTP GET request to the server asking for "Election.json". This compiled contract resides in build/contracts, so the server has to serve this JSON for the voting system to work; otherwise, the candidates section turns up blank.
The solution is simply add one more line in index.js that imports the json. Then, add one more endpoint that just serves that json when requested. The final code would look like this
var express = require("express");
var app = express();
const path = require("path");
// Compiled Truffle artifact; served to the browser so TruffleContract
// can locate and instantiate the deployed Election contract.
var json = require("./build/contracts/Election.json");

// Parse JSON and form-encoded request bodies.
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(express.static(path.join(__dirname, "public")));

// http://localhost:3000/
app.get("/", function (request, response) {
// Render login template.
// FIX: pass path segments as separate arguments — `path.join(a + "/b")`
// concatenates first and defeats the purpose of path.join.
response.sendFile(path.join(__dirname, "index.html"));
});

// Serve the artifact requested by app.js via $.getJSON("Election.json", ...).
app.get("/Election.json", function (request, response) {
response.send(json);
});

app.listen(3000);
Now the system can find the json it needs to let user see the votes and vote as well.
If you have a better solution, please let me know!
I am building a nodejs api that uses fastify, Prisma and Postgres. I have the API working with fastify-cookies and fastify-session and i can get cookies just fine but i need to be able to store the session cookies in the database. I saw a tutorial on doing this but it was without prisma, so im lost on how to connect fastify-session to the Prisma database pool.
I use the Prisma client to connect to the database for my normal calls in my routes: const data = await prisma.model.create({});
server.js
const fastify = require('fastify')({ logger: true });
const PORT = process.env.PORT || 3000;
// Session state
// Registers the cookie/session plugin defined in sessions.js below.
fastify.register(require('./sessions'));
// Register all our routes here.
...
// Startup code for the fastify server.
const start = async () => {
try {
// Bind on all interfaces so the server is reachable inside containers.
await fastify.listen(PORT, '0.0.0.0');
} catch (error) {
// Log and exit: the process cannot serve anything if listen() failed.
fastify.log.error(error);
process.exit(1);
}
};
// Start the fastify server.
start();
sessions.js
const cookie = require('fastify-cookie');
const session = require('fastify-session');
const fp = require('fastify-plugin');
/**
 * Session plugin: registers cookie + session support on the instance.
 * @param {import('fastify').FastifyInstance} fastify
 */
const plugin = async (fastify) => {
// All plugin data here is global to fastify.
fastify.register(cookie);
// NOTE(review): `SessionStore` is not imported anywhere in this file —
// it must come from a session-store library (e.g. connect-pg-simple).
fastify.register(session, {
secret: process.env.SESSION_SECRET,
store: new SessionStore({
tableName: 'UserSession',
pool: ???, <--------------------------------- how to connect?
}),
saveUninitialized: false,
cookie: {
httpOnly: true,
// NOTE(review): secure:false sends the cookie over plain HTTP —
// set to true in production behind HTTPS.
secure: false,
},
});
// NOTE(review): this resets session.user to {} on EVERY request,
// wiping any logged-in state — confirm this is intended.
fastify.addHook('preHandler', (req, reply, next) => {
req.session.user = {};
next();
});
};
module.exports = fp(plugin);
If you want to use the Prisma connection pool you would have to create a session storage library similar to connect-pg-simple or modify the codebase to accept a Prisma connection. This is definitely a non-trivial implementation and I don't think it would make a lot of sense without exceptional circumstances.
I would suggest creating a new pg.Pool or pgPromise instance and connecting with that like it was shown in the tutorial video you linked to. There's no reason you can't have two separate connection pools open to the same database (One with Prisma and one with pg.Pool)
Here's the current issue i'm struggling with. I'm on a webapp project, in which I have 2 scripts :
A script called start.js in which I initialize the server and initialize a variable, token. This script is ran when I start the webapp.
A script called viewer.js which initialize a viewer. That viewer requires the previous token to work.
I can't generate the token from the client side, because it requires NodeJS, and as far as I understood NodeJS doesn't work on Client side.
I've tried to use global variables, global methods, or HTTP requests, but none of these methods seems to work so far. Any tip on how to do it ?
Here is what I tried:
// start.js — token provider: authenticates against Forge and serves the page.
const ForgeSDK = require('forge-apis');
const express = require('express');
const path = require('path');
var app = express();

app.use('/static', express.static(__dirname + '/static'));

// FIX: declare the token at module scope. The original assigned an
// undeclared `token` inside setToken(), creating an implicit global
// (a ReferenceError in strict mode).
var token = '';

/**
 * Token generation
 * NOTE(review): `oAuth2TwoLegged` is not defined in this snippet — it must
 * be constructed from the Forge SDK before this runs.
 */
oAuth2TwoLegged.authenticate().then(function (credentials) {
setToken(credentials.access_token);
}, function (err) {
console.error(err);
});

// Stores the freshly issued access token in the module-scoped variable.
function setToken(newToken) {
console.log("Definition du nouveau token");
token = newToken;
console.log(token);
}

app.get('/', function (req, res) {
// FIX: pass segments separately instead of pre-concatenating them.
res.sendFile(path.join(__dirname, 'index.html'));
});

app.listen(3000, function () {
console.log('Token provider listening on port 3000');
});
// viewer.js
// NOTE(review): this file runs in the browser, so it cannot generate the
// token itself — it must fetch it from the Node backend over HTTP.
var token = '';
/**
 * Viewer initialization
 */
You can pass a callback to your config options to obtain the token (usually via ajax) to requests:
// Viewer configuration. Rather than a static token, the viewer invokes
// getAccessToken whenever it needs (or needs to refresh) a credential.
var options = {
env: 'AutodeskProduction',
getAccessToken: function (onGetAccessToken) {
// TODO: replace the placeholder below with an ajax call that asks your
// backend for a fresh token. Forge's Authentication (OAuth) API returns:
//   { "access_token": "<YOUR_APPLICATION_TOKEN>",
//     "token_type": "Bearer",
//     "expires_in": 86400 }
var accessToken = '<YOUR_APPLICATION_TOKEN>';
var expireTimeSeconds = 86400;
// Hand the credential back to the viewer runtime.
onGetAccessToken(accessToken, expireTimeSeconds);
}
}
Autodesk.Viewing.Initializer(options, function onInitialized(){
...
See here for details.
And see here and here to create an endpoint to generate access tokens in your Node backend.
I am trying to implement a mechanism that will be run before any route is hit. In that mechanism I want to take a value from the header and check for authentication.
I have come up with this:
server.js:
// Create a server with a host and port
'use strict';
var Hapi = require('hapi');
var mongojs = require('mongojs');
var plugins = [
require('./routes/entities')
];
var server = new Hapi.Server();
server.connection({
port: 3000
});
//Connect to db
server.app.db = mongojs('hapi-rest-mongo', ['entities']);
// Returns true when the request carries the expected shared secret in
// its X-Authorization header, false otherwise.
server.app.checkHeader = function (request) {
return request.headers['x-authorization'] === "letmein";
};
//Load plugins and start server
server.register(plugins, function (err) {
if (err) {
throw err;
}
// Start the server
server.start(function (err) {
// FIX: surface listen failures (e.g. port already in use) instead of
// silently ignoring the callback's error argument.
if (err) {
throw err;
}
console.log('Server running at:', server.info.uri);
});
});
and in routes.entities:
'use strict';
var Boom = require('boom');
var uuid = require('node-uuid');
var Joi = require('joi');
exports.register = function (server, options, next) {
var db = server.app.db;
server.route({
method: 'GET',
path: '/entities',
handler: function handler(request, reply) {
if(!server.app.checkHeader(request))
{
return reply(Boom.unauthorized());
};
//request.server.myFunc();
db.entities.find(function (err, docs) {
if (err) {
return reply(Boom.wrap(err, 'Internal MongoDB error'));
}
reply(docs);
});
}
});
So in short while starting the server I have registered my function server.app.checkHeader
And in the routes I am calling it and sending a request object to it. Request object contains information about the headers.
While this works, I am having a feeling I am not following the best practices with the Hapi.
How could I do it more elegantly?
There are a few options.
You can, of course, tap into the request lifecycle - note the events that occur in the pipeline prior to the route handler.
Although, I'd urge you to consider implementing an auth strategy that can be set as the default for all routes or selectively on appropriate routes.
The best way to require authentication for all or selected routes is to use hapi's integrated functionality.
You should set a default authentication strategy that is applied to each route handler. The sample below uses basic auth. You’d want to create a custom authentication strategy for hapi to check your x-authentication header.
const Hapi = require('hapi')
const BasicAuth = require('hapi-auth-basic')
const server = new Hapi.Server()
server.register(BasicAuth, function (err) {
if (err) {
console.log('error', 'failed to install plugins')
throw err
}
// TODO: add authentication strategy & set as default
// Passing `true` as the third argument marks the strategy as the default
// for every route in one step.
server.auth.strategy('simple', 'basic', true, { validateFunc: basicValidationFn })
// or set strategy separately as default auth strategy
// NOTE(review): use ONE of the two forms above/below — registering the
// name 'simple' twice throws at startup.
server.auth.strategy('simple', 'basic', { validateFunc: basicValidationFn })
server.auth.default('simple')
// TODO: add routes
server.start(function (err) {
})
})
You can also inject hapi’s request lifecycle and extend it at given points. Extending the request lifecycle should be done by using plugins:
// Plugin skeleton: extend the request lifecycle before authentication runs.
register: function (server, options, next) {
// do some processing before 'onPreAuth'
// or pick another extension point
server.ext('onPreAuth', (request, reply) => {
// your functionality
})
// NOTE(review): a hapi plugin must call next() once registration is
// complete — missing here.
}
Hope that helps!
I am using openshift with express and no matter what configuration I change socket.io to it breaks my application. What am I missing?
I have commented out the sections that use socket.io and the app runs fine.
When I uncomment socket.io everything goes wrong. I have tried changing the position of the code to accept the standard io.listen(app), but it still breaks. I have also tried numerous examples from the internet.
Is this possible? //self.io.listen(self.app); if not what should I have socket.io listen to in the context of my app? I cannot call io.listen(server)..
var express = require('express');
//etc
// configuration
mongoose.connect(configDB.url); // connect to our database
require('./config/passport')(passport); // configure passport strategies
// Application wrapper for the OpenShift deployment: wires up MongoDB,
// Express middleware, passport auth and graceful-shutdown handling.
var App = function () {
  // Scope
  var self = this;

  // Setup — connection parameters come from the OpenShift environment.
  // FIX: the port env var name contained a stray space
  // ("process.env.O PENSHIFT_MONGODB_DB_PORT"), which is a SyntaxError;
  // restored the proper name and added an explicit parseInt radix.
  self.dbServer = new mongodb.Server(
    process.env.OPENSHIFT_MONGODB_DB_HOST,
    parseInt(process.env.OPENSHIFT_MONGODB_DB_PORT, 10)
  );
  self.db = new mongodb.Db(process.env.OPENSHIFT_APP_NAME, self.dbServer, { auto_reconnect: true });
  self.dbUser = process.env.OPENSHIFT_MONGODB_DB_USERNAME;
  self.dbPass = process.env.OPENSHIFT_MONGODB_DB_PASSWORD;
  self.ipaddr = process.env.OPENSHIFT_NODEJS_IP;
  self.port = parseInt(process.env.OPENSHIFT_NODEJS_PORT, 10) || 8080;
  if (typeof self.ipaddr === "undefined") {
    console.warn('No OPENSHIFT_NODEJS_IP environment variable');
  };

  // Web app urls
  self.app = express();
  //self.io = require('socket.io');
  //self.clients = [];
  /*self.io.sockets.on('connection', function (socket) {
  self.clients.push(socket);
  socket.emit('welcome', { message: 'Welcome!' });
  // When socket disconnects, remove it from the list:
  socket.on('disconnect', function() {
  var index = self.clients.indexOf(socket);
  if (index != -1) {
  self.clients.splice(index, 1);
  }
  });
  });*/

  // set up our express application
  self.app.use(morgan('dev')); // log every request to the console
  self.app.use(cookieParser()); // read cookies (needed for auth)
  self.app.use(bodyParser.json()); // get information from html forms
  self.app.use(bodyParser.urlencoded({ extended: true }));
  // NOTE(review): bodyParser() is redundant after the json/urlencoded
  // parsers above and is deprecated — confirm and remove.
  self.app.use(bodyParser());
  self.app.use(multer({ dest: process.env.OPENSHIFT_DATA_DIR }));
  self.app.use(compression());
  self.app.use(express.static(__dirname + '/public'));
  self.app.use("/public2", express.static(process.env.OPENSHIFT_DATA_DIR));
  self.app.set('view engine', 'ejs'); // set up ejs for templating

  // CORS headers.
  // NOTE(review): reflecting the request Origin while also sending
  // Allow-Credentials=true lets any site make credentialed requests —
  // confirm this is intended before deploying.
  self.app.use(function (req, res, next) {
    res.setHeader('Access-Control-Allow-Origin', req.headers.origin);
    res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
    res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,content-type');
    res.setHeader('Access-Control-Allow-Credentials', 'true');
    next();
  });

  // required for passport: Mongo-backed session store so logins survive
  // process restarts.
  self.app.use(session({
    secret: 'example',
    maxAge: 6 * 3 * 60 * 1000,
    store: new MongoStore({
      url: process.env.OPENSHIFT_MONGODB_DB_URL,
      clear_interval: 6 * 3 * 60 * 1000
    })
  }));
  self.app.use(passport.initialize());
  self.app.use(passport.session()); // persistent login sessions
  self.app.use(flash()); // use connect-flash for flash messages stored in session

  require('./app/routes.js')(self.app, passport); // load our routes and pass in our app and fully configured passport

  // Opens the Mongo connection, authenticates against the admin db, then
  // hands control back to the caller (used to delay listen() until the
  // database is ready).
  self.connectDb = function (callback) {
    self.db.open(function (err, db) {
      if (err) { throw err; }
      self.db.authenticate(self.dbUser, self.dbPass, { authdb: "admin" }, function (err, res) {
        if (err) { throw err; }
        callback();
      });
    });
  };

  //starting the nodejs server with express
  self.startServer = function () {
    self.app.listen(self.port, self.ipaddr, function () {
      console.log('%s: Node server started on %s:%d ...', Date(Date.now()), self.ipaddr, self.port);
    });
    //websockets
    //self.io.listen(self.app);
  };

  // Destructors — log and exit on termination signals.
  self.terminator = function (sig) {
    if (typeof sig === "string") {
      console.log('%s: Received %s - terminating Node server ...', Date(Date.now()), sig);
      process.exit(1);
    };
    console.log('%s: Node server stopped.', Date(Date.now()));
  };
  process.on('exit', function () { self.terminator(); });
  self.terminatorSetup = function (element, index, array) {
    process.on(element, function () { self.terminator(element); });
  };
  ['SIGHUP', 'SIGINT', 'SIGQUIT', 'SIGILL', 'SIGTRAP', 'SIGABRT', 'SIGBUS', 'SIGFPE', 'SIGUSR1', 'SIGSEGV', 'SIGUSR2', 'SIGPIPE', 'SIGTERM'].forEach(self.terminatorSetup);
};
//make a new express app
var app = new App();
//call the connectDb function and pass in the start server command
// startServer only runs after the DB connection is authenticated.
app.connectDb(app.startServer);
Thank you for your comments. The solution was to create a self.server variable to pass the express server into socket.io. I have tested the connection and it is working fine now; with all of the other server dependencies.
//starting the nodejs server with express
self.startServer = function(){
// Keep a handle on the http.Server returned by app.listen() —
// socket.io must attach to the HTTP server, not the Express app object.
self.server = self.app.listen(self.port, self.ipaddr, function(){
console.log('%s: Node server started on %s:%d ...', Date(Date.now()), self.ipaddr, self.port);
});
//websockets
self.io = require('socket.io').listen(self.server);
};