I have set up my express/connect app to use sessions with redis as the store:
// Session middleware: cookies signed with `secret`, session data persisted
// in Redis via app.redisStore, cookie expires after 600000 ms (10 minutes).
app.use(express.session({
secret: "secret key here",
store: app.redisStore,
cookie: { maxAge: 600000 }
}));
I have created my own flash message system by doing the following:
module.exports = function (app) {
'use strict';
//set-up session messages
app.use(function (req, res, next) {
/* set up flash*/
if (req.session.flash === "undefined") {
req.session.flash = [];
}
res.locals.messages = function () {
var messages = req.session.flash;
//clear out messages
req.session.flash = [];
return messages;
};
...
Then essentially I just push flash message objects into the flash array whenever I need them. Every time the message function is used they get cleared out. This seems to work for the most part; however, while logging out I use the regenerate function and flash becomes undefined:
// Logs the user out: regenerates the session (which wipes all session
// data and replaces req.session with a fresh object), queues a flash
// message on the *new* session, then redirects to the login page.
function logout(req, res) {
    var currentUser = req.session.currentUser;
    req.session.regenerate(function (err) {
        if (err) {
            // regeneration failed — don't touch the (possibly stale)
            // session; just send the user on their way
            console.error("Session regenerate failed: " + err);
            return res.redirect("/login");
        }
        // BUG FIX: regenerate() replaced req.session, so the flash array
        // set up by the middleware no longer exists — re-create it before
        // pushing, instead of crashing on `undefined.push`.
        if (!Array.isArray(req.session.flash)) {
            req.session.flash = [];
        }
        req.session.flash.push({"type": "info", "message": "You have been logged out."});
        console.log(currentUser + " Logged Out");
        res.redirect("/login");
        return;
    });
}
Which seems to make sense. Regenerate obliterates the session, and since it happens after the initial request, flash becomes undefined. To avoid any future problems like this, I am wondering if there is some sort of initialize function for sessions that I can override or hook into. I'd use this to set some default session values every time a session is started or regenerated.
Side-question: Is flash actually getting saved in redis?
To avoid any future problems like this I am wondering if there is
some sort of initialize function for sessions that I can
override or hook into? I'd use this to set some default session values
every time a session is started or regenerated.
No, the way you are doing this is correct. If you want to attach multiple actions to regenerate method, then you can also try something like this:
var EventEmitter = require('events').EventEmitter;
var session_events = new EventEmitter( );
module.exports = function (app) {
'use strict';
//set-up session messages
app.use(function (req, res, next) {
var regenerate = req.session.regenerate;
req.session.regenerate = function() {
session_events.emit( "session_regenerated", req );
regenerate.apply( req.session, arguments );
};
/* set up flash*/
if (req.session.flash === "undefined") {
session_events.emit( "session_initializing", req );
}
// other code goes here
});
}
Then you can simply do:
// Queue a logout notice whenever a session is regenerated.
// NOTE(review): the wrapper emits this event *before* the underlying
// regenerate runs, so this push lands on the old session and is wiped
// immediately afterwards — verify the emit ordering in the wrapper.
session_events.on( "session_regenerated", function(req) {
req.session.flash.push({"type": "info", "message": "You have been logged out."});
});
// Give brand-new sessions an empty flash array.
session_events.on( "session_initializing", function(req) {
req.session.flash = [];
});
This is a monkey patch, though (at least the .regenerate overriding), so I advise writing your own session store, which is not difficult at all, since a session is nothing more than an entry in Redis or any other storage.
Side-question: Is flash actually getting saved in redis?
Yes, everything attached to session goes to session store ( Redis in your case ).
Related
I am trying to create an application based on sockets. Looking at the console, it can be seen that the client is trying to connect to the socket server twice, which should not be allowed.
Is there any way to stop that?
If I have a modular JavaScript file where multiple scripts want to access the same socket connection, how is that possible?
<!DOCTYPE html>
<html>
<head>
<title>socket</title>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"></script>
<script src="https://cdn.socket.io/socket.io-1.4.5.js"></script>
</head>
<body>
<script type="text/javascript">
$(function(){
// first io() call opens one connection to the server
var socket = io();
console.log(typeof socket); // object
// second io() call opens a SEPARATE connection — this is the
// accidental double-connect the question is about
var socket1 = io();
console.log(socket === socket1); // false
})
</script>
</body>
</html>
Don't connect the client to socket twice....
A global variable will allow you to access the socket from all your script files
Eg)
//you can access this variable from all your scripts
//you can access this variable from all your scripts
var socket;
$(function(){
// connect exactly once, when the DOM is ready; every other script
// reuses this single connection through the global variable
socket = io();
console.log(typeof socket);
})
Note that in javascript polluting the global scope with variables and functions is considered bad practice, but that's for another post.
The obvious suggestion is to stop having your client connect twice.
But, if you're trying to prevent multiple connections on the server and you really don't want any end user to be able to use more than one window at a time to your web site, then you can prevent another connection once there is already a connection.
The key would be to identify each browser with some sort of unique cookie (user ID or uniquely generated ID number). You can then implement socket.io authentication that keeps track of which user IDs already have a connection and if a connection is already live, then subsequent attempts to connect will fail the authentication step and will be disconnected.
Supposed you have a cookie set for each browser that's called userId. Then, here's a sample app that makes sure the userId cookie is present and then uses it to deny more than one socket.io connection:
const express = require('express');
const app = express();
const cookieParser = require('cookie-parser');
const cookie = require('cookie');
app.use(cookieParser());
let userIdCntr = 1;
const tenDays = 1000 * 3600 * 24 * 10; // ten days, in milliseconds

// Guarantee every browser carries a unique, persistent userId cookie.
app.use(function(req, res, next) {
    // make sure there's always a userId cookie
    if (!req.cookies || !req.cookies.userId) {
        // coin unique user ID which consists of increasing counter and current time
        let userId = userIdCntr++ + "_" + Date.now();
        console.log("coining userId: " + userId);
        // BUG FIX: res.cookie's `maxAge` is a duration in ms relative to
        // now, NOT an absolute timestamp. `Date.now() + tenDays` produced
        // a cookie lasting ~50 years; use the `expires` option instead if
        // an absolute Date is wanted.
        res.cookie('userId', userId, { maxAge: tenDays, httpOnly: true });
    }
    next();
});
// Serve the test page; log whatever cookies the browser sent along.
app.get('/test.html', function (req, res) {
console.log('Cookies: ', req.cookies)
res.sendFile( __dirname + "/" + "test.html" );
});
const server = app.listen(80);
// attach socket.io to the same HTTP server
const io = require('socket.io')(server);
// which users are currently connected
var users = new Set();
// make sure cookie is parsed
// (socket.io handshakes do not pass through Express middleware, so parse
// the raw Cookie header here and cache the result on the handshake object)
io.use(function(socket, next) {
if (!socket.handshake.headers.cookieParsed) {
socket.handshake.headers.cookieParsed = cookie.parse(socket.handshake.headers.cookie || "");
}
next();
});
// now do auth to fail duplicate connections:
// allow at most one live socket per userId cookie.
io.use(function (socket, next) {
    console.log("io.use() - auth");

    const id = socket.handshake.headers.cookieParsed.userId;

    // no identifying cookie at all — refuse the handshake
    if (!id) {
        return next(new Error('Authentication error - no userId'));
    }

    // a socket for this browser is already live — refuse the duplicate
    if (users.has(id)) {
        console.log("Failing user " + id);
        return next(new Error('Authentication error - duplicate connection for this userId'));
    }

    console.log("adding user " + id);
    users.add(id);
    next();
});
// Per-connection wiring: log the parsed handshake cookies, free this
// userId's slot on disconnect, and handle the "buttonSend" test message.
io.on('connection', function (socket) {
    const parsedCookies = socket.handshake.headers.cookieParsed;
    console.log("socket.io connection cookies: ", parsedCookies);

    socket.on('disconnect', function () {
        console.log("socket.io disconnect");
        // release the userId so this browser can connect again later
        users.delete(parsedCookies.userId);
    });

    // test message just to see if we're connected
    socket.on("buttonSend", function (data) {
        console.log("buttonSend: ", data);
    });
});
P.S. You really have to think through the situation where a user with more than one computer (like home/work) leaves one computer on and open to your page, and thus connected via socket.io, and then attempts to use your site from the other computer. If you refuse the second connection, this will deny them access from their second computer whenever the first computer happens to be left open and on.
I'm new to node.js and I'm stuck here. I have the following code in my router:
// Route handler from the question: starts a "game" once per session and
// keeps pushing into req.session.serverStocks from a timer.
router.get('/', function(req, res, next) {
// Initialize the game
if(!req.session.gameStarted) {
req.session.gameStarted = true;
req.session.serverStocks = [];
// NOTE(review): this fires every 4000 ms (4 seconds), not every 4000
// seconds as the surrounding prose says.
// NOTE(review): req.session here is the object deserialized for THIS
// request; express-session writes it back to the store when the response
// ends. Pushes made by this timer after that point are presumably never
// persisted, which would explain why later requests see an empty array —
// confirm against express-session's save semantics.
setInterval(function() {
req.session.serverStocks.push('new object');
console.log('This works as it should, returns an array full of objects ->');
console.log(req.session.serverStocks);
}, 4000);
}
else {
console.log('After refresh, this is always empty ->');
console.log(req.session.serverStocks);
console.log('Although this returns "true" as it should which means sessions work OK ->');
console.log(req.session.gameStarted);
}
res.render('pages/homepage', { title: 'Hello' });
});
My goal is that when a user loads the page for the first time, an empty session array is initialized for him/her. Then every 4000 milliseconds (4 seconds) the "new object" string is added to it. The first-visit part works well, but when I refresh the page the session array is always empty. What am I missing? What is the right way to do this?
/// this is the part from app.js that is related to session
// Create ONE session middleware instance up front so the very same
// instance (and secret) can be shared between Express and socket.io below.
var session = require('express-session')({
secret: 'shhh_very_secret',
resave: true,
saveUninitialized: true,
cookie: { secure: false }
});
var sharedsession = require("express-socket.io-session");
// Express
var app = express();
// Config (game and app settings)
var config = require('./config/config.js');
app.config = config;
// Session
app.use(session);
// Socket.io
app.io = socket_io();
// Let socket.io handshakes see the same session; autoSave writes
// handshake-time session changes back to the store.
app.io.use(sharedsession(session, {
autoSave:true
}));
I was using socket.io, but I removed it from the client side, so it shouldn't affect the code (or should it?).
// EDIT: I think the code inside setInterval function is working with some own scope because it's not really adding anything to my object if I check it outside of the function
Express doesn't support sessions out of the box. Make sure you are using middleware that adds sessions, e.g. https://github.com/expressjs/session
I'm new to Sails and don't know exactly where to put the initialisation of an object to be unique in all the app. After reading the docs I assumed that I can have it in the global sails object, but not sure if is the better place.
I'm using the new Appcelerator ArrowDB to store my users and objects. Docs talk about declare the appropriate vars and use it, with the APP_KEY.
// Single ArrowDB client for the app (replace <App Key> with the real key).
var ArrowDB = require('arrowdb'),
arrowDBApp = new ArrowDB('<App Key>');
// Log a user in against ArrowDB using credentials from the request body.
function login(req, res) {
var data = {
login: req.body.username,
password: req.body.password,
// the req and res parameters are optional
req: req,
res: res
};
arrowDBApp.usersLogin(data, function(err, result) {
if (err) {
// NOTE(review): when err is set, `result` may be undefined, so reading
// result.reason can itself throw — verify before relying on it.
console.error("Login error:" + (err.message || result.reason));
} else {
console.log("Login successful!");
console.log("UserInfo: " + JSON.stringify(result.body.response.users[0]));
}
});
}
But I will need to use constantly that arrowDBApp var to create, update, delete objects in the database, so I think the best way is to initialize it in the starting script app.js and share across the app.
I tried it, but I was not able to store it in the sails var; it seems that this var is not available (or loses its config) until sails.lift() is executed.
This code (app.js file) shows nothing in the console:
// Ensure we're in the project directory, so relative paths work as expected
// no matter where we actually lift from.
process.chdir(__dirname);
// Ensure a "sails" can be located:
(function() {
var sails;
try {
sails = require('sails');
} catch (e) {
console.error('To run an app using `node app.js`, you usually need to have a version of `sails` installed in the same directory as your app.');
console.error('To do that, run `npm install sails`');
console.error('');
console.error('Alternatively, if you have sails installed globally (i.e. you did `npm install -g sails`), you can use `sails lift`.');
console.error('When you run `sails lift`, your app will still use a local `./node_modules/sails` dependency if it exists,');
console.error('but if it doesn\'t, the app will run with the global sails instead!');
return;
}
// Try to get `rc` dependency
var rc;
try {
rc = require('rc');
} catch (e0) {
try {
rc = require('sails/node_modules/rc');
} catch (e1) {
console.error('Could not find dependency: `rc`.');
console.error('Your `.sailsrc` file(s) will be ignored.');
console.error('To resolve this, run:');
console.error('npm install rc --save');
rc = function () { return {}; };
}
}
// My own code
var APP_KEY = 'mykey';
var ArrowDB = require('arrowdb');
sails.arrowDBApp = new ArrowDB(APP_KEY);
// NOTE(review): JSON.stringify throws on circular structures, and the sails
// app object is large and self-referential — an exception here would also
// prevent the "Finish" log below. Confirm by wrapping in try/catch.
console.log("Hi" + JSON.stringify(sails));
// Start server
sails.lift(rc('sails'));
// NOTE(review): lift() is asynchronous, so this logs (if reached) before
// the server is actually up; properties attached to `sails` before lifting
// may not survive — config/bootstrap.js is the supported place for this.
console.log("Finish");
})();
No "HI" and no "Finish" are printed. If I try to use sails.arrowDBApp in another controller, it is undefined.
Tips are welcome.
It's not advisable to modify app.js unless you really need to.
The usual space to save all configuration information (e.g. the APP_KEY) is in the config directory in your project root.
One-time initializations (e.g. ArrowDB initialization) can be added to config/bootstrap.js.
Update
In config/arrowdb.js (you need to create this file yourself):
// config/arrowdb.js — app-level ArrowDB settings, reachable at
// sails.config.arrowdb. ArrowDBApp is populated by config/bootstrap.js.
module.exports.arrowdb = {
APP_KEY: 'yourappkey',
ArrowDBApp: null
};
In config/bootstrap.js:
var ArrowDB = require('arrowdb');
// Runs once while Sails lifts: build the shared ArrowDB client and stash it
// on sails.config.arrowdb so controllers can reuse it.
module.exports.bootstrap = function(next){
sails.config.arrowdb['ArrowDBApp'] = new ArrowDB(sails.config.arrowdb['APP_KEY']);
next(); // Don't forget to add this
};
In your controller:
// Example controller action using the client initialized in bootstrap.js.
'task': function(req, res, next) {
sails.config.arrowdb['ArrowDBApp'].usersLogin(...);
// and so on.
// You could also add something like
// var ADB = sails.config.arrowdb['ArrowDBApp'];
// at the top in case you need to use it on and on.
}
Use config/bootstrap.js to initialize something before Sails is lifted. Sometimes, when we want to put something in a global variable, this approach is good to use — for example, defining/overriding the native Promise with the Bluebird Promise.
Use api/services to put some method or other things that you will use regularly in your code (controllers, models, etc.), like Mail Service, that handle sending email within your application.
Use config at config folder to predefined something at sails.config[something]. It can be an object, function, or whatever in order to become configurable, like put Twitter API Key to use Twitter REST API.
To achieve what you wanted, I'll try to use service and bootstrap.js. Try this example.
Create service file at api/services/ArrowDBService.js
Put with this code:
// api/services/ArrowDBService.js — Sails auto-loads files in api/services
// and exposes this one globally as ArrowDBService.
var ArrowDB = require('arrowdb'),
arrowDBApp = new ArrowDB('<App Key>');
module.exports = {
// the shared client, for callers that need other ArrowDB methods
arrowDBApp : arrowDBApp,
// Log a user in with credentials taken from the request body.
login : function (req, res) {
var data = {
login: req.body.username,
password: req.body.password,
// the req and res parameters are optional
req: req,
res: res
};
arrowDBApp.usersLogin(data, function(err, result) {
if (err) {
// NOTE(review): when err is set, `result` may be undefined, making
// result.reason throw — verify before relying on it.
console.error("Login error:" + (err.message || result.reason));
} else {
console.log("Login successful!");
console.log("UserInfo: " + JSON.stringify(result.body.response.users[0]));
}
});
}
};
Now you can use it via sails.services.arrowdbservice.login(req,res) or simply ArrowDBService.login(req,res) (note that this is case-sensitive). Since I don't know ArrowDB, you may explore the login method that your example provides by yourself.
Recently I started learning a little bit about Node.js and it's capabilities and tried to use it for some web services.
I wanted to create a web service which will serve as a proxy for web requests.
I wanted my service to work that way:
User will access my service -> http://myproxyservice.com/api/getuserinfo/tom
My service will perform request to -> http://targetsite.com/user?name=tom
Responded data would get reflected to the user.
To implement it I used the following code:
app.js:
var express = require('express');
var bodyParser = require('body-parser');
var app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
var proxy = require('./proxy_query.js')
// Kick off the proxied lookup for `name`.
// NOTE(review): declared with one parameter but invoked below with two
// arguments ('tom', response) — the response object is silently dropped.
function makeProxyApiRequest(name) {
return proxy.getUserData(name, parseProxyApiRequest);
}
// Parse the proxied JSON; blank the payload out on error.
function parseProxyApiRequest(data) {
// NOTE(review): `returned_data` has no var/let — it leaks to the global
// scope and is shared between concurrent requests.
returned_data = JSON.parse(data);
if (returned_data.error) {
console.log('An eror has occoured. details: ' + JSON.stringify(returned_data));
returned_data = '';
}
return JSON.stringify(returned_data);
}
app.post('/api/getuserinfo/tom', function(request, response) {
makeProxyApiRequest('tom', response);
// NOTE(review): nothing ever writes to `response`, so the client hangs —
// this is the core problem the question describes.
//response.end(result);
});
var port = 7331;
proxy_query.js:
var https = require('https');
// NOTE(review): a single module-level callback is shared by ALL in-flight
// requests — concurrent calls to getUserData overwrite each other's
// callback.
var callback = undefined;
var options = {
host: 'targetsite.com',
port: 443,
method: 'GET',
};
// Accumulate the HTTPS response body and hand it to the registered callback.
function resultHandlerCallback(result) {
var buffer = '';
result.setEncoding('utf8');
result.on('data', function(chunk){
buffer += chunk;
});
result.on('end', function(){
if (callback) {
callback(buffer);
}
});
}
exports.getUserData = function(name, user_callback) {
callback = user_callback
// NOTE(review): `user` is not defined anywhere in this file, so this line
// throws a ReferenceError — presumably a path string like '/user' was
// intended.
options['path'] = user + '?name=' + name;
var request = https.get(options, resultHandlerCallback);
request.on('error', function(e){
console.log('error from proxy_query:getUserData: ' + e.message)
});
// NOTE(review): https.get() already calls end(); this extra end() is a no-op.
request.end();
}
// NOTE(review): this line belongs in app.js, next to `var port = 7331;`.
app.listen(port);
I hope I didn't break this code, because I replaced some things to fit my example.
Anyway, the problem is that I want to send the response to the user when the HTTP request is done, and I can't find out how to do so, because I use Express, and both Express and the HTTP request use asynchronous calls.
I know that if I want to do so, I should pass makeProxyApiRequest the response object so it would be able to pass it to the callback, but that is not possible because of the async problems.
any suggestions?
help will be appreciated.
As you're using your functions to process requests inside your route handling, it's better to write them as express middleware functions, taking the specific request/response pair, and making use of express's next cascade model:
// Express middleware: fetch the proxied user data and stash it on
// res.locals for the next middleware in the chain.
function makeProxyApiRequest(req, res, next) {
// NOTE(review): this invokes the *middleware* parseProxyApiRequest as if
// it were a plain name-parser, and getUserData in the earlier snippet is
// callback-based, so this synchronous assignment is suspect — confirm.
var name = parseProxyApiRequest(req.name);
res.locals.userdata = proxy.getUserData(name);
next();
}
// Express middleware: parse the JSON that makeProxyApiRequest stored in
// res.locals.userdata and expose the result as res.locals.proxyData.
// Forwards an error to next() when the payload is not valid JSON or
// carries an `error` field.
function parseProxyApiRequest(req, res, next) {
    try {
        // remember that JSON.parse will throw if it fails!
        // BUG FIX: `data` was an implicit global, shared across requests.
        var data = JSON.parse(res.locals.userdata);
        if (data.error) {
            // BUG FIX: without `return`, execution fell through and
            // invoked next() a SECOND time after reporting the error.
            return next('An eror has occoured. details: ' + JSON.stringify(data));
        }
        res.locals.proxyData = data;
        next();
    }
    catch (e) { next("could not parse user data JSON."); }
}
// Chain the two middleware functions; the final handler is the one that
// actually sends a response to the client.
app.post('/api/getuserinfo/tom',
makeProxyApiRequest,
parseProxyApiRequest,
function(req, res) {
// res.write or res.json or res.render or
// something, with this specific request's
// data that we stored in res.locals.proxyData
}
);
Even better would be to move those middleware functions into their own file now, so you can simply do:
var middleware = require("./lib/proxy_middleware");
// Same route as above, with the middleware loaded from its own module to
// keep app.js small.
app.post('/api/getuserinfo/tom',
middleware.makeProxyApiRequest,
middleware.parseProxyApiRequest,
function(req, res) {
// res.write or res.json or res.render or
// something, with this specific request's
// data that we stored in res.locals.proxyData
}
);
And keep your app.js as small as possible. Note that the client's browser will simply wait for a response by express, which happens once res.write, res.json or res.render etc is used. Until then the connection is simply kept open between the browser and the server, so if your middleware calls take a long time, that's fine - the browser will happily wait a long time for a response to get sent back, and will be doing other things in the mean time.
Now, in order to get the name, we can use express's parameter construct:
// Validate the :name route parameter once, for every route that uses it.
app.param("name", function(req, res, next, value) {
    req.params.name = value;
    // do something if we need to here, like verify it's a legal name, etc.
    // for instance:
    // BUG FIX: the received parameter is `value` — the original validated
    // an undefined identifier `name`, which throws a ReferenceError.
    var isvalidname = validator.checkValidName(value);
    if(!isvalidname) { return next("Username not valid"); }
    next();
});
...
app.post("/api/getuserinfo/:name", ..., ..., ...);
Using this system, the :name part of any route will be treated based on the name parameter we defined using app.param. Note that we don't need to define this more than once: we can do the following and it'll all just work:
app.post("/api/getuserinfo/:name", ..., ..., ...);
app.post("/register/:name", ..., ..., ... );
app.get("/api/account/:name", ..., ..., ... );
and for every route with :name, the code for the "name" parameter handler will kick in.
As for the proxy_query.js file, rewriting this to a proper module is probably safer than using individual exports:
// let's not do more work than we need: http://npmjs.org/package/request
// is way easier than rolling our own URL fetcher. In Node.js the idea is
// to write as little as possible, relying on npmjs.org to find you all
// the components that you need to glue together. If you're writing more
// than just the glue, you're *probably* doing more than you need to.
var request = require("request");
module.exports = {
// Fetch `url`, post-process the body, and invoke callback(err, result)
// in the usual Node error-first style.
getURL: function(name, url, callback) {
request.get(url, function(err, result) {
if(err) return callback(err);
// do whatever processing you need to do to result:
// (the `....` below is a placeholder — replace it with real
// processing before running this snippet)
var processedResult = ....
callback(false, processedResult);
});
}
};
and then we can use that as proxy = require("./lib/proxy_query"); in the middleware we need to actually do the URL data fetching.
I'm having problems while trying to parse back signed cookies in express/connect application.
// socket.io handshake authorization: recover the signed session cookie
// from the handshake headers.
io.set('authorization', function (handshakeData, callback) {
    if(handshakeData.headers.cookie) {
        // NOTE(review): decodeURIComponent over the WHOLE header can mangle
        // the signed value that parseSignedCookies re-signs for comparison
        // (cookie.parse already decodes individual values) — presumably why
        // an empty object comes back; try parsing the raw header.
        var signedCookies = cookie.parse(decodeURIComponent(handshakeData.headers.cookie));
        handshakeData.cookie = connect.utils.parseSignedCookies(signedCookies, secret);
    } else {
        // BUG FIX: the original called an undefined `accept` here — the
        // parameter is named `callback`.
        return callback('No cookie transmitted', false);
    }
    callback(null, true); // error first callback style
});
What happens is that the call to connect.utils.parseSignedCookies returns an empty object. I looked into the source for the parse function and found out that it calls the unsign method, which takes a substring of the encoded value, tries to sign it again with the same secret, and compares the results to verify that it is the same encoded value; for some reason this fails and the values do not match. I don't know what I'm doing wrong, why those values differ, and why I'm unable to get the correct session ID.
My app initialization code looks like this:
app.use(express.cookieParser(secret));
app.use(express.session({
key: 'sessionID',
secret: secret,
maxAge: new Date(Date.now() + 3600000),
store: new RedisStore({
client: redisClient
})
}));
Please help and point what I'm doing wrong here. Thank you
The cookie parser is a middleware, so we have to use it like one. It will actually populate the object that you pass to it. This is how you would want to be using the parser:
// we need to use the same secret for Socket.IO and Express
var parseCookie = express.cookieParser(secret);

// Authorize the handshake only once its cookies have been parsed.
io.set('authorization', function(handshake, callback) {
    if (handshake.headers.cookie) {
        // pass a req, res, and next as if it were middleware
        parseCookie(handshake, null, function(err) {
            // BUG FIX: propagate parser failures instead of authorizing
            // unconditionally below.
            if (err) { return callback(err, false); }
            // use handshake.signedCookies, since the
            // cookie parser has populated it
            callback(null, true);
        });
    } else {
        // BUG FIX: `accept` was never defined in this snippet — the
        // callback parameter is named `callback`.
        return callback('No session.', false);
    }
});
The cookie parser API changed and this is what it looks like now:
// (Quoted from connect's source, for reference.)
// cookieParser is a middleware factory: given the signing secret it
// returns the actual (req, res, next) middleware.
module.exports = function cookieParser(secret) {
return function cookieParser(req, res, next) {
// skip if some earlier middleware already parsed the cookies
if (req.cookies) return next();
var cookies = req.headers.cookie;
req.secret = secret;
req.cookies = {};
req.signedCookies = {};
if (cookies) {
try {
req.cookies = cookie.parse(cookies);
// signed cookies are only unsigned when a secret was supplied
if (secret) {
req.signedCookies = utils.parseSignedCookies(req.cookies, secret);
req.signedCookies = utils.parseJSONCookies(req.signedCookies);
}
req.cookies = utils.parseJSONCookies(req.cookies);
} catch (err) {
err.status = 400;
return next(err);
}
}
next();
};
};
So what we're doing is passing handshake as a request object, and the parser will read the headers.cookie property. Then, the cookies will be parsed, and put into req.signedCookies. Since we passed handshake as req, the cookies are now in handshake.signedCookies. Note that the cookies are only signed because you passed a secret to the parser.
I was having problems left and right with cookies/sessions/socket.io etc. It was finally #vytautas comment that helped me. In case anyone sees this, please make sure you're connecting to the correct host, whether you have it setup as localhost or an IP address or what have you. Otherwise you won't be able to parse your incoming cookies.
(Seems kind of obvious in hindsight.)