I'm currently creating an app using Node.js that makes use of Express and Socket.io. As the app grows it's becoming increasingly difficult to deal with one file. I'm in the process of moving out the parts I know how to separate, but I was wondering about the best approach for the rest.
I have a private area constructor similar to:
privateArea.js
function privateArea(props) {
    this.id = props.id;
    this.name = props.name;
    this.users = [];
}

privateArea.prototype.addUser = function(socketId) {
    this.users.push(socketId);
};

module.exports = privateArea;
I'd also like this to have access to the socket.io variable that's been set up in a separate sockets.js file, which can be included from the main app.js, plus a separate file for the Express setup (express.js).
So I'd like the structure as follows:
project
| app.js - joins it all together
| express.js - initialises and manages all express routing
| privateArea.js - constructor for private areas - must be able to reference socket.io
| sockets.js - initialises and manages all socket.io sockets and events
Any help/examples would be very appreciated.
Thanks
I use socket.io and express quite often in my projects, and I've developed a template which makes things easy. I like to have a fail-over in case the socket connection drops for some reason, or if a socket connection cannot be established. So I create HTTP channels as well as socket channels. Here's a basic module template:
module.exports = function () {
    var exported = {};

    var someFunction = function (done) {
        //.. code here..//
        if (typeof done === "function") {
            done(null, true);
        }
    };
    // export the function
    exported.someFunction = someFunction;

    var apicalls = function (app) {
        app.get("/module/someFunction", function (req, res) {
            res.header("Content-Type", "application/json");
            someFunction(function (err, response) {
                if (err) return res.send(JSON.stringify(err));
                res.send(JSON.stringify(response));
            });
        });
    };
    exported.apicalls = apicalls;

    var socketcalls = function (io) {
        io.on("connection", function (socket) {
            socket.on('module-someFunction', function () {
                someFunction(function (err, response) {
                    if (err) return socket.emit('module-someFunction', err);
                    socket.emit('module-someFunction', response);
                });
            });
        });
    };
    exported.socketcalls = socketcalls;

    return exported;
};
So to use this, I'd first need to include the module in my app.js file like this (note the trailing parentheses, since the module exports a factory function that must be invoked):
var mymod = require('./myModule.js')();
And then I can enable access to this service from HTTP and over the websocket like this:
mymod.apicalls(app); // passing express to the module
mymod.socketcalls(io); // passing socket.io to the module
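For completeness, the surrounding app.js wiring could look roughly like this; the app and io names match the calls above, while the http/socket.io setup and the port are my assumptions:

var express = require('express');
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io')(server);

// the module exports a factory, so invoke it once and reuse the result
var mymod = require('./myModule.js')();

mymod.apicalls(app);    // HTTP routes: /module/someFunction
mymod.socketcalls(io);  // socket events: 'module-someFunction'

server.listen(3000);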
Finally, from the front-end, I can check to see if I have a socket connection, and if so, I use the socket to emit "module-someFunction". If I don't have a socket connection, the front-end does an AJAX call to "/module/someFunction" instead, which hits the same function on the server side that it would have hit had I used the socket connection.
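For illustration, here's a rough sketch of what that front-end fallback could look like; the endpoint and event names mirror the template above, while the rest (the callSomeFunction helper, the use of fetch) is just an assumption of how it might be wired up:

// client-side sketch (assumes the socket.io client script is loaded)
var socket = io();

function callSomeFunction(handleResponse) {
    if (socket && socket.connected) {
        // socket channel: listen once for the reply, then emit the request
        socket.once('module-someFunction', handleResponse);
        socket.emit('module-someFunction');
    } else {
        // HTTP fail-over channel: same handler, same server-side function
        fetch('/module/someFunction')
            .then(function (res) { return res.json(); })
            .then(handleResponse);
    }
}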
As an added bonus, if I need to utilize the function within the server, I could do that as well since the function is exported. That would look like this:
mymod.someFunction(function (err, response) {
// ... handle result here ... //
});
I'm having trouble creating a module which exposes functionality for my Socket.IO library:
const sio = require('socket.io');

module.exports = function(server) {
    const io = sio(server);
    return {
        register: function(namespace) {
            let nsp = io.of(namespace);
            nsp.on('connect', function(socket) {
                // ...
            });
        }
    };
};
The problem now is: how do I make use of this in other modules? In my app.js I create the server with Express and can instantiate the module with require('./mysocketio')(server), but not in other modules, because server is not available there. What's a nice way to resolve these circular dependencies?
Well, you can achieve this in various ways, for example:
Setting objects on the global namespace (altering global needs care).
Using module.exports and requiring the object in the other files (can lead to circular dependency issues if not done properly).
Passing the instance as an argument to the controllers while requiring them in the routes.
myModule.js: the module which exposes the functionality of your Socket.IO library
const sio = require('socket.io');

module.exports = function(server) {
    const io = sio(server);
    return {
        register: function(namespace) {
            let nsp = io.of(namespace);
            nsp.on('connect', function(socket) {
                // ...
            });
        }
    };
};
Flow 1: Setting the module on the global namespace.
app.js
var app = require('express')();
var server = require('http').createServer(app);
var io = require('./myModule')(server);
global._io = io;
server.listen(80);
controller.js
module.exports = function(io){
    var that = {};
    /*
     * Private local variable
     * made const so that
     * one does not alter it by mistake
     * later on.
     */
    const _io = global._io;

    that.myAction = function(req, res){
        _io.register('newRoom');
        res.send('Done');
    };

    return that;
}
Flow 2: Passing the module as an argument.
app.js
var app = require('express')();
var server = require('http').createServer(app);
var io = require('./myModule')(server);
require('./router.js')(app, io);
server.listen(80);
router.js
/*
 * Contains the routing logic
 */
module.exports = function (app, io) {
    // passing while creating the instance of controller for the first time.
    var controller = require("./controller")(io);

    app.get('/test/about', controller.myAction);
};
controller.js
module.exports = function(io){
    var that = {};
    const _io = io;

    that.myAction = function(req, res){
        _io.register('newsRoom');
        res.send('Done');
    };

    // everything attached to that will be exposed
    // more like making public member functions and properties.
    return that;
}
Flow 3: Setting io on global inside the module, so there's no need to pass the server every time.
app.js
var app = require('express')();
var server = require('http').createServer(app);
require('./myModule')(server);
require('./router.js')(app);
server.listen(80);
controller.js
// no need to pass the server as io is already initialized
const _io = require('./myModule')();

module.exports = function(io){
    var that = {};

    that.myAction = function(req, res){
        _io.register('newsRoom');
        res.send('Done');
    };

    return that;
}
myModule.js
module.exports = function(server) {
    const _io = global._io || require('socket.io')(server);
    if (global._io === undefined) {
        // initializing io for future use
        global._io = _io;
    }
    return {
        register: function(namespace) {
            let nsp = _io.of(namespace);
            nsp.on('connect', function(socket) {
                // ...
            });
        }
    };
};
Probably the cleanest way is to pass it as an argument to the controllers while requiring them in the routes. The third flow seems promising too, but one should be careful when altering the global namespace.
It's not really a circular dependency; It's just that your module a) depends on another module that's not globally available and b) your module is presumably used in many places in your code.
Global
A possible solution (with downsides) is to just load your module once and attach it to a global:
global.mysocketio = require('./mysocketio')(server);
This allows you to access global.mysocketio anywhere in your project once it has been loaded. This is a construction that I personally use for my own logger; my logger is used in many places around my code, so I just keep it attached to global.log.
However, usage of globals is a bit dirty; it causes problems with namespace separation (what if some code somewhere decides to use global.mysocketio itself?), and it creates an 'invisible' dependency: other code just assumes that a certain global will exist, and it's not that easy to find these dependencies.
Export
A nicer solution is to just pass the variable wherever needed. There are many ways to do this. I understand that your app.js doesn't have the server variable available, but it surely includes your Express code in some way. If you need 'server' or 'mysocketio' available from app.js, just export it from the module where you are creating 'server', like this:
module.exports.expressServerVar = server;
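To make that concrete, here's a rough sketch of how the wiring could look; the file names (express.js, mysocketio.js), the expressServerVar name, and the port are just assumptions for illustration:

// express.js -- creates the Express app and the underlying HTTP server
var express = require('express');
var http = require('http');

var app = express();
var server = http.createServer(app);
server.listen(3000);

module.exports.app = app;
module.exports.expressServerVar = server;

// app.js -- pulls the exported server and hands it to the socket module
var server = require('./express').expressServerVar;
var mysocketio = require('./mysocketio')(server);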
Just my 2 cents; Do you strongly disagree with me or am I missing something important? Let me know!
I'd use a factory or dependency injection. You could use something like jimple.
But here's an example without using any external dependencies. This is by no means the best code example but it should hopefully get the point across. I'd still recommend using jimple rather than this.
// app.js
var express = require('express');
var app = express();
var factory = require('./factory.js');
factory.setExpress(app); // This could also be done in the factory constructor, or you could instantiate your express app in the factory.js class.
// factory.js
var socketIoModule = require('./your-socket-io-module.js');

function Factory() {
}

Factory.prototype.setExpress = function(app) {
    this.app = app;
};

Factory.prototype.getSocketIOModule = function() {
    return socketIoModule(this.app);
};

// By exporting it this way we are making it a singleton.
// This means that each module that requires this file will
// get the same instance of factory.
module.exports = new Factory();
// some code that needs the socket io module
var factory = require('./factory.js');

function useSocketIoModule() {
    var socketIo = factory.getSocketIOModule();
    socketIo.doStuff();
}
The approach that I use in my applications is exposing the server and io instances from the start script and reusing them in the modules:
// Setup servers.
var http = require('http').Server(app);
var io = require('socket.io')(http);
// Setup application.
require('./server/app')(app, express, io);
// Start listening on port.
http.listen(configs.PORT, function() {
    console.log("Listening on " + configs.PORT);
});
Inside your modules you can use the io instance to set up event handlers or emit events, something like this:
module.exports = {
    contest: function(io, contest) {
        var namespace = io.of('/' + contest.id);

        namespace.on('connection', function(socket) {
            socket.on('word', function(data) {
                ...
            });
        });
    }
};
For your sample
I would put this part in app.js or in the js file that is used to start the server:
const sio = require('socket.io');
const io = sio(server);
and would have the Socket.IO module like this:
module.exports = function(server, io) {
    return {
        register: function(namespace) {
            let nsp = io.of(namespace);
            nsp.on('connect', function(socket) {
                // ...
            });
        }
    };
};
My sample
https://github.com/gevorg/typeitquick/blob/master/web.js
https://github.com/gevorg/typeitquick/blob/master/server/contest.coffee
https://github.com/gevorg/typeitquick/blob/master/server/io.coffee
I would like to reuse a RabbitMQ channel in different node modules. Since the channel is created asynchronously, I am not sure what the best approach would be to "inject" this channel object to other modules.
If possible, I would like to avoid external dependencies like DI containers for this.
Below, you find my simplified code.
Thank you in advance for any advice.
web.js
require('./rabbitmq')(function (err, conn) {
    ...
    // Start web server
    var http = require('./http');
    var serverInstance = http.listen(process.env.PORT || 8000, function () {
        var host = serverInstance.address().address;
        var port = serverInstance.address().port;
    });
});
rabbitmq.js:
module.exports = function (done) {
    ...
    amqp.connect(rabbitMQUri, function (err, conn) {
        ...
        conn.createChannel(function(err, ch) {
            ...
            // I would like to reuse ch in other modules
        });
    });
}
someController.js
module.exports.post = function (req, res) {
    // Reuse/inject "ch" here, so I can send messages
}
You could always attach your channel handle to the global object so it can be accessed across modules:
// rabbitmq.js
global.ch = ch
// someController.js
global.ch // <- is the channel instance
I would prefer to inject the handle into each function because I think it is more explicit and easier to reason about and unit test. You could do that with partial function application,
with lodash:
var _ = require('lodash');

var post = function (req, res, rabbitMQchannel) {
    // Reuse/inject "ch" here, so I can send messages
};

var ch = getYourChannel();

// post is a function object that accepts two params, and the
// third is bound to your channel
module.exports.post = _.partialRight(post, ch);
A third option is to have rabbitmq.js export a function for getting a channel. The channel could be a singleton, so there would only ever be one instance of it. Then any method that wants a channel handle would request one from that function, which wouldn't require channels to be passed around through function arguments or globally.
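A minimal sketch of that third option could look like this; the init/getChannel names are just assumptions, and I'm assuming the callback API of amqplib, which the question's code appears to use:

// rabbitmq.js
var amqp = require('amqplib/callback_api');

var channel = null; // cached channel; Node's require cache makes this module a singleton

module.exports.init = function (rabbitMQUri, done) {
    amqp.connect(rabbitMQUri, function (err, conn) {
        if (err) return done(err);
        conn.createChannel(function (err, ch) {
            if (err) return done(err);
            channel = ch;
            done(null, ch);
        });
    });
};

module.exports.getChannel = function () {
    return channel; // null until init() has completed
};

// someController.js
var rabbitmq = require('./rabbitmq');

module.exports.post = function (req, res) {
    var ch = rabbitmq.getChannel();
    // ch.sendToQueue(...), etc.
    res.send('ok');
};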
I have a server.js file which is the starting point of my Node application and is also responsible for invoking 3 different functions (these functions are invoked just once when the server is up: things like creating a child process, validation, etc.), which is OK.
My questions are:
1. Should I put this logic (the 3 functions below) in a different file in the controller folder, or is it OK to keep it in the server.js file (separation of concerns)?
2. If I put those function calls in a different file (in the controller folder), should I invoke those functions via an event (like a serverIsUp event)? An example of this approach would be very helpful.
The server.js looks like the following (in short):
http.createServer(app).listen(app.get('port'), function (err) {
    if (err) {
        console.error(err);
    } else {
        console.log('server listening on port ' + app.get('port'));
    }
});
...
// Here are the functions which are called when the server is up and running
childProcess.createProcess();
fileParser.parse();
invokeValidations();
You can create a new file (or several, one per function) and export/require the functions:
In newfile.js you export the function
exports.invokeValidations = function () {
    // Do something
};
In server.js you require the file and invoke the function
var myFunctions = require('./newfile.js');
myFunctions.invokeValidations();
As for whether you should or not, that's up to you; if the number of functions increases, I would recommend putting them into separate files to keep things organised.
EDIT:
To use an event emitter you should create your own emitter:
emitter.js:
var EventEmitter = require('events').EventEmitter;
var localEmitter = new EventEmitter();
module.exports = localEmitter;
Then you should require it in server.js and in your module.js
server.js:
var myEmitter = require('./emitter');
var myModule = require('./module');

myEmitter.on('boot', function(){
    console.log('hello world !');
});

myModule.invokeValidations();
module.js:
var myEmitter = require('./emitter');
exports.invokeValidations = function () {
    myEmitter.emit('boot');
    // Do something
};
And you are done
I'm trying to create a Node.js server using socket.io. At the moment it is just a proof of concept.
I created 2 files, the first for the server and another for the server-side user.
server-core.js
'use strict';

var io = require('socket.io').listen(4567);
var user = require('./server-user');

var users = [];

io.sockets.on('connection', function(socket){
    var su = new user.ServerUser(socket);
    users[socket.id] = su;

    socket.on('auth', su.auth);
    socket.on('disconnect', su.disconnect);
});

io.sockets.on('disconnect', function(socket){
    console.log('disconnect');
    users[socket.id].disconnect();
});

console.log('Server started');
server-user.js
var ServerUser = (function(){
    function ServerUser(socket){
        this.socket = socket;
        console.log('serverUser-ctor ' + this.socket);
    }

    ServerUser.prototype.auth = function(data){
        console.log('auth received\r\n' + data);
        this.socket.emit('auth', {
            Id: data.Id,
            Timestamp: data.Timestamp,
            StringField: data.StringField
        });
    };

    ServerUser.prototype.disconnect = function(){
        console.log('Client disconnected');
    };

    return ServerUser;
})();

module.exports = {
    ServerUser: ServerUser
};
My C# client connects fine to the server, but when server-user tries to send the answer back, this.socket is undefined in the ServerUser.prototype.auth method. This tells me that the instance of ServerUser that I create in server-core is not being held onto, and that when the 'auth' method is called a new instance of the object is actually being created.
To prove this I replaced this line
socket.on('auth', su.auth);
with this one:
socket.on('auth', function(data){su.auth(data);});
After this it worked as needed.
Is this the correct way to write JS code?
Is there a better way to separate logic into separate files and classes when writing large Node.js applications?
Thanks for any opinions.
The problem is the invocation context. When you pass su.auth to socket.on(), this no longer refers to su inside of auth. So, there are a couple of ways to fix that. Using an anonymous function, as you found, is one. Function.bind is another:
socket.on('auth', su.auth.bind(su));
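Another option along the same lines (just a sketch of the idea, with hypothetical method bodies, not the original code) is to bind the methods once in the constructor, so instances can hand their methods out as plain callbacks without losing this:

function ServerUser(socket) {
    this.socket = socket;
    // bind the prototype methods to this instance so they can be
    // passed directly to socket.on() without losing their context
    this.auth = this.auth.bind(this);
    this.disconnect = this.disconnect.bind(this);
}

ServerUser.prototype.auth = function (data) {
    this.socket.emit('auth', data);
};

ServerUser.prototype.disconnect = function () {
    console.log('Client disconnected');
};

// now this works as expected:
// socket.on('auth', su.auth);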
I'd like to create a model to handle everything related to users, starting with a findOne() function.
app.js:
var u = new User(client);
u.findOne(function(error, user) {
    console.log(error, user);
});
models/User.js:
var User = function (client) {
    this.client = client
};

User.prototype.findOne = function (id, callback) {
    client.connect();
    client.get('testkey', function(error, result) {
        var test = "hello#world.com";
        callback(null, test);
        client.close();
    });
};
module.exports = User;
Node.js complains that findOne() is undefined.
What's the correct way of creating such models and providing them with objects, like database pools etc.?
Your code contains various errors:
You do not use new when creating the instance
You mixed a function with the object literal syntax:
var User = function (client) {
    client: client
};
You want this.client = client; instead. Right now the function body does nothing, as it just defines a label called client and does nothing with the variable client.
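For reference, here's a minimal corrected sketch of the model and its usage (keeping the question's client API as-is, so connect/get/close are assumed to exist on whatever client gets passed in):

// models/User.js
var User = function (client) {
    this.client = client; // keep a reference to the injected client / pool
};

User.prototype.findOne = function (callback) {
    var self = this;
    self.client.connect();
    self.client.get('testkey', function (error, result) {
        callback(error, result);
        self.client.close();
    });
};

module.exports = User;

// app.js
var User = require('./models/User.js');

var u = new User(client); // note the `new`
u.findOne(function (error, user) {
    console.log(error, user);
});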
I would suggest you search for an existing ORM for Node.js instead of trying to write one on your own.