I have installed yo angular-fullstack.
The source code of the project is here: https://github.com/DaftMonk/fullstack-demo
My API looks like this:
thing
├── index.js - Routes
├── thing.controller.js - Controller for our `thing` endpoint
├── thing.model.js - Database model
├── thing.socket.js - Register socket events
└── thing.spec.js - Test
How can I use sockets in thing.controller.js? The socket in the clicked function doesn't work.
/**
* Using Rails-like standard naming convention for endpoints.
* GET /things -> index
* POST /things -> create
* GET /things/:id -> show
* PUT /things/:id -> update
* DELETE /things/:id -> destroy
*/
'use strict';
var _ = require('lodash');
var Thing = require('./thing.model');
[...]
exports.clicked = function(req, res) {
  // Why is socket not defined?
  socket.emit('test', data);
};
In my clicked function I just want to emit a socket event to the client side.
You need to inject your socket.io instance into your thing.controller instance...
thing.controller.js
module.exports = function(context) {
var controller = {};
...
controller.clicked = function(req,res){
context.io.emit('test','data');
}
...
return controller;
}
routes.js
module.exports = function(app, context) {
  ...
  app.use('/api/things', require('./api/thing')(context));
  ...
};
app.js
...
require('./routes')(app, {io:socketio});
...
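For reference, here is a fuller sketch (not from the original answer; the variable names and port are assumptions) of how the socket.io instance might be created and handed to routes.js:

// app.js - a sketch of the wiring, assuming an http server wraps the express app
var express = require('express');
var app = express();
var server = require('http').createServer(app);
var socketio = require('socket.io').listen(server);

// pass the io instance down so controllers can reach it via context.io
require('./routes')(app, { io: socketio });

server.listen(9000);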
NOTE: This will emit the event to ALL listeners...
When you connect via socket.io, a channel is formed between the client and the server; it shows up as the socket passed to the socket.io connection event on the server. When a REST call is made from angular.js to express, there is nothing that ties that request to the socket.io connection from the browser (or any way to know it's even from the same window in the browser).
If you need to communicate via socket.io with a specific client, then you need to rework your angular service to use socket.io instead of REST, or maintain a reference table from the browser to a given socket as part of the REST request. This is a much broader discussion, and will either be limited to a single process, or be a much larger development.
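As a rough illustration (not part of the original answer; it assumes express session middleware is in use and that the client sends its session id after connecting), such a reference table could look like this:

// server side: remember which socket belongs to which session
// (io is the socket.io instance created in app.js)
var socketsBySession = {};

io.sockets.on('connection', function (socket) {
  socket.on('register', function (sessionId) {
    socketsBySession[sessionId] = socket;
    socket.on('disconnect', function () {
      delete socketsBySession[sessionId];
    });
  });
});

// in a REST handler, look the socket up via the session id on the request
exports.clicked = function (req, res) {
  var socket = socketsBySession[req.sessionID];
  if (socket) {
    socket.emit('test', 'data'); // only this client receives the event
  }
  res.end();
};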
Towards developing against a socket.io based service, you may want to check out at least the following:
angular-socket-io component
Writing an angular.js app with socket.io
Make sure the socket.io client script is included and define the socket variable to get it working:
var socket = io('http://localhost');
And finally make sure the clicked function is actually being called when the event is fired.
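For completeness, a minimal client-side sketch (not from the original answer) of listening for the 'test' event that the controller emits:

// client side, after the socket.io script has been loaded
var socket = io('http://localhost');

socket.on('test', function (data) {
  console.log('server says:', data); // fires when the server emits 'test'
});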
Hope it helps!
Related
Assume the following scenario:
A REST client is a module which has many middlewares, which are themselves modules.
Now we're trying to create a new middleware that requires the client itself to fetch/update metadata for the client's input URL.
Testing this middleware will bring in a published version of the client from the npm registry, because the middleware has a devDependency on the client. But we want to serve our local client.
Also, the published version of the client does not contain this new middleware, so it will not allow testing the request pipeline with this middleware.
We want to initialize the client with this middleware when we're testing the middleware itself, so it can send a request to fetch data.
The middleware is smart enough not to request metadata for metadata, so it will skip the second call.
Wrap the Node.js module loader to return the local client instead of the published one when the client is requested during test execution.
describe('middleware', () => {
  const localClient = require('../client');
  const m = require('module');
  const orig = m._load;

  before(() => {
    // intercept module loading: hand back the local client whenever 'client' is required
    m._load = function (name) {
      if (name === 'client') {
        return localClient;
      }
      return orig.apply(this, arguments);
    };
  });

  after(() => {
    // restore the original loader
    m._load = orig;
  });

  it('test goes here', () => { /* ... */ });
});
This has been driving me crazy. I'm new to Node.js. I love it so far, but some things have been throwing me off. I was handed a very basic starting point to a node project and I'm unsure how to search Google for this.
//myapp/server.js
var express = require('express');
var config = require('./config');
var app = express();

var api = require('./app/routes/api')(app, express); // <-- this?
app.use('/', api);

var server = app.listen(3000, function () {
    console.log('\n============================');
    console.log(' Server Running on Port 3000 ');
    console.log('============================\n');
});
Then there's the api.js file that contains the routes.
//myapp/app/routes/api.js
var config = require('../../config');
var mysql = require('mysql');
module.exports = function(app, express) {
var api = express.Router();
api.all('/', function(req, res) {...});
api.all('/route-two', function(req, res) {...});
api.all('/another-route', function(req, res) {...});
return api;
}
Ideally I'd like to break up what's going on here into a more organized structure, but I want to understand exactly what I'm doing.
The main thing that is confusing me is this line
var api = require('./app/routes/api')(app, express);
I was unaware that you could have ()() next to each other without a . or something joining them. Can someone explain what is happening?
Also, what is the point of (app, express)? It appears that app and express are getting passed to the api part of the application so its scope can be reached? Am I way off?
If there is a cleaner approach to this I would love to get some insight. I appreciate any thoughts.
Thanks!
EDIT
To make sure I am understanding...
var api = require('require this file')(params available to this file);
Moving any requires from api.js to server.js, then including those as parameters:
var api = require('./app/routes/api')(config, app, express, mysql);
EDIT
After more helpful feedback from @AhmadAssaf, @Gurbakhshish Singh, and @guy mograbi:
Modules I want to use in a file other than where they are require()'d should be passed in through the second set of ():
//.server.js
var config = require('./config');
var app = express();
var api = require('./app/routes/api')(config, app, express);
| | |
_____________/______/________/
/ / /
//.app/routes/api.js | | |
module.exports = function(config, app, express) {
var api = express.Router();
// code to handle routes
}
I could be wrong with this part, but this is based on what I think I am understanding.
//.server.js
var config = require('./config');
var app = express();
var register = require('./app/routes/register')(config, app, express);
var login = require('./app/routes/login')(config, app, express);
| | |
_________________/______/________/
/ / /
//.app/routes/login.js | | |
module.exports = function(config, app, express) {...handle login...}
//.app/routes/register.js
module.exports = function(config, app, express) {...handle registration...}
etc. etc.
Hopefully my thinking is about right. I appreciate everyone's help on this! :)
So basically you have to understand a few things:
module.exports wraps a JavaScript object and exports it to be used as a pluggable piece of code around a Node.js application.
The wrapped JavaScript object can be a JSON object, a JavaScript variable, a function, etc.
What you have up there in the api module is a function that takes two parameters. When you require that module you want to pass some constructors to that function, and that's the use of the second () after the module name in the first ().
Requiring express once in your program and passing the variable around is more or less a singleton pattern. What you can also do is pass the config object to the api module as well, instead of requiring it again :)
var api = require('./app/routes/api')(app, express);
is equivalent to:
var myFunc = require('./app/routes/api');
var api = myFunc(app, express);
and because of Node.js's module loading procedure, require('...') will be plugged in by the piece of code that was exported at that path; it can be an object, a function, a simple variable, etc.
And as far as ()() goes, in your case Node.js turns require()() into something like function(){}(). This is valid JavaScript and rather useful for writing IIFE (Immediately-Invoked Function Expression) code.
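As a minimal illustration of the same ()() idea (this example is not from the question):

// an IIFE: the function expression is defined and invoked in one step
(function (name) {
  console.log('Hello, ' + name);
})('world'); // prints "Hello, world"

// require('./app/routes/api') evaluates to the exported function,
// so the trailing (app, express) simply invokes it right away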
Question 1
explain ()()
In every language where a function can return a function you can have this syntax. Imagine the following:
function world(){ ... }
function hello(){
return world;
}
// ===>
hello()() // ==> would invoke hello and then world.
So when you see require('..')(), it means require('..') returns a function. You do this by writing the following:
module.exports = function(){}
and that function returns yet another function - in your case this means express.Router() returns a function (an Express router is itself callable as middleware).
Question 2
is there a cleaner way to write this?
This is a discussion which is hard to answer; it depends on your preferences. The only thing I can think of that might help you reach an answer is to use the express generator and see the structure the express team uses, which is probably as clean as it gets.
express can generate a project for you with some code to start with. Simply install it with npm install -g express and then run express - it will generate the project for you in the directory where you ran it.
Go over the generated project. I suggest following the same pattern - this is what I do whenever I kick off a project.
If something is still unclear or if you need me to elaborate, please comment so and I will edit the answer.
module.exports from api.js is a function which takes two arguments: app and express. Therefore, when you require it in server.js with require('./app/routes/api'), the value returned is that function. Since it's a function, you can just call it by putting parentheses after it and passing in the arguments it expects (app and express), like so: require('./app/routes/api')(app, express).
This is how my app.js looks:
var app = require('http').createServer(handler),
    io = require('socket.io').listen(app),
    static = require('node-static'); // for serving files

var game = require('./game.js').createGame();

// This will make all the files in the current folder
// accessible from the web
var fileServer = new static.Server('./');

// This is the port for our web server.
// you will need to go to http://localhost:8080 to see it
app.listen(8080);

// Listen for incoming connections from clients
io.sockets.on('connection', function (socket) {
  // handle events ...
});

exports.io = io;
exports.game = game;
When I try to access the created socket.io listener or the game instance, I get an error saying it's undefined.
This is how I am trying to access it in trick.js:
var game = require('./app.js').game;
var socketio = require('./app.js').io;
var PlayerMove = require('./playerMove.js');
This might be because trick.js is actually executed before app.js (I put in debug points and it was confirmed there). How do I avoid this? Is there a better way of exposing object instances in Node.js, some pattern that I should probably use?
Declaring a variable does not export it. If you need to export something from your module, you should use exports.myVariable = myValue.
Requiring your app.js file for the first time will run it, so everything you export in it will be available once you're out of the require() call.
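A small sketch of the timing issue (the file and method names here are assumptions, not the asker's actual code): keep a reference to the required module and read the exported value lazily, at call time, instead of copying it at load time.

// app.js
var game = require('./game.js').createGame();
exports.game = game; // only set once this line has run

// trick.js
var app = require('./app.js');

// Don't copy app.game into a variable at load time; read it when needed.
exports.playMove = function (move) {
  app.game.applyMove(move); // applyMove is a made-up method; app.game is resolved when this runs
};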
I have one module which wraps the Socket.io functionality my app is using which looks something like this:
// realtime.js
var io = require('socket.io'),
sio;
exports.init = function(expressServer) {
sio = io.listen(expressServer);
}
...
The main app.js file looks like
// app.js
var rt = require('./realtime.js'),
other = require('./other.js');
...
rt.init(expressServer);
The other module also uses rt.js
// other.js
var rt = require('./realtime.js');
...
My question is, will both other.js and app.js have the same instance of rt.js?
The answer on SO relating to redis led me to believe the above statement is true, but the documentation here says
Multiple calls to require('foo') may not cause the module code to be
executed multiple times. This is an important feature. With it,
"partially done" objects can be returned, thus allowing transitive
dependencies to be loaded even when they would cause cycles.
which seems to imply that it's not guaranteed to be the case?
Finally, this question appears to indicate it depends on the filename, and that since there is only one instance of rt.js it shouldn't be executed more than once. If that's the case, does it depend only on rt.js being the same file, or does it depend on the path specified by require? Basically, if rt.js and other.js were in lib/ and app.js was one level down, the requires in other.js and app.js would point to rt.js via different relative paths - does this matter?
I'd be grateful if anyone could clear this confusion up for me!
Modules are currently evaluated only once, but you should not rely on this. Having state in a module is considered bad practice. What prevents you from passing a reference to sio to other.js?
// realtime.js
var io = require('socket.io');

exports.init = function(expressServer) {
  return io.listen(expressServer);
};
// app.js
var rt = require('./realtime.js'),
other = require('./other.js');
...
var sio = rt.init(expressServer);
// now ask other.js to use same sio instance
other.use_sio(sio);
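A sketch of the receiving side (not in the original answer; it assumes the use_sio name used above):

// other.js
var sio;

exports.use_sio = function (io) {
  sio = io; // keep the shared instance handed over from app.js
};

exports.notify = function (event, data) {
  sio.sockets.emit(event, data); // reuse it later without re-requiring socket.io
};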
Be sure not to install socket.io in more than one place. If you require socket.io in different modules where each module searches for packages from a different path, then each module will load a separate instance of the package.
App directory layout:
-module1
--/node_modules    // has socket.io
---/socket.io
--/module1.js      // requires socket.io from module1/node_modules
-module2
--/node_modules    // has another socket.io installation
---/socket.io
--/module2.js      // requires socket.io from module2/node_modules (does not reuse the instance required in module1)
Hope this helps.
Is there a common convention for breaking up and modularizing the app.js file in an Express.js application? Or is it common to keep everything in a single file?
I have mine broken up as follows:
~/app
|~controllers
| |-monkey.js
| |-zoo.js
|~models
| |-monkey.js
| |-zoo.js
|~views
| |~zoos
| |-new.jade
| |-_form.jade
|~test
| |~controllers
| |-zoo.js
| |~models
| |-zoo.js
|-index.js
I use exports to return what's relevant. For instance, in the models I do:
module.exports = mongoose.model('PhoneNumber', PhoneNumberSchema);
and then if I need to create a phone number, it's as simple as:
var PhoneNumber = require('../models/phoneNumber');
var phoneNumber = new PhoneNumber();
If I need to use the schema, then it's PhoneNumber.schema.
(This assumes that we are working from the routes folder and need to go one level up and then down to models.)
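For context, the model file behind that one-liner might look roughly like this (the schema fields are assumptions for illustration):

// models/phoneNumber.js
var mongoose = require('mongoose');

var PhoneNumberSchema = new mongoose.Schema({
  number: String,
  label: String
});

module.exports = mongoose.model('PhoneNumber', PhoneNumberSchema);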
EDIT 4
The express wiki has a list of frameworks built on top of it.
Of those, I think Twitter's matador is structured pretty well. We actually used a very similar approach to how they load up parts of the app.
derby.js also looks extremely interesting. It's akin to meteor without all of the hype and actually gives credit where credit is due (notably, node and express).
EDIT 3
If you are a fan of CoffeeScript (I am not) and reeeeaaaaaally want the L&F of Rails, there is also Tower.js.
EDIT 2
If you are familiar with Rails and don't mind the bleed-over of some concepts, there is Locomotive. It is a lightweight framework built on Express. It has a very similar structure to RoR and carries over some of the more rudimentary concepts (such as routing).
It's worth checking out even if you don't plan to use it.
EDIT 1
nodejs-express-mongoose-demo is very similar to how I have mine structured. Check it out.
Warning: I'm referencing code I hacked together for Node Knockout; it kind of works but is far from elegant or polished.
To be more specific about splitting up app.js, I have the following app.js file:
var express = require('express'),
    bootstrap = require('./init/bootstrap.js'),
    app = module.exports = express.createServer();

bootstrap(app);
This basically means I place all my bootstrapping in a separate file, then I bootstrap the server.
So what does bootstrap do?
var configure = require("./app-configure.js"),
    less = require("./watch-less.js"),
    everyauth = require("./config-everyauth.js"),
    routes = require("./start-routes.js"),
    tools = require("buffertools"),
    nko = require("nko"),
    sessionStore = new (require("express").session.MemoryStore)();

module.exports = function(app) {
  everyauth(app);
  configure(app, sessionStore);
  less();
  routes(app, sessionStore);
  nko('/9Ehs3Dwu0bSByCS');

  app.listen(process.env.PORT);
  console.log("server listening on port xxxx");
};
Well, it splits all the server initialization setup into nice chunks. Specifically:
I have a chunk that sets up all my remote OAuth authentication using everyauth.
I have a chunk that configures my application (basically calling app.configure; a sketch of this chunk follows the list).
I have a little bit of code that punches less so it re-compiles any of my less into css at run time.
I have code that sets up all my routes.
I call this small nko module.
Finally I start the server by listening to a port.
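As a sketch of what the configure chunk might contain (not from the original answer; the middleware choices are assumptions written in the Express 2/3 style that matches the createServer call, and only the 'express.sid' session key is taken from the socket.io code below):

// app-configure.js
var express = require("express");

module.exports = function (app, sessionStore) {
  app.configure(function () {
    app.use(express.bodyParser());
    app.use(express.cookieParser());
    app.use(express.session({
      secret: "keyboard cat",   // placeholder secret
      key: "express.sid",       // must match the cookie name read in the socket.io auth code
      store: sessionStore
    }));
    app.use(app.router);
    app.use(express.static(__dirname + "/../public"));
  });
};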
Just as an example, let's look at the routing file:
var fs = require("fs"),
    parseCookie = require('connect').utils.parseCookie;

module.exports = function(app, sessionStore) {
  var modelUrl = __dirname + "/../model/",
      models = fs.readdirSync(modelUrl),
      routeUrl = __dirname + "/../route/",
      routes = fs.readdirSync(routeUrl);
Here I load all my models and routes as arrays of files.
Disclaimer: readdirSync is only OK when called before you start the http server (before .listen). Making synchronous blocking calls at server start time just makes the code more readable (it's basically a hack).
var io = require("socket.io").listen(app);
io.set("authorization", function(data, accept) {
if (data.headers.cookie) {
data.cookie = parseCookie(data.headers.cookie);
data.sessionId = data.cookie['express.sid'];
sessionStore.get(data.sessionId, function(err, session) {
if (err) {
return accept(err.message, false);
} else if (!(session && session.auth)) {
return accept("not authorized", false)
}
data.session = session;
accept(null, true);
});
} else {
return accept('No cookie', false);
}
});
Here I punch socket.io to actually use authorization rather than letting just anyone talk to my socket.io server.
routes.forEach(function(file) {
  var route = require(routeUrl + file),
      model = require(modelUrl + file);
  route(app, model, io);
});
};
Here I start my routes by passing the relevant model into each route object returned from the route file.
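A route file under this convention could look something like this (the file name, URL, and model method are assumptions for illustration, not the original project's code):

// route/user.js
module.exports = function (app, model, io) {
  app.get("/users/:id", function (req, res) {
    model.findById(req.params.id, function (err, user) {
      if (err) return res.send(500);
      res.json(user);
    });
  });
};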
Basically the gist is that you organize everything into nice little modules and then have some bootstrapping mechanism.
My other project (my blog) has an init file with a similar structure.
Disclaimer: the blog is broken and doesn't build; I'm working on it.
For maintainable routing organisation you can check out this article about the express-routescan node module and try it. This is the best solution for me.
I have my apps built on top of the express-generator tool. You can install it by running npm install express-generator -g and run it using express <APP_NAME>.
To give you a perspective, one of my smaller application's structure looked like this:
~/
|~bin
| |-www
|
|~config
| |-config.json
|
|~database
| |-database.js
|
|~middlewares
| |-authentication.js
| |-logger.js
|
|~models
| |-Bank.js
| |-User.js
|
|~routes
| |-index.js
| |-banks.js
| |-users.js
|
|~utilities
| |-fiat-conversion.js
|
|-app.js
|-package.json
|-package-lock.json
One cool thing I like about this structure, which I've ended up adopting for any express application I develop, is the way the routes are organized. I did not like having to require each route file into app.js and app.use() each route, especially as the file gets bigger. As such, I found it helpful to group and centralize all my app.use() calls in a ./routes/index.js file.
In the end, my app.js will look something like this:
...
const express = require('express');
const app = express();
...
require('./routes/index')(app);
and my ./routes/index.js will look something like this:
module.exports = (app) => {
app.use('/users', require('./users'));
app.use('/banks', require('./banks'));
};
I am able to simply require('./users') because I wrote the users route using express.Router(), which allows me to "group" multiple routes and then export them at once, with the goal of making the application more modular.
This is an example of what you would find in my ./routes/users.js route:
const router = require('express').Router();
router.post('/signup', async (req, res) => {
// Signup code here
});
module.exports = router;
Hopefully this helped answer your question! Best of luck!