Testing a node.js server in the same process as the test - javascript

I am wondering if there is any disadvantage to starting a server in a process and then running tests against that server in the same process.
Obviously there are some performance concerns, but if we are testing accuracy instead of performance, are there any major concerns with code like the following?
var fs = require('fs');
var path = require('path');
var http = require('http');
var supertest = require('supertest');
var assert = require('assert');

describe('#Test - handleXml()*', function() {
    var self = this;
    var server;
    var payload = ''; // stringified XML
    var xmlPath = path.resolve('test', 'test_data', 'xml_payloads', 'IVR_OnDemandErrorCode.xml');

    before(function(done) {
        var config = self.config = require('univ-config')(module, this.test.parent.title, 'config/test-config');
        server = createServer().on('listening', function() {
            done(null);
        });
    });

    beforeEach(function(done) {
        fs.readFile(xmlPath, 'utf8', function(err, content) {
            assert(err == null);
            payload = content;
            done();
        });
    });

    it('should accept request Content-type "text/xml or application/xml"', function(done) {
        supertest(server)
            .post('/event')
            .set('Content-Type', 'application/xml')
            .send(payload)
            .expect(200, done);
    });

    it('should transform XML payload into JSON object', function(done) {
        supertest(server)
            .post('/event')
            .set('Content-type', 'application/xml')
            .send(payload)
            .expect(200)
            .end(function(err, res) {
                assert(err == null, 'Error is not null');
                var jsonifiedXml = JSON.parse(res.text);
                assert(typeof jsonifiedXml === 'object', 'jsonifiedXml not an object');
                done();
            });
    });

    describe('JSONified XML', function() {
        it('should have proper key casing', function(done) {
            supertest(server)
                .post('/event')
                .set('Content-type', 'application/xml')
                .send(payload)
                .expect(200)
                .end(function(err, res) {
                    assert(err == null);
                    var payload = JSON.parse(res.text);
                    payload = payload.events[0].data;
                    assert(payload.hasOwnProperty('ppv'), 'Bad value for ppv');
                    assert(payload.hasOwnProperty('mac'), 'Bad value for mac');
                    assert(payload.hasOwnProperty('appName'), 'Bad value for appName');
                    assert(payload.hasOwnProperty('divisionId'), 'Bad value for divisionId');
                    assert(payload.hasOwnProperty('callTime'), 'Bad value for callTime');
                    assert(payload.hasOwnProperty('callDate'), 'Bad value for callDate');
                    assert(payload.hasOwnProperty('ivrLOB'), 'Bad value for ivrLOB');
                    done();
                });
        });
    });
});

function createServer(opts) {
    //Note: this is a good pattern, definitely
    var handleXml = require(path.resolve('lib', 'handleXml'));
    var server = http.createServer(function(req, res) {
        handleXml(req, res, function(err) {
            res.statusCode = err ? (err.status || 500) : 200;
            res.end(err ? err.message : JSON.stringify(req.body));
        });
    });
    server.listen(5999); //TODO: which port should this be listening on? a unused port, surely
    return server;
}

That's the standard way of testing the HTTP endpoints in a node application. But you are not going to want a createServer() function in each test. You will have a common function that creates a server, which you can use throughout your application, including to start the production server.
You're right in noticing that having the server listen on a port doesn't actually do anything for you.
For this reason, it's common to have what I call an application factory that sets up everything about a server but does not listen on a port. That way I can access the server from a test or a script. The production app gets booted from a minimal index file:
var createServer = require('./AppFactory');
var server = createServer();
server.listen(5999);
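
A minimal sketch of what such a factory might look like (AppFactory.js is a hypothetical name here, and handleXml stands in for the real request handler from the question):

// AppFactory.js (hypothetical) - builds the server but never calls listen()
var http = require('http');
var path = require('path');

module.exports = function createServer() {
    var handleXml = require(path.resolve('lib', 'handleXml'));
    return http.createServer(function(req, res) {
        handleXml(req, res, function(err) {
            res.statusCode = err ? (err.status || 500) : 200;
            res.end(err ? err.message : JSON.stringify(req.body));
        });
    });
};

In the tests you can then hand the unlistened server straight to supertest, which binds it to an ephemeral port for you, so no hard-coded 5999 is needed:

var supertest = require('supertest');
var createServer = require('./AppFactory');

// inside a mocha test, with payload and done as in the question above
supertest(createServer())
    .post('/event')
    .set('Content-Type', 'application/xml')
    .send(payload)
    .expect(200, done);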

Related

Postman GET url request test for simple server API

I'm learning server-side JavaScript and am trying to test a GET request using Postman, where the server (server.js) receives a request for products.html (products.js) and returns the products JSON.
My files are packaged via npm, products.js is held in node_modules, and when I run server.js from the command line and then open localhost:3000 in the browser, I can see that it's connecting. But the browser returns a 404 and the command line shows a 400.
I feel like this is likely a syntax or file path error (or possibly I just don't know how to use Postman), but I've been running myself in circles trying to fix it. Does anything stand out as wrong, or any advice on how to correct it?
//server.js
var fs = require('fs');
var http = require('http');
var url = require('url');
var product_mgr = require('product_manager'),
    path = require('path');

//create server that listens on port 3000
http.createServer(function(req, res) {
    var urlObj = url.parse(req.url, true, false);
    var filename = urlObj.pathname;
    fs.readFile(filename, function(err, data) {
        // if url not returned, show error code 404
        if (err) {
            res.writeHead(404, {'Content-Type': 'application/json'});
            return res.end("404 Not Found");
        } else {
            // if url returned, show success code 200
            res.writeHead(200, {'Content-Type': 'application/json'});
            res.write(data);
            return res.end();
        }
    });
}).listen(3000, function() {
    console.log('Listening on port 3000.');
});
//products.js
//create class that represents a product
//include name, price, description and qty
class Product {
    constructor(name, price, description, qty) {
        this.name = name;
        this.price = price;
        this.description = description;
        this.qty = qty;
    }
}

var product_1 = new Product('Yo Yo', 2.99, 'Spinning Toy', 40);
var product_2 = new Product('Hot Wheel', 1.99, 'Tiny Toy Car', 30);
var product_3 = new Product('Glove', 23.49, 'Baseball Glove', 12);
var productArray = [product_1, product_2, product_3];

//create function called products which returns JSON array of product info
function products() {
    return JSON.stringify(productArray);
}

//export products function
exports.products = products;
var fs = require('fs');
var http = require('http');
var url = require('url');
var product_mgr = require('product_manager');

http.createServer(function(req, res) {
    var urlObj = url.parse(req.url, true, false);
    var filename = "." + urlObj.pathname;
    if (req.method == "GET" && req.url == "/products.html") {
        res.writeHead(200, {
            'Content-Type': 'application/json'
        });
        res.end(JSON.stringify({
            error: null
        }));
    } else {
        res.writeHead(404, {
            'Content-Type': 'application/json'
        });
        res.end(JSON.stringify({
            error: "Invalid Request"
        }));
    }
}).listen(3000, function() {
    console.log('Listening on port 3000.');
});
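
If the goal is for that route to return the product list itself rather than a placeholder, the 200 branch could presumably call the exported products() function from products.js (assuming the product_manager require resolves to that module):

    if (req.method == "GET" && req.url == "/products.html") {
        res.writeHead(200, {'Content-Type': 'application/json'});
        // products() already returns the product array as a JSON string
        return res.end(product_mgr.products());
    }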

NodeJS multi site web scrape

I am learning NodeJS and trying to scrape a fan wikia to get names of characters and store them in a json file. I have an array of character names and I want to loop through them and scrape each character name from each url in the array. The issue I am running into is:
throw new Error('Can\'t set headers after they are sent.');
Here is my source code at the moment:
var express = require('express');
var fs = require('fs');
var request = require('request');
var cheerio = require('cheerio');
var app = express();

app.get('/', function(req, res) {
    var bosses = ["Boss1", "Boss2"];
    for (boss in bosses) {
        url = 'http://wikiasiteexample.com/' + bosses[boss];
        request(url, function(error, response, html) {
            if (!error) {
                var $ = cheerio.load(html);
                var title;
                var json = { title: "" };
                $('.page-header__title').filter(function() {
                    var data = $(this);
                    title = data.text();
                    json.title = title;
                });
            }
            fs.writeFile('output.json', JSON.stringify(json, null, 4), {'flag': 'a'}, function(err) {
                if (err) {
                    return console.error(err);
                }
            });
            res.send('Check your console!');
        });
    }
});
app.listen('8081');
console.log('Running on port 8081');
exports = module.exports = app;
You're calling res.send() once for every scrape request you make inside the loop.
The incoming HTTP request can only have one response, so the second call throws that error.
You must call res.send() exactly once, after all the scrapes have finished.
Promises (and Promise.all()) will help you do that.
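
A rough sketch of that approach, reusing the same request/cheerio calls and the placeholder URLs from the question:

var express = require('express');
var fs = require('fs');
var request = require('request');
var cheerio = require('cheerio');
var app = express();

// wrap a single scrape in a Promise so all of them can be awaited together
function scrapeBoss(url) {
    return new Promise(function (resolve, reject) {
        request(url, function (error, response, html) {
            if (error) return reject(error);
            var $ = cheerio.load(html);
            resolve({ title: $('.page-header__title').text() });
        });
    });
}

app.get('/', function (req, res) {
    var bosses = ["Boss1", "Boss2"];
    var urls = bosses.map(function (boss) {
        return 'http://wikiasiteexample.com/' + boss;
    });

    Promise.all(urls.map(scrapeBoss))
        .then(function (results) {
            // write the whole result set once, then respond exactly once
            fs.writeFile('output.json', JSON.stringify(results, null, 4), function (err) {
                if (err) console.error(err);
            });
            res.send('Check your console!');
        })
        .catch(function (err) {
            res.status(500).send(err.message);
        });
});

app.listen(8081);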

Parse request in my simple Node Js server

I'm new to Node and am trying to build a simple server in Node using Express. The requests are in the form of, say, /input00001/1/output00001. What I need to do is parse this request and, if the flag (the middle value) is 1, replace the file \home\inputfiles\input00001.txt with the file \home\outputfiles\output00001.txt. How is it possible to do that?
Here is my simple server so far. I'm OK with not using Express and using pure Node.js if that makes things easier.
const express = require('express');
const app = express();
const port = 8000;

app.get('/', (request, response) => {
    response.send('Hello from Express!');
    request.param
});

app.get('/*', (request, response) => {
    response.send('Start!');
    var url = request.originalUrl;
});

app.listen(port, (err) => {
    if (err) {
        return console.log('something bad happened', err);
    }
    console.log(`server is listening on ${port} for incoming messages`);
});
You should set up a route that expects these items as URL parameters and then use those parameters to do what you want. For example, if your URL is /input00001/1/output00001 then you could set up a route like this:
app.get('/:input/:flag/:output', (req, res) => {
    var params = req.params;
    var input = params.input;   // input00001
    var flag = params.flag;     // 1
    var output = params.output; // output00001
    // now do what you need to with input, flag, and output
    if (typeof flag !== 'undefined' && flag == 1) {
        // backslashes have to be escaped inside a JS string literal
        var file_name_string = '\\home\\inputfiles\\input00001.txt';
        // don't reuse the name `res` here, that would shadow the response object;
        // note that .replace only swaps the first "input" ("inputfiles" -> "outputfiles")
        var new_file_name = file_name_string.replace("input", "output");
    }
    console.log(input, flag, output);
    res.send("done");
});
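
As for actually replacing one file with the other, a rough sketch (assuming "replace" means copying the output file over the input file, and using forward-slash versions of the paths from the question) could look like this, in place of the handler above:

const fs = require('fs');

app.get('/:input/:flag/:output', (req, res) => {
    const { input, flag, output } = req.params;
    if (flag == 1) {
        const inputPath = '/home/inputfiles/' + input + '.txt';
        const outputPath = '/home/outputfiles/' + output + '.txt';
        // copy the output file over the input file, then answer the request once
        fs.copyFile(outputPath, inputPath, (err) => {
            if (err) return res.status(500).send(err.message);
            res.send('replaced ' + inputPath + ' with ' + outputPath);
        });
    } else {
        res.send('nothing to do');
    }
});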

Building simple nodejs API with streaming JSON output

I am trying to build a simple node.js based streaming API. All I want is that, as I hit the server URL, the output streams a set of test data (JSON), like the Twitter streaming API.
var app = require('express')();
var server = require('http').Server(app);
var io = require('socket.io')(server);
server.listen(8083);

app.get('/', function (req, res) {
    res.write(io.on('connection', function (socket) {
        socket.emit('item', { hello: 'world' });
    }));
});
So, if I do curl http://localhost:8083/, I want output something like:
$ curl http://localhost:8083/
{hello: 'world'}
{hello: 'world'}
{hello: 'world'}
{hello: 'world'}
...
I am new to node.js and web sockets. I might be horribly wrong on the basics of how node works, so let me know the best solution.
First, it's better to put the JSONStream part inside a middleware like so:
var _ = require('lodash');

// https://github.com/smurthas/Express-JSONStream/blob/master/index.js
function jsonStream(bytes) {
    return function jsonStream(req, res, next) {
        // for pushing out jsonstream data via a GET request
        var first = true;
        var noop = function () {};
        res.jsonStream = function (object, f) {
            f = _.isFunction(f) ? f : noop;
            if (!(object && object instanceof Object)) {
                return f();
            }
            try {
                if (first) {
                    first = false;
                    res.writeHead(200, {
                        'Content-Type': 'application/json',
                        'Cache-Control': 'no-cache',
                        'Connection': 'keep-alive'
                    });
                }
                res.write(JSON.stringify(object) + '\n');
            } catch (err) {
                return _.defer(f.bind(null, err));
            }
            f();
        };
        next();
    };
}
Then, let's say you want to be notified via this API each time someone connects to socket.io
var app = require('express')();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var _ = require('lodash');
var EventEmitter = require('events').EventEmitter;

server.listen(8083);

var mediator = new EventEmitter();

io.on('connection', function (socket) {
    mediator.emit('io:connection:new', socket);
});

// the second parameter specifies an array of middleware,
// here we use our previously defined jsonStream
app.get('/', [jsonStream()], function (req, res) {
    function onNewConnection(socket) {
        res.jsonStream({
            type: 'newConnection',
            message: 'got a new connection',
            socket: {
                id: socket.id
            }
        });
    }

    // bind `onNewConnection` on the mediator; we have to use a mediator gateway
    // because socket.io does not offer a nice implementation of "removeListener" in 1.1.0.
    // this way, each time someone connects to socket.io,
    // the current route will add an entry to the stream
    mediator.on('io:connection:new', onNewConnection);

    // unbind `onNewConnection` from the mediator
    // when the user disconnects
    req.on('close', function () {
        mediator.removeListener('io:connection:new', onNewConnection);
    });

    res.jsonStream({
        type: 'welcome',
        message: 'waiting for connection'
    });
});
Finally, if you want to test this code without connecting to socket.io, use the following simulator:
// Simulate socket.io connections using the mediator
(function simulate() {
    var dummySocket = {
        id: ~~(Math.random() * 1000)
    };
    mediator.emit('io:connection:new', dummySocket);
    setTimeout(simulate, Math.random() * 1000);
})();
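
With the simulator running, hitting the endpoint should produce a line-delimited JSON stream, roughly like this (the ids are random, and curl's -N flag just disables its output buffering):

$ curl -N http://localhost:8083/
{"type":"welcome","message":"waiting for connection"}
{"type":"newConnection","message":"got a new connection","socket":{"id":412}}
{"type":"newConnection","message":"got a new connection","socket":{"id":87}}
...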

How to push changes in a database to the client using socket.io?

I am working on an app which, among other things, pushes data to the client when an update occurs on the database. The trouble is that the websocket on node.js listens to the database on a different port than the node server. The guy before me wrote a piece of code and then dumped it on me. The node server looks somewhat like this:
var handler = http.createServer(function(req, res) {
    session(req, res, function(req, res, body) {
        if (!req.session || !req.session.data || !req.session.data.uid || req.session.data.uid == '' || !req.session.data.role || req.session.data.role == '') {
            var uri = url.parse(req.url).pathname;
            if (req.method == 'GET' && uri == "/attendance-node/getMonthFromReport") {
                var uri = url.parse(req.url).pathname;
                var url_parts = url.parse(req.url, true);
                processgetMonthFromReport(req, res, uri, url_parts.query);
                return;
            }
            res.writeHead(401, {"Content-Type": "text/plain"});
            res.write("401 Unauthorized");
            res.end();
            return;
        }
        if (req.method == 'POST') {
            var uri = url.parse(req.url).pathname;
            var qs = require('querystring');
            var POSTVAR = qs.parse(body, '&', '=', {"maxKeys": 0});
            //var POSTVAR = JSON.parse(body);
            handleRequest(req, res, uri, POSTVAR);
        }
        if (req.method == 'GET') {
            var uri = url.parse(req.url).pathname;
            var url_parts = url.parse(req.url, true);
            handleRequest(req, res, uri, url_parts.query);
        }
    });
}).listen(3014, "127.0.0.1");

var io = require('socket.io').listen(8077, "127.0.0.1");
sys.puts("websocket Server running on port 8077");

io.configure(function () {
    io.set('transports', ['websocket', 'flashsocket', 'xhr-polling', 'jsonp-polling']);
    io.set('log level', 0);
});

io.sockets.on('connection', function (socket) {
    io.sockets.emit('init', "i am working via websocket");
});
As you can see, the node server is listening on 3014 and the socket on 8077. Now how am I supposed to provide a handler for the message received on the socket and forward it to the node server's client?
Note: I am fairly new to web development. So basically I was thrown in the water and now I am learning to swim.
P.S. Also, what would the client side of the socket look like?
P.P.S. The database sending update notification to the socket is already taken care of. It comes as a POST request.
Thanx in advance!!
It sounds like you want to have socket.io on the client side (the browser) as well.
I'd say the best solution would be to have socket.io run on the same port as your web server. However, if this is not possible and you must keep the web socket that the database uses separate from the web server, you could run two instances of socket.io.
One would be attached to the web server, and the other would be for the database.
var app = require('http').createServer(handler)
  , IO = require('socket.io')
  , web_io = IO.listen(app)
  , data_io = IO.listen(8080)
  , fs = require('fs');

app.listen(80);

function handler(req, res) {
    fs.readFile(__dirname + '/index.html',
        function (err, data) {
            if (err) {
                res.writeHead(500);
                return res.end('Error loading index.html');
            }
            res.writeHead(200);
            res.end(data);
        });
}

web_io.sockets.on('connection', function (socket) {
    socket.on('some_event', function (data) {
        console.log(data);
    });
});

data_io.sockets.on('connection', function (socket) {
    socket.on('database_update', function (data) {
        // will be sent to every socket listening on port 80 (browser sockets, most likely)
        web_io.sockets.emit('database_update', data);
    });
    socket.on('disconnect', function () {
        web_io.sockets.emit('user disconnected');
    });
});
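
As for the P.S. about the client side, a rough sketch of the browser script (assuming index.html first loads the socket.io client via a script tag pointing at /socket.io/socket.io.js):

// client-side script in index.html
// connect to the web-facing socket.io instance (the one attached to the port-80 server)
var socket = io.connect('http://localhost');

socket.on('init', function (msg) {
    console.log(msg); // "i am working via websocket"
});

socket.on('database_update', function (data) {
    // update the page with whatever the database pushed
    console.log('database changed:', data);
});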