I'm using Nightwatch to test my Vue.js app. I have the following spec:
module.exports = {
  'wrong email or password': function (browser) {
    const devServer = browser.globals.devServerURL
    var nock = require('nock');
    var couchdb = nock('http://localhost:3000')
      .get('/api/v1/login')
      .reply(401, {
        error: 'dupa'
      });
    browser
      .url(devServer + '/login')
      .setValue('input[type=email]', 'email@example.com')
      .setValue('input[type=password]', 'password')
      .click('.login')
      .assert.containsText('#app', 'Niepoprawny email lub hasło.')
      .end()
  }
}
In my test I'm trying to use https://github.com/node-nock/nock. But unfortunately this doesn't mock any requests. What am I doing wrong?
Nock replaces the HTTP mechanism in the JavaScript environment in which it is run.
Since you're running it in your test, which isn't running in the browser, the browser's environment is unchanged.
There are several things you could do, but that depends on what you're trying to achieve:
You could write a fake server and have it listen at port 3000 and answer any way you like.
You could have your application choose its HTTP mechanism at runtime according to configuration, and load a Nock 'strategy' in the test (see the sketch after this list).
If you have other tests checking the UI, you could replace this test with unit tests and integration tests for the functions that do the actual requests.
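For the second option, a rough sketch of a swappable request layer might look like this (the file names and the NODE_ENV check are assumptions, not part of the original setup):

// api.js - pick the HTTP mechanism based on configuration
const useFakes = process.env.NODE_ENV === 'test';

module.exports = useFakes
  ? require('./api.nock')  // test double that sets up Nock interceptors
  : require('./api.http'); // real HTTP calls for production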
To write a fake, you just need a simple server that returns the answer you want. Here's an example with express.js:
const express = require('express')
const app = express()

app.get('/api/v1/login', function (req, res) {
  res.send('Some response')
})

app.listen(3000, function () {
  console.log('server listening on port 3000')
})
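If you want the fake to reproduce the failed login from the spec above, Express also lets you set the status code and a JSON body explicitly, for example:

app.get('/api/v1/login', function (req, res) {
  // answer with the same 401 + JSON body the test expects
  res.status(401).json({ error: 'dupa' })
})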
Related
I need to run some code that is going to block, and some other code that, while the first part is blocked, starts some other actions.
The use case is as follows:
I have a file, called index.ts, running an express and socket server
I have a testfile, called test.spec.ts, that needs to be able to start the express server and then initiate some commands for running tests either via HTTP request or socket message(I would prefer HTTP)
The only way I found to keep the webserver alive is instantiating it with
import { spawnSync } from 'child_process';

spawnSync('ts-node', ['path/to/index.ts'], { cwd: 'path/to/workdir' });
which blocks until the child process is killed (which could be up to 30 minutes later).
Is there a way to split this into two processes: one that blocks when started, and one that keeps working and exposes some functions for interacting with the test file?
My target would look like this:
// index.ts
import * as express from "express";

const app = express();
const port = 3000;

app.get('/', (req, res) => {
  res.send('Hello World!');
});

app.listen(port, () => {
  console.log(`Example app listening on port ${port}`);
});
// test.spec.ts
import { spawnSync } from 'child_process';

describe("Test", () => {
  it("Test", async () => {
    // create somehow a child process that should block
    const childProcess = ...
    childProcess.do(spawnSync('ts-node', ['path/to/index.ts'], { cwd: 'path/to/workdir' })) // should block now
    // the following code should run in parallel
    await new Promise(r => setTimeout(r, 5000)); // wait some time until the webserver is ready
    fetch('http://localhost:3000').then((ret) => {
      expect(ret, 'to be Hello World').to.contain('Hello World!');
    })
    ... // more tests
  });
});
I think I found the reason and a possible solution myself.
spawnSync blocks the parent's event loop until the child process exits, so nothing else in the test file can run in the meantime. Luckily I was able to discover a totally different approach: I installed the npm-run-all package and created a new script in my package.json that calls run-p script1 script2. This way I can run both in parallel, and the process with the webserver stays alive until all tests have run.
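A rough sketch of the resulting package.json scripts (the script names and the mocha-based test command are assumptions, not taken from the original setup):

{
  "scripts": {
    "serve": "ts-node path/to/index.ts",
    "test:spec": "mocha -r ts-node/register test.spec.ts",
    "test": "run-p -r serve test:spec"
  }
}

run-p ships with npm-run-all; its -r (--race) flag kills the remaining tasks, here the webserver, once one task exits, so the npm script terminates after the tests finish.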
I have created a Windows service from a Node.js application using the node-windows package. Below is my code.
Main.js
var Service = require('node-windows').Service;

// Create a new service object
var svc = new Service({
  name: 'SNMPCollector',
  description: 'SNMP collector',
  script: './app.js',
  nodeOptions: [
    '--harmony',
    '--max_old_space_size=4096'
  ]
  //, workingDirectory: '...'
});

// Listen for the "install" event, which indicates the
// process is available as a service.
svc.on('install', function () {
  svc.start();
});

svc.install();

/* svc.uninstall(); */
App.js
const { workerData, parentPort, isMainThread, Worker } = require('worker_threads')

var NodesList = ["xxxxxxx", "xxxxxxx"]

module.exports.run = function (Nodes) {
  if (isMainThread) {
    while (Nodes.length > 0) {
      // my logic
    }
  }
}
Now when I run main.js, it creates a Windows service and I can see it running in services.msc.
But how can I call this run() method, which lives inside the running service, from an outside application? I couldn't find any solution for this; any help would be great.
You might consider simply importing your run function where you need it and running it there; then there is no need for a Windows service or main.js. This assumes that "any outside application" is a Node application.
In your other application you would do the following:
const app = require('<path to App.js>');
app.run(someNodes)
For broader usage, or if you do need to run it as a service, you could start an express (or other) webserver in your App.js with an endpoint that invokes your run function. Then, from anywhere else, you make an HTTP call to that endpoint.
App.js
const express = require('express')
const bodyParser = require('body-parser')
const { workerData, parentPort, isMainThread, Worker } = require('worker_threads')

const app = express()
const port = 3000

var NodesList = ["xxxxxxx", "xxxxxxx"]

const run = function (Nodes) {
  if (isMainThread) {
    while (Nodes.length > 0) {
      // my logic
    }
  }
}

app.use(bodyParser.json())

app.post('/', (req, res) => res.send(run(req.body)))

app.listen(port, () => console.log(`Example app listening at http://localhost:${port}`))
(Based off of example for express - https://expressjs.com/en/starter/hello-world.html)
You'll need to install both express and body-parser: $ npm install --save express body-parser from the directory of App.js.
From your other applications you will need to call the endpoint http://localhost:3000 with a POST request and the Nodes as a JSON array.
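For instance, a call from another Node application might look like this (a sketch; the node names are placeholders):

const http = require('http')

const req = http.request({
  host: 'localhost',
  port: 3000,
  path: '/',
  method: 'POST',
  headers: { 'Content-Type': 'application/json' }
}, (res) => {
  res.on('data', (chunk) => process.stdout.write(chunk))
})

// send the Nodes as a JSON array in the request body
req.write(JSON.stringify(['xxxxxxx', 'xxxxxxx']))
req.end()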
You can expose it on a port like the other answer mentions, though you'll want to make sure you don't expose it more broadly depending on the environment you're running in. There's a good answer here on ensuring the port is locked down.
As an alternative to exposing it on a port you can simply call the function by running the command in any other application:
node -e 'require("/somePathToYourJS/app").run()'
One concern is that app.js will now run with whatever permissions the calling application has, although that can be resolved by running runas first. More details here. For example:
runas /user:domainname\username "node -e 'require(^"/somePathToYourJS/app^").run()'"
My Node app can read a stream from a Kafka producer and console.log it to the terminal in real time. But I would like to update my web app the same way (in real time). How can I implement this?
I start my app with the 'node index.js' command (or npm start).
index.js:
'use strict';

var express = require('express'),
    app = express(),
    server,
    data = [];

...

consumer.on('message', function (message) {
  console.log(message);
  data.push(message);
  //global.location.reload();
});

...

app.get('/', function (req, res) {
  res.send(data);
});

server = app.listen(3002, function () {
  console.log('Listening on port 3002');
});
I think I need to modify res.send(data) or add some code to the on('message') event handler.
You need to keep a connection open between the client and the server.
Try the socket.io package to push messages to the client in real time.
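A minimal sketch of how that could look in index.js, assuming socket.io is attached to the existing server (the 'kafka-message' event name is made up):

var express = require('express'),
    app = express(),
    server = require('http').createServer(app),
    io = require('socket.io')(server);

consumer.on('message', function (message) {
  // broadcast every Kafka message to all connected clients
  io.emit('kafka-message', message);
});

server.listen(3002);

On the client side, a socket.on('kafka-message', ...) handler can then update the page without any reload.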
I need to create an application that proxies a request from port A to port B.
For instance, if a user connects on port 3000, he will be routed (under the hood) to port 3001. Therefore the "original" application will run on port 3001, but in the client (browser) the user will use port 3000.
Not a redirect...
http://example.com:3000/foo/bar
A new server will be created which listens on port 3001; all the calls are still made to port 3000, but they are actually served by the new server on the new port.
Since port 3000 is actually occupied by my reverse-proxy app, how should I test it?
Is there a way to verify that this is working, e.g. by unit testing?
I've found this module https://github.com/nodejitsu/node-http-proxy which might be able to help.
Straight from the node-http-proxy docs, this is quite simple. You can test it simply by making an HTTP request to port 3000; if you get the same response as you do on port 3001, it's working:
var http = require('http'),
    httpProxy = require('http-proxy');

//
// Create a proxy server with custom application logic
//
var proxy = httpProxy.createProxyServer({});

var server = http.createServer(function (req, res) {
  // You can define here your custom logic to handle the request
  // and then proxy the request.
  proxy.web(req, res, {
    // Your real Node app
    target: 'http://127.0.0.1:3001'
  });
});

console.log("proxy listening on port 3000")
server.listen(3000);
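For a quick manual check, assuming the real app is already running on port 3001, the two responses should be identical:

$ curl http://localhost:3001/foo/bar
$ curl http://localhost:3000/foo/bar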
I highly recommend you write a suite of integration tests using something like mocha for your project as well. In this way, you can run your tests both against your server directly and against your proxy. If the tests pass against both, then you can be assured your proxy is behaving as expected.
A unit test using mocha, should.js, and the request module would look something like this:
var should = require('should');
var request = require('request');

describe('server', function () {
  it('should respond', function (done) {
    // `done` marks this test as asynchronous
    request.get({
      url: "http://localhost:3000",
      // ^ Port of your proxy
      json: true // parse the response so body.result is available
    }, function (e, r, body) {
      if (e)
        throw new Error(e);
      body.result.should.equal("It works!");
      done(); // signal mocha that the async test has finished
    });
  });
});
You then simply run your test (once Mocha is installed):
$ mocha path/to/your/test.js
You can also verify that this is working by adding the following to the proxy (as described in Remus' answer):
proxy.on('proxyReq', function (proxyReq, req, res, options) {
  res.setHeader('App-Proxy', 'proxy');
});
In this way you can verify that your "original" call is working against the new proxy server, and it even gives you something to assert on in a unit test; in addition, you can use the changeOrigin: true property...
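For example, the mocha test above could assert on that header instead of the response body (a sketch; note that Node lowercases incoming header names):

request.get({ url: "http://localhost:3000" }, function (e, r, body) {
  r.headers['app-proxy'].should.equal('proxy');
  done();
});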
I'm quite new to Node.js / Express, and I'm using it as a backend for an AngularJS app. I've looked all over Stack Overflow for some help with my problem, but I can't seem to figure out how to port the suggestions to my code.
My application works as follows:
A long-running Scala process periodically sends my Node.js application log messages. It does this by posting to an HTTP API.
When the post is received, my application writes the log message to MongoDB
The log messages are then sent in real time to the Angular client.
I am having a problem with Node's modules, as I can't figure out how to refer to the socket instance in the Express controller.
As you can see, in server.js, socket.io is instantiated there. However, I would like the controller itself, logs.js, to be able to emit using the socket.io instance.
How can I refer to io in the controller? I'm not sure how to pass the io instance to the controller so I can emit messages?
Here is some of the Node code:
server.js
var app = express(),
    server = require('http').createServer(app),
    io = require('socket.io').listen(server);

require('./lib/config/express')(app);
require('./lib/routes')(app);

server.listen(config.port, config.ip, function () {
  console.log('Express server listening on %s:%d, in %s mode', config.ip, config.port, app.get('env'));
});

io.set('log level', 1); // reduce logging

io.sockets.on('connection', function (socket) {
  console.log('socket connected');
  socket.emit('message', {
    message: 'You are connected to the backend through the socket!'
  });
});

exports = module.exports = app;
routes.js
var logs = require('./controllers/logs'),
    middleware = require('./middleware');

module.exports = function (app) {
  app.route('/logs')
    .post(logs.create);
}
logs.js
exports.create = function (req, res) {
  // write body of api request to mongodb (this is fine)
  // emit log message to angular with socket.io (how do i refer to the io instance in server.js?)
};
You can use a pattern based on standard JS closures. The main export in logs.js will not be the controller function itself, but a factory function that accepts all needed dependencies and creates the controller:
exports.create = function (socket) {
  return function (req, res) {
    // write body of api request to mongodb
    socket.emit();
  }
}
Then, when you want to use it:
app.route('/logs').post(logs.create(socket));
Since you set up your routes in a separate module, you have to use the same pattern in routes.js: routes should receive the socket to use as a parameter.
This pattern works well if you want to handle those things with DI later, or test your controllers with mock "sockets".
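Putting it together, a minimal sketch of that wiring, assuming the file layout from the question (here the io instance from server.js is what gets passed down):

// server.js
require('./lib/routes')(app, io);

// routes.js
var logs = require('./controllers/logs'),
    middleware = require('./middleware');

module.exports = function (app, io) {
  app.route('/logs')
    .post(logs.create(io)); // the factory closes over io
}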