I have created a Windows service from a Node.js application using the node-windows package. Below is my code.
Main.js
var Service = require('node-windows').Service;
// Create a new service object
var svc = new Service({
  name: 'SNMPCollector',
  description: 'SNMP collector',
  script: './app.js',
  nodeOptions: [
    '--harmony',
    '--max_old_space_size=4096'
  ]
  //, workingDirectory: '...'
});
// Listen for the "install" event, which indicates the
// process is available as a service.
svc.on('install', function () {
  svc.start();
});
svc.install();
/* svc.uninstall(); */
App.js
const { workerData, parentPort, isMainThread, Worker } = require('worker_threads')
var NodesList = ["xxxxxxx", "xxxxxxx"]
module.exports.run = function (Nodes) {
  if (isMainThread) {
    while (Nodes.length > 0) {
      // my logic
    }
  }
}
Now when I run main.js, it creates a Windows service and I can see the service running in services.msc.
But how can I call this run() method, which is inside the running service, from any outside application? I couldn't find any solution for this; any help would be great.
You might consider simply importing your run function where you need it and running it there; then there is no need for a Windows service or main.js. This assumes that "any outside application" is a Node application.
In your other application you would do the following:
const app = require('<path to App.js>');
app.run(someNodes)
For broader usage, or if you do need to run it as a service, you could start an Express (or any other) web server in your App.js with an endpoint that invokes your run function. Then from anywhere else you'll need to make an HTTP call to that endpoint.
App.js
const express = require('express')
const bodyParser = require('body-parser')
const { workerData, parentPort, isMainThread, Worker } = require('worker_threads')
const app = express()
const port = 3000
var NodesList = ["xxxxxxx", "xxxxxxx"]
const run = function (Nodes) {
  if (isMainThread) {
    while (Nodes.length > 0) {
      // my logic
    }
  }
}
app.use(bodyParser.json())
app.post('/', (req, res) => res.send(run(req.body)))
app.listen(port, () => console.log(`Example app listening at http://localhost:${port}`))
(Based off of example for express - https://expressjs.com/en/starter/hello-world.html)
You'll need to install both express and body-parser: $ npm install --save express body-parser from the directory of App.js.
From your other applications you will need to call the endpoint http://localhost:3000 with a POST request and the Nodes as a JSON array.
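For example, the call from another Node application could look roughly like this (a minimal sketch using Node's built-in http module; the file name and the sample payload are placeholders):
// caller.js - hypothetical example of invoking the service's endpoint
const http = require('http');

const payload = JSON.stringify(['xxxxxxx', 'xxxxxxx']); // your Nodes array

const req = http.request({
  host: 'localhost',
  port: 3000,
  path: '/',
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Content-Length': Buffer.byteLength(payload)
  }
}, (res) => {
  let body = '';
  res.on('data', (chunk) => (body += chunk));
  res.on('end', () => console.log('Service responded:', body));
});

req.on('error', (err) => console.error('Request failed:', err));
req.write(payload);
req.end();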
You can expose it on a port like the other answer mentions, though you'll want to make sure you don't expose it more broadly depending on the environment you're running in. There's a good answer here on ensuring the port is locked down.
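For example, assuming only local processes need to reach the endpoint, one simple option is to bind the Express server to the loopback interface so it is not reachable from other machines (a sketch of the listen call from the answer above):
// bind only to 127.0.0.1 so the endpoint is not exposed to other hosts
app.listen(port, '127.0.0.1', () => console.log(`Listening at http://127.0.0.1:${port}`));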
As an alternative to exposing it on a port you can simply call the function by running the command in any other application:
node -e 'require("/somePathToYourJS/app").run()'
One concern is that app.js will now run with whatever permissions the calling application has, although that can be addressed by running it through runas first. More details here. An example is:
runas /user:domainname\username "node -e 'require(^"/somePathToYourJS/app^").run()'"
I'm using Node v18 with the experimental test runner. I use express as a dev dependency for HTTP integration tests, which works fine, but there is one test that freezes or stops the test runner (it doesn't continue).
I'm using TS but can also reproduce it with JS; the test file HttpTests.js contains:
import assert from 'assert/strict';
import express from 'express';
import test from 'node:test';
test('Http', async () => {
  const server = express();
  server.listen(3000);
  assert.ok(false);
});
Running this with the npm script "test": "node --test $(find . -name '*Tests.js')" breaks the test runner.
Any ideas what is wrong or missing?
Why am I not using the default execution model?
Since I'm using TS I had to find a way to use ts-node with the testrunner. You can find more information here
https://github.com/nodejs/node/issues/43675
So currently my TS project is using this npm script, which works fine
Reproduction
I created a minimal reproduction repository with and without TypeScript
https://github.com/matthiashermsen/reproduce-broken-test-ts
https://github.com/matthiashermsen/reproduce-broken-test-js
For reproduction purposes run mkdir reproduction && cd reproduction && npm init -y && npm install express. After that create a test directory with a file HttpTests.js containing the content as shown above. Change the package.json to
{
  "name": "reproduction",
  "type": "module",
  "scripts": {
    "test": "node --test $(find . -name '*Tests.js')"
  }
}
and run the script; the test runner should not finish.
The testrunner is still experimental
Yes, I know. But there are many tests in the project that work perfectly fine. Some sample code:
await t.test('subtest - saves data.', async () => {
  const expectedResult = {};
  const api = express();
  const port = await getRandomPort();
  const server = api
    .use(express.json())
    .post('/save', (request, response) => {
      response.json(expectedResult);
    })
    .listen(port);
  const httpDataProvider = new HttpDataProvider({ url: `http://localhost:${port}` });
  const actualResult = await httpDataProvider.saveSomething();
  assert.deepEqual(actualResult, expectedResult);
  server.close();
});
The issue is the async activity that you start (server.listen()) but never stop before the test errors out (via the exception thrown by assert.ok(false)).
Your second test case will probably also stall if actualResult doesn't deep-equal expectedResult because of the same issue (server.close() won't be called).
A workaround would be to always make sure the server gets closed in the end:
test('Http', async () => {
  const app = express();
  const server = app.listen(3000);
  try {
    assert.ok(false);
  } finally {
    server.close();
  }
});
Most test frameworks provide "before/after" functionality that can be used to set up or tear down auxiliary objects before and after a test.
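For example, with recent versions of the node:test runner (which export before/after hooks, assuming your Node version already ships them) the server lifecycle could be handled roughly like this:
import assert from 'assert/strict';
import express from 'express';
import { test, before, after } from 'node:test';

let server;

// start the server once before the tests run
before(() => {
  server = express().listen(3000);
});

// always close it afterwards, so a failing assertion cannot keep the event loop busy
after(() => {
  server.close();
});

test('Http', async () => {
  assert.ok(false); // the server is still torn down by the after() hook
});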
I need to be able to run some code that blocks, and some other code that then, while the first is blocked, starts some other actions.
The use case is as follows:
I have a file, called index.ts, running an express and socket server.
I have a test file, called test.spec.ts, that needs to be able to start the express server and then initiate some commands for running tests, either via HTTP request or socket message (I would prefer HTTP).
The only way I found to keep the webserver alive is instantiating it with
import { spawnSync } from 'child_process';
spawnSync('ts-node', ['path/to/index.ts'], { cwd: 'path/to/workdir' });
which will block until the child process is killed ( could be up to 30min later ).
Is there a way to split into two processes, one that gets blocked when starting it and one continuing to work that exposes some functions for interactions with the test file?
My target would look like this:
// index.ts
import * as express from "express";
const app = express();
const port = 3000;
app.get('/', (req, res) => {
  res.send('Hello World!');
});
app.listen(port, () => {
  console.log(`Example app listening on port ${port}`);
});
// test.spec.ts
import { spawnSync } from 'child_process';
describe("Test",()=>{
it("Test", async ()=>{
// create somehow a child process that should block
const childProcess = ...
childProcess.do(spawnSync('ts-node', ['path/to/index.ts'], {cwd: 'path/to/workdir'}) //should block now
// the following code should run in parallel
await new Promise(r => setTimeout(r, 5000)); //wait some time until the webserver is ready
fetch('http://localhost:3000').then((ret)=>{
expect(ret,'to be Hello World').to.contain('Hello World!");
})
... // more tests
});
});
I think I found the reason and a possible solution myself.
Node runs everything in a single process, so the synchronous spawn blocks the rest of the test file; therefore this approach can't work. Luckily I discovered a totally different approach: I installed the npm-run-all package and created a new script in my package.json that calls run-p script1 script2. This way both run in parallel and the process with the web server stays alive until all tests have run.
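As a rough sketch, the package.json scripts could look like this (the script names and the test command are placeholders for whatever runner you use; run-p comes from npm-run-all):
{
  "scripts": {
    "start:server": "ts-node path/to/index.ts",
    "run:tests": "mocha -r ts-node/register path/to/test.spec.ts",
    "test": "run-p start:server run:tests"
  }
}
run-p also supports a --race flag that kills the remaining tasks once one of them exits, which can be handy so the web server does not keep running after the tests finish.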
I have different routes in my Node.js application and I have to use socket.io in every route to make my Node and React application real-time. But I have the below structure in my Node.js application.
router.js
const express = require('express');
const router = express.Router();
const worksheetController = require('../controllers/worksheet')
const attendenceController = require('../controllers/attendence')
router.route('/worksheets')
  .get(
    worksheetController.getWorksheet
  )
  .post(
    worksheetController.validateWorksheet,
    worksheetController.addWorksheet,
    attendenceController.markAttendence
  )
router.route('/attendances')
  .get(
    attendenceController.getAttendance
  )
module.exports = router;
server.js
const express = require('express');
const router = require('./router');
const app = express();
app.use('/api', router);
app.listen('5000', () => {
  console.log('Listening on port');
});
module.exports = app;
So, I want to know:
1) Do I need to use the http module to create a server if I want to use socket.io?
2) How can I use socket.io for different routes?
I found posts on Stack Overflow that match my question, which are this, this and this. But I don't think those work for me, so please help me.
You can use the http module, or any of the other options shown in the socket.io documentation, to run socket.io.
I'm not sure about your exact setup, but when you implement socket.io I think you should run a separate Node app (meaning you have two Node.js apps: one for the normal HTTP API and one for socket.io). You can then use the path option when initializing the socket.io app (https://socket.io/docs/server-api/#new-Server-httpServer-options), because when you deploy to production you will usually run the socket.io app behind a reverse proxy such as nginx. Socket.io supports multiple transports and protocols, so if it lives next to an HTTP REST API, think about how you map connections from nginx to the socket.io app and how you set up error handling.
In your case, create a new file socket.js:
// socket.js
var http = require('http')
var socket_io = require('socket.io')

function init_socket(app) {
  const server = http.Server(app)
  const io = socket_io(server, { path: 'your-path-want-for-socket-io' }) // default: /socket.io/
  return { server, io }
}

module.exports = { init_socket }

// in your server.js
const { init_socket } = require('./socket.js')
const { server, io } = init_socket(app)
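Put together with the server.js from the question, the wiring could then look roughly like this (a sketch; the key point is that you listen on the http server returned by init_socket instead of calling app.listen):
// server.js - sketch of wiring the question's app together with socket.js
const express = require('express');
const router = require('./router');
const { init_socket } = require('./socket.js');

const app = express();
app.use('/api', router);

const { server, io } = init_socket(app);

io.on('connection', (socket) => {
  console.log('client connected', socket.id);
});

// listen on the http server, not on the express app,
// so the REST routes and the websocket share port 5000
server.listen(5000, () => {
  console.log('Listening on port 5000');
});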
In my Vue.js app I'm using nightwatch to test my app. I have the following spec:
module.exports = {
  'wrong email or password': function (browser) {
    const devServer = browser.globals.devServerURL
    var nock = require('nock');
    var couchdb = nock('http://localhost:3000/')
      .get('api/v1/login')
      .reply(401, {
        error: 'dupa'
      });
    browser
      .url(devServer + '/login')
      .setValue('input[type=email]', 'email#example.com')
      .setValue('input[type=password]', 'password')
      .click('.login')
      .assert.containsText('#app', 'Niepoprawny email lub hasło.')
      .end()
  }
}
In my test I'm trying to use https://github.com/node-nock/nock. But unfortunately it does not mock any requests. What am I doing wrong?
Nock replaces the HTTP mechanism in the environment in which it is run.
Since you're running it in your test, which isn't running in the browser, the browser environment is unchanged.
There are several things you could do, but that depends on what you're trying to achieve:
You could write a fake server and have it listen at port 3000 and answer any way you like.
You could configure your application to use a different mechanism according to configuration, and have it load a Nock 'strategy' in the test.
If you have other tests checking the UI, you could replace this test with unit tests and integration tests for the functions that do the actual requests.
To write a fake, you just need a simple server that returns the answer you want. Here's an example with express.js:
const express = require('express')
const app = express()
app.get('/api/v1/login', function (req, res) {
  res.send('Some response')
})
app.listen(3000, function () {
  console.log('server listening on port 3000')
})
I am creating an app with Node.js. In the app I have an app.js script that is the entry point; it initializes both the app, as an Express app, and the HTTP server that I use.
Just to clarify: modules here are not npm modules, they are my own files. I've written the app in modules. They are just separate script files used by require()-ing them.
This app has several modules that a main module handler initializes. It reads the contents of a folder, which contains my own modules, and then by convention calls .initialize() on each module after running a require() call on the filenames without the .js extension.
However, I have one module that needs the app variable to create an endpoint, and one module that needs the httpServer variable to create a web socket. Both of these are instantiated in app.js.
Seeing as I don't know what kind of modules will be in the folder, I don't really want to send app and httpServer to every module if they are only needed by one module each. Something like dependency injection would fit nicely, but is that possible without too much overhead?
Right now I just temporarily added app and httpServer to the GLOBAL object.
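For reference, the module handler described above typically looks something like this (a minimal sketch; the folder name and the initialize convention come from the question, everything else is an assumption):
// moduleHandler.js - hypothetical sketch of the loader described in the question
const fs = require('fs');
const path = require('path');

const modulesDir = path.join(__dirname, 'modules'); // assumed folder name

fs.readdirSync(modulesDir)
  .filter((file) => file.endsWith('.js'))
  .forEach((file) => {
    // require the file without its .js extension and call .initialize() by convention
    const mod = require(path.join(modulesDir, path.basename(file, '.js')));
    mod.initialize();
  });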
What I usually do is have app.js export app so that modules elsewhere in my app can require it directly, rather than having to pass it around everywhere. I also slightly modify app.js so that it won't "listen" if it is required as a module; that way, if I later decide to wrap it with another app, I can do so with minimal changes. This is not important to your question, I just find it gives me more control when unit testing. All you really need from the code below is module.exports = app.
'use strict';

var express = require('express'),
    app = express(),
    config = require('config'),
    pkg = require('./package.json');

// trust reverse proxies
app.enable('trust proxy');
app.set('version', pkg.version);

module.exports = app; // <--- *** important ***

if (app.get('env') !== 'production') {
  app.set('debug', true);
}

// calling app.boot bootstraps the app
app.boot = function (skipStart) { // skipStart var makes it easy to unit test without actually starting the server
  // add middleware
  require('./middleware/');
  // setup models
  app.set('models', require('./models'));
  // setup routes
  require('./routes/');
  // wait for a dbconnection to start listening
  app.on('dbopen', function () {
    // setup hosting params
    if (!skipStart) {
      let server = app.listen(config.port, function () {
        app.emit('started');
        console.log('Web Server listening at: http://%s:%s', server.address().address, server.address().port);
        // mail server interceptor for dev
        if (app.get('env') !== 'production') {
          // Config smtp server for dev
          let SMTPServer = require('smtp-server').SMTPServer,
              mailServer = new SMTPServer({
                secure: false,
                disabledCommands: ['STARTTLS'],
                onData: function (stream, session, callback) {
                  stream.pipe(process.stdout); // print message to console
                  stream.on('end', callback);
                },
                onAuth: function (auth, session, callback) {
                  callback(null, {user: 1, data: {}});
                }
              });
          // Start smtp server
          mailServer.listen(1025, '0.0.0.0');
        } else {
          // start agenda jobs only on production
          require('./jobs.js');
          console.log('Agenda Jobs Running.');
        }
      });
    } else {
      app.emit('booted');
    }
  });
};

// If this is the main module, run boot.
if (require.main === module) {
  // move all of this to next tick so we can require app.js in other modules safely.
  process.nextTick(app.boot);
}
Suppose you want to initialize 2 files from your main app.js.
app.js
var express = require('express');
var http = require('http');
var socketIni = require('./socketini.js'); // you have to pass server
var xyz = require('./xyz.js'); // you have to pass app
var app = express();
var server = http.createServer(app);
socketIni(server);
xyz(app);
server.listen(3000); // start the shared http server (port is just an example)
socketini.js
module.exports = function (server) {
  // your socket initialization goes here
  var io = require('socket.io').listen(server);
}
xyz.js
module.exports = function (app) {
  // you can access app here
}
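Inside xyz.js the injected app can then be used like any other Express instance, for example (a sketch; the /status route is just an illustration):
// xyz.js - hypothetical example of using the injected app to register an endpoint
module.exports = function (app) {
  app.get('/status', (req, res) => {
    res.json({ ok: true });
  });
};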