child.kill() does not kill the running process - javascript

I have this block of code:
const childProcess = require("child_process");
let child;
before(function(done) {
// 'npm start' starts an express server.
child = childProcess.exec(["npm start"]);
done();
});
// My tests run here
after(function() {
child.kill();
});
The node.exe process never stops running. I can see it running when if I log running processes.
This is what happens in the server file when the npm start command runs :
const app = express();
app.listen(process.env.PORT || app.get("port"), function() {
console.log("HTTP listening on *:" + app.get("port"));
});
I m using mocha.js to run my tests

Related

Node.js running a shell command from the same process

I'm trying to make a way to boot up a Minecraft server from nodejs, but I'm having trouble making a way to run commands from nodejs.
const { spawn } = require('node:child_process')
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const fs = require('fs');
app.get('/start', (req, res) => {
fs.writeFile('minecraftstatus.txt', 'on', (err) => {
if (err) throw err;
});
const command = spawn('java', ['-jar', '-Xms2048M','-Xmx2048M', '-Dfile.encoding=utf8', 'server.jar', 'nogui'])
// the `data` event is fired every time data is
// output from the command
command.stdout.on('data', output => {
// the output data is captured and printed in the callback
fs.appendFile('console.txt', ("\n" + output.toString()), 'utf-8', err => {
console.log(err)
})
console.log("Output: ", output.toString())
})
res.status(200).send("OK")
});
app.listen(80, () => {
console.log('Server started on port 80');
});
From what you see above, whenever a user sends a GET request, it sends a command and appends any output to a text file. I need to make a way in order to send commands to Minecraft. I need to send commands to the same shell that nodejs ran the command.
I've tried this:
app.get('/mcstop', (req, res) => {
try {
const command2 = spawn('/stop')
// the `data` event is fired every time data is
// output from the command
command2.stdout.on('data', output => {
// the output data is captured and printed in the callback
console.log("Output: ", output.toString())
})
res.status(200).send("OK")
}
catch {
console.log("Oh no...")
}
});
Where it sends /stop to the shell, but it seems like it isn't being ran on the same shell as where the Minecraft server was created from.
How could I achieve this?

TCPSERVERWRAP error can not let jest close after completion of test

I have written unit tests using jest. In unit test I am opening one websocket server using http server.
I am getting following error -
TCPSERVERWARP
beforeAll(async () => {
> 27 | server = new http.Server().listen(5000);
| ^
28 | wss = new WebSocketServer({ server: server });
29 | await wss.on('connection',async()=>{
30 | await wss.on('close',()=>{})
This is my test suite -
import http from 'http';
import WebSocket, { WebSocketServer } from "ws";
describe("test", () => {
beforeAll(async () => {
server = new http.Server().listen(5000);
wss = new WebSocketServer({ server: server });
await wss.on('connection',async()=>{
await wss.on('close',()=>{})
await wss.on('error',()=>{})
})
});
afterEach(() => {
jest.clearAllMocks();
jest.resetModules();
});
afterAll( () => {
jest.resetAllMocks();
wss.close();
})
it("some test", ()=>{
// test details
});
})
I got this message in the console -
Jest has detected the following 1 open handle potentially keeping Jest from exiting:
I need websocket server for my test as the class on which I am writing unit tests, needs one websocket server so I need to open one websocket server in local.
This is the error message without --detectOpenHandles option-
Jest did not exit one second after the test run has completed.
This usually means that there are asynchronous operations that weren't stopped in your tests. Consider running Jest with --detectOpenHandles to troubleshoot this issue.

Spawn process inside process or detached it pkg builder

I dont sure what the problem here, mongod process not spawn inside program.exe that create with pkg. i test it first before compile the script can launch mongod process. after i tested it, spawn cant read pkg filesystem ( snapshot ).
const { spawn } = require('child_process');
const { parse } = require('path')
let processPath = parse(process.argv[0]);
let processDir = processPath.dir;
const args = [
'-f', `${__dirname}\\configs\\mongodb.yml`,
'--dbpath', `${processDir}\\database\\data`,
'--logpath', `${processDir}\\database\\log\\system.log`,
];
const options = {
cwd: `${processDir}\\bin`
};
const mongod = spawn('mongod', args, options);
mongod.stdout.on('data', chunk => {
console.log(chunk.toString())
});
mongod.stdout.on('error', chunk => {
console.log(chunk.toString())
});
mongod.on('spawn', () => {
console.log('success')
});
mongod.on('error', function(error) {
console.log(error)
});
Build Dir
build
build/program.exe
build/bin
build/bin/mongod.exe
build/database
build/database/data
build/database/log/system.log
Package.json pkg configurations
"bin": "dist/application.js",
"pkg": {
"targets": ["node16-win-x64"],
"outputPath": "dist/build",
"assets": [
"dist/configs/*"
]
}
Here is my solution to this issue, tested on Linux Ubuntu 22.04 LTS.
Case scenario:
I needed to include an executable file hello_world as an asset into /snapshot/project/bin/hello_world virtual path and based on some conditions execute it inside the Linux environment.
The problem:
I was getting the following error when I've been trying to execute the command via child_process.spawn:
/bin/sh: 1: /snaponshot/project/bin/hello_world: not found
So clearly my OS is trying to execute hello_world command via /bin/sh, however, the system is unable to access to /snapshot virtual filesystem, therefor not able to execute it.
The workaround:
Clearly, the main file system is unable to access the virtual file system, but we can do the opposite, by copying our executable file from the virtual file system into the main file system and executing it from there, basically, this is what I did:
//node packages
const fs = require('fs');
const os = require('os');
const path = require('path');
const {execSync, spawn} = require('child_process');
// executable file name
const executable = 'hello_world';
//file path to the asset executable file
const remoteControlFilePath = path.join(__dirname, `../bin/${executable}`);
let executableFileFullPath = remoteControlFilePath;
// avoid the workaround if the parent process in not pkg-ed version.
if (process.pkg) {
// creating a temporary folder for our executable file
const destination = fs.mkdtempSync(`${os.tmpdir()}${path.sep}`);
const destinationPath = path.join(destination, executable);
executableFileFullPath = destinationPath;
// copy the executable file into the temporary folder
fs.copyFileSync(remoteControlFilePath, destinationPath);
// on Linux systems you need to manually make the file executable
execSync(`chmod +x ${destinationPath}`);
}
// using {detached: true}, execute the command independently of its parent process
// to avoid the main parent process' failing if the child process failed as well.
const child = spawn(executableFileFullPath, {detached: true});
child.stdout.on('data', (data) => {
console.log(`child stdout:\n${data}`);
});
child.stderr.on('data', (data) => {
console.error(`child stderr:\n${data}`);
});
child.on('exit', (code, signal) => {
console.log(`child process exited with code ${code} and signal ${signal}`);
});

Mocha and Chai test never starts when running an Express app

I'm trying to test an Express API with Mocha and Chai.
My structure is as follows:
server.js
const express = require('express');
...
const loadCredentials = () => ({
...
})
const application = () => {
console.log('This is a log i should always see');
const app = express();
app.use('/api', authentication, authorization('#data/accounts'), router);
...
return app;
};
if (require.main === module) {
application.listen(443)
...
}
module.exports = { application };
test.js
const { application } = require('../server/src/server');
describe('Some async test', async () => {
it(, async () => {
console.log('I should really see this log!!');
server = application();
const res = await chai.request(server).get('/api');
...test stuff...
}
}
When I lerna run test (which runs mocha ./test.js --timeout 60000) the test never executes.
lerna notice cli v3.10.7
lerna info Executing command in 1 package: "yarn run test"
However, if I disable the call to application, the test starts (and fails because server is undefined).
I also tried refactoring application and passing an express() parameter to application(app) from the test, and I get the same behavior.
The test runs if I run it from WebStorm as an individual test.
Needless to say the server works when I yarn run it.
Any help would be appreciated.

How to stop webdriver without crashing node js

is it possible to stop the selenium webdriver without stopping node?
I have following issue:
I try to create a API tool that does some web automation when getting a get request. So I am basically running selenium webdriver on a get request to /start via Express. I want the tool to check for different elements and when it somehow fails I want it to stop selenium but NOT the node instance (since a new get request could be send).
This is my code so far:
"use strict";
const webdriver = require('selenium-webdriver'),
Express = require('express'),
By = webdriver.By,
until = webdriver.until,
Keys = webdriver.Key,
app = new Express();
app.get("/automate", (req, res) => {
start(res);
});
function start(res) {
const driver = new webdriver.Builder().forBrowser('chrome').build();
driver.get('https://www.google.com/');
// # --- foo --- #
let errMessage = {pos: "FOO", message: "Ooops friendly robot has some troubles!"}
checkStep("#foo", errMessage);
driver.findElement(By.id("foo"))
.sendKeys("fooz");
// # --- bar --- #
errMessage = {pos: "BAR", message: "Ooops friendly robot has some troubles!"}
checkStep("#bar", errMessage);
driver.findElement(By.id("bar"))
.sendKeys("baz");
// etc…
function checkStep(selector, errMessage) {
driver.findElement(By.css(selector))
.then(() => {
console.log(`${selector} => found`);
})
.catch(err => {
console.log(`Error: ${err}`);
res.send(errMessage);
driver.quit();
});
}
}
app.get("*", (req, res) => {
res.send("Hello World");
});
// start the server
const port = process.env.PORT || 3000;
const env = process.env.NODE_ENV || 'production';
app.listen(port, err => {
if (err) { return console.error(err); }
console.info(`Server running on http://localhost:${port} [${env}]`);
});
it is actually working so far that when selenium do not find the element the response from the API is correct. In Selenium I get back:
{
"pos": "FOO",
"message": "Ooops friendly robot has some troubles!"
}
So far all good.
BUT unfortunately stopping selenium is also stopping Node from running.
The error I get is following:
throw error;
^
WebDriverError: no such session
(Driver info: chromedriver=2.30.477690 (c53f4ad87510ee97b5c3425a14c0e79780cdf262),platform=Ma
c OS X 10.12.5 x86_64)
at WebDriverError
Please help, thank you!
ps: I am not using webdriverio as you can see I use this package: https://www.npmjs.com/package/selenium-webdriver
Ok, I got this working. It’s a bit difficult solution but it works:
Using Child Processes
Basically, every time the app gets a get request to /automate it will now create a child process in node which runs the selenium scripts (a child process is kind of like using another thread. Here is a very good tutorial on child processes):
index.js
"use strict";
const Express = require('express');
const { spawn } = require('child_process');
const data = require('./data.json');
const app = new Express();
app.get("/automate", (req, res) => {
const child = spawn(
process.execPath,
[`${__dirname}/test.js`, JSON.stringify(data)],
{ stdio: ['inherit', 'inherit', 'inherit', 'pipe'] }
);
child.stdio[3].on('data', data => {
const response = JSON.parse(data.toString());
res.send(response);
console.log(response);
child.kill();
});
});
app.get("*", (req, res) => {
res.send("Hello World");
});
const port = process.env.PORT || 3000;
const env = process.env.NODE_ENV || 'production';
app.listen(port, err => {
if (err) { return console.error(err); }
console.info(`Server running on http://localhost:${port} [${env}]`);
});
test.js
"use strict";
// hook with argument 3, that is "pipe" from parent
const Net = require('net'),
pipe = new Net.Socket({ fd: 3 });
const data = JSON.parse(process.argv[2]);
const webdriver = require('selenium-webdriver'),
By = webdriver.By,
until = webdriver.until,
Keys = webdriver.Key;
function start() {
const driver = new webdriver.Builder().forBrowser('chrome').build();
driver.get('https://www.google.com/');
// # --- foo --- #
let errMessage = {pos: "lst-ib", message: "Ooops friendly robot has some troubles!"}
checkStep("#lst-ib")
.sendKeys("fooz");
driver.get('https://www.facebook.com/');
driver.get('https://www.google.com/');
driver.get('https://www.facebook.com/');
// # --- bar --- #
errMessage = {pos: "BAR", message: "Ooops friendly robot has some troubles!"}
checkStep("#bar")
.sendKeys("baz");
function checkStep(selector) {
driver.findElement(By.css(selector))
.then(() => {
console.log(`${selector} => found`);
})
.catch(err => {
console.log(`${selector} => not found`);
publish(errMessage);
driver.quit();
});
}
}
function publish(message) {
pipe.write(JSON.stringify(message));
}
start();
It is working like a charm: on each request opening a new child process and killing that child process if it sends some message while also responding with the message to the client. Like this you can easily have several selenium instances simultaneously.
You’re welcome.
ps: If you hate all this asyncron stuff from Selenium webdriver-sync seems like a good choice. It basically wraps the selenium code to be syncon instead of asyncron. Like that I am able to use try {} catch {} and to driver.quit(); without any errors for code that comes later. (But this comes with a disadvantage: it is actually blocking your other nodejs code.)

Categories