Cannot kill a Linux process using Node.js process.kill(pid) - javascript

I'm trying to kill a background process using Node.js process.kill(pid, 'SIGTERM'), but the process is not getting killed.
I executed the node script below and later checked the process from the prompt with ps -efww | grep 19783 | grep -v grep to confirm it was still not killed.
I can confirm that the process it is trying to kill was started by the same user, so there is no permission issue.
Is there something I need to pass to get the process killed?
Node Version: 8.11.1
OS: Linux 3.10.0-327.10.1.el7.x86_64
Reference: Node process
Code:
'use strict';
const argv = require('yargs').argv;
const exec = require('child_process').exec;

function execute(command) {
    console.log("Executing Command : ", command);
    return new Promise((resolve, reject) => {
        exec(command, {
            maxBuffer: 1024 * 5000000
        }, (error, stdout, stderr) => {
            if (error) {
                console.log(`ERROR: Something went wrong while executing ${command}: ${error}`);
                reject(error);
            } else {
                resolve(stdout);
            }
        });
    });
}
function kill(pid) {
    try {
        console.log(`Killing Process : ${pid}`);
        process.kill(pid, 'SIGTERM');
        let command = `ps -efww | grep ${pid} | grep -v grep | grep -v dzdo `;
        let output = execute(command).then(res => {
            console.log(`output: ${res}`);
        }).catch(err => console.log(err));
    } catch (e) {
        console.log(`Invalid Process ID:${pid}, failed during kill, "ERROR: ${e}"`);
    }
}
function main() {
    if (argv.kill) {
        let allPIDs = argv.kill || undefined;
        // console.log(`ALL PID's: ${allPIDs}`);
        // remove all spaces (global flag so every space is stripped, not just the first)
        allPIDs = allPIDs.toString().replace(/\s/g, '').split(',');
        if (allPIDs.length > 0) {
            allPIDs.forEach(pid => {
                if (!isNaN(pid)) {
                    // console.log(`Valid PID: ${pid}`);
                    kill(pid);
                } else {
                    console.log(`ERROR: Invalid Process ID : ${pid}, Skipped Kill `);
                }
            });
        }
    }
}
main();
Assuming this code is saved as killer.js
Usage: node killer.js --kill=19783

Try SIGKILL instead of SIGTERM.
The docs say:
'SIGKILL' cannot have a listener installed, it will unconditionally terminate Node.js on all platforms.
So I think it's worth trying.
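For what it's worth, here is a minimal sketch of that change (the PID is just the example value from the question; sending signal 0 is a standard way to test whether a process still exists):
const pid = 19783; // example PID from the question

try {
    // SIGKILL cannot be caught or ignored by the target process
    process.kill(pid, 'SIGKILL');
    // signal 0 sends nothing, but throws ESRCH if the process no longer exists
    setTimeout(() => {
        try {
            process.kill(pid, 0);
            console.log(`Process ${pid} is still running`);
        } catch (e) {
            console.log(`Process ${pid} is gone (${e.code})`);
        }
    }, 1000);
} catch (e) {
    console.log(`Failed to kill ${pid}: ${e}`);
}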

Related

How to serve yarn build file with npm serve command?

Basically, my task is to compile a js file and serve it at http://localhost:5000/final.js
I have the following script.
Current issues:
The console.log output seems to print out of order.
I am able to switch directories and run yarn build, but it does not seem to serve the file.
Here is the source code:
#!/usr/bin/env node
const frontendDir = "/my/frontend";
const jsDir = "/my/frontend/build/static/js";
// util
const util = require("util");
// exec
const exec = util.promisify(require("child_process").exec);
// async func
async function runcmd(cmd) {
    try {
        const { stdout, stderr } = await exec(cmd);
        // need output
        console.log("stdout:", stdout);
        // need error
        console.log("stderr:", stderr);
    } catch (err) {
        console.error(err);
    }
}
try {
    // go to front end dir
    process.chdir(frontendDir);
    console.log("Switch to dir: " + process.cwd());
    // yarn build
    runcmd("yarn build");
    // go to file dir
    process.chdir(jsDir);
    console.log("Switch to dir: " + process.cwd());
    // find that js file and copy it, rename it
    runcmd("cp main.*.js final.js");
    // serve at /my/frontend/build/static/js with url http://localhost:5000/final.js
    runcmd("serve .");
} catch (err) {
    console.log("chdir: " + err);
}
Here is the output from the script above
Switch to dir: /my/frontend
Switch to dir: /my/frontend/build/static/js
stdout:
stderr:
stdout: yarn run v1.21.1
$ react-app-rewired build && cpr ./build/ ../build/frontend/ -o
Creating an optimized production build...
Compiled successfully.
File sizes after gzip:
210.3 KB build/static/js/main.c1e6b0e9.js
The project was built assuming it is hosted at ./.
You can control this with the homepage field in your package.json.
The build folder is ready to be deployed.
Find out more about deployment here:
Done in 13.91s.
stderr:
// https://stackoverflow.com/questions/41037042/nodejs-wait-for-exec-in-function
const exec = require("child_process").exec;

function os_func() {
    this.execCommand = function (cmd) {
        return new Promise((resolve, reject) => {
            exec(cmd, (error, stdout, stderr) => {
                if (error) {
                    reject(error);
                    return;
                }
                resolve(stdout);
            });
        });
    };
}

const os = new os_func();

process.chdir(frontendDir);
os.execCommand("yarn buildlocal")
    .then(() => {
        console.log("# done yarn build");
        process.chdir(jsDir);
        os.execCommand("cp main.*.js docman.js").then(() => {
            console.log("# done copy and serve at port 5000");
            os.execCommand("serve -l 5000 .").then(() => {});
        });
    })
    .catch(err => {
        console.log("os >>>", err);
    });
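For comparison, an equivalent sketch using async/await instead of the promise chain (the yarn buildlocal and docman.js names are taken from the answer above and may differ in your setup):
const { promisify } = require("util");
const exec = promisify(require("child_process").exec);

const frontendDir = "/my/frontend";
const jsDir = "/my/frontend/build/static/js";

async function deploy() {
    process.chdir(frontendDir);
    await exec("yarn buildlocal");        // wait for the build to finish before moving on
    console.log("# done yarn build");
    process.chdir(jsDir);
    await exec("cp main.*.js docman.js"); // the built file exists by now
    console.log("# done copy and serve at port 5000");
    await exec("serve -l 5000 .");        // keeps running until the server is stopped
}

deploy().catch(err => console.log("os >>>", err));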

How to kill a child process and allow parent process to continue

I am using node.js to monitor a website's API for commands and choose the appropriate child process based on the said command. Only one child process should be running at a time. As of now, child process 1 begins on startup of the node.js script (good). Then, when a command is heard child process 2 starts up (also good), but along with child process 1 still running in the background (not good).
How can I get child process 1 to be terminated on the start of child process 2? I am currently using require("child_process").exec and my code is mostly working; should I be using require("child_process").fork or require("child_process").spawn instead?
I have tried using execFile.kill('SIGTERM'); on startup of child process 2, as well as an else/if statement without any luck. (I'll cut down on the non-essential code below).
const tmi = require('tmi.js');
var exec = require("child_process").exec;
var execFile = require('child_process').exec;
.
.
.
(edited out)
.
.
.
// Register our event handlers (defined below)
client.on('connected', (address, port) => {
    client.action('account', 'phrase');
    execFile('/path/script.py', function (error, stdout, stderr) {
        if (error) {
            throw error;
        }
    });
});
client.on('chat', (channel, user, message, self) => {
    if (message === '!exec') {
        execFile.kill('SIGTERM'); // to try and terminate child process 1 above
        exec('/path/script.py', function (error, stdout, stderr) {
            if (error) {
                console.error(`exec error: ${error}`);
                return;
            }
            console.log(`stdout: ${stdout}`);
            console.log(`stderr: ${stderr}`);
        });
    }
});
I have worked on this issue for a while without much progress. Any and all insight is appreciated!
const tmi = require('tmi.js');
var exec = require("child_process").exec;
var execFile = require('child_process').exec;
var child;
.
.
.
(edited out)
.
.
.
// Register our event handlers (defined below)
client.on('connected', (address, port) => {
    client.action('account', 'phrase');
    child = execFile('/path/script.py', function (error, stdout, stderr) {
        if (error) {
            throw error;
        }
    });
});
client.on('chat', (channel, user, message, self) => {
    if (message === '!exec') {
        child.kill(); // kills the child process.
    }
});
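A small extension of that idea (a sketch; /path/other_script.py is a made-up name) starts the replacement process in the same handler, so child process 1 is killed and child process 2 takes its place:
client.on('chat', (channel, user, message, self) => {
    if (message === '!exec') {
        if (child) child.kill();          // terminate child process 1 if it is still running
        // start child process 2 and keep the handle so it can be killed later as well
        child = execFile('/path/other_script.py', function (error, stdout, stderr) {
            if (error) {
                console.error(error);
            }
        });
    }
});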
Here is an example. The code below starts the mongo server with a child process, and after 10 seconds the process is killed.
const child = require('child_process');
const ls = child.exec('mongod --dbpath ~/Documents/myown/learning/mongo', (err, stdout, stderr) => {
    if (err) {
        console.log(err);
    } else {
        console.log(stdout);
    }
});
setTimeout(() => {
    ls.kill();
}, 10000);

Kill a Child Process in Node.js while still running Server

I am trying to kill a child process I have running within my server. Basically, the child process runs johnny-five code I've written in an online terminal in React against my server. When I run the child process the code works great, but I can't kill the child process without stopping the server. I've tried doing so with Control-C and .exit() but neither seems to work.
codeRouter
    .post('/codeAPI', (req, res) => {
        console.log(req.body)
        let fileName = `johnnyFiles/${req.body.currentFile}`
        fs.writeFileSync(fileName, req.body.currentCode, (err) => {
            if (err) throw err
        })
        let id = shortid.generate()
        let fileObject = {
            fileName: req.body.currentFile,
            fileContents: req.body.currentCode,
            ID: id
        }
        data = [fileObject, ...data]
        fs.writeFileSync('data/fileData.json', JSON.stringify(data), (err) => {
            if (err) throw err
        })
        res.json(data)
        ///////////////////////////////////////////
        let nodeSpawn = spawn('node', [fileName], {
            //detached: true,
            shell: true
        })
        nodeSpawn.stdout.on('data', (data) => {
            console.log("OUTPUT", data.toString())
        })
        nodeSpawn.stderr.on('data', (data) => {
            console.log("ERRORS", data.toString())
        })
        nodeSpawn.on('exit', (code) => {
            console.log(`Child exited with code ${code}`)
            nodeSpawn.kill('SIGINT')
        })
    })
You can use the Linux command line.
To see the running Node processes, use:
pgrep node
To kill a process, use:
kill <pid>
Or, to force the shutdown:
kill -9 <pid>
Or, if you want to kill all Node processes:
kill $(pgrep node)
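If you would rather stop the child from inside the server instead of from the shell, one sketch (assuming the same router as in the question; the /codeAPI/stop route name is made up) keeps a reference to the spawned process and kills it from a second route:
let runningChild = null // hypothetical module-level handle to the current child

codeRouter
    .post('/codeAPI', (req, res) => {
        // ... write the files exactly as in the question ...
        runningChild = spawn('node', [fileName], { shell: true })
        runningChild.on('exit', (code) => {
            console.log(`Child exited with code ${code}`)
            runningChild = null
        })
        res.json(data)
    })

// hypothetical extra route that stops the running child without stopping the server
codeRouter
    .post('/codeAPI/stop', (req, res) => {
        if (runningChild) {
            runningChild.kill('SIGINT')
            res.json({ stopped: true })
        } else {
            res.json({ stopped: false, reason: 'no child running' })
        }
    })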

How to ignore errors generated by child_process.exec?

I am executing shell commands from my Node.js script. One of these commands is "npm install", followed by a command that runs the index file of a Node.js project.
The npm install command is returning an error generated by node-gyp. In general, this error does not affect my service. However, child_process.exec is catching it and stopping the script. My question is, how do I trigger the exec command and ignore the error returned?
Below is a fraction of the code snippet
const exec = require('child_process').exec;

exec("npm install", {
    cwd: serviceDirectory + gitRepo
}, (error1, stdout, stderr) => {
    if (error1) {
        // this error is for testing purposes
        util.log(error1);
    }
    // run the service
    exec("node index.js", {
        cwd: serviceDirectory + gitRepo + "/"
    }, cb);
});
You can use try/catch with the promisified exec to handle the errors, for example:
const { promisify } = require('util');
const exec = promisify(require('child_process').exec);

export default async function () {
    const dataFormat = {
        stdout: '',
        stderr: '',
    };
    let cpu = dataFormat;
    let diskUsed = dataFormat;
    try {
        cpu = await exec('top -bn1 | grep "Cpu(s)" | sed "s/.*, *\\([0-9.]*\\)%* id.*/\\1/"');
    } catch (error) {
        cpu.stderr = error.stderr;
    }
    try {
        diskUsed = await exec("df -h | awk 'NR==2{printf $3}'");
    } catch (error) {
        diskUsed.stderr = error.stderr;
    }
    const payload = {
        cpu,
        diskUsed,
    };
    return payload;
}
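Applied to the npm install case from the question, the same pattern looks roughly like this (serviceDirectory, gitRepo, and util come from the question's own code):
const { promisify } = require('util');
const execAsync = promisify(require('child_process').exec);

async function installAndRun() {
    try {
        await execAsync("npm install", { cwd: serviceDirectory + gitRepo });
    } catch (error1) {
        // node-gyp failures land here; log them and keep going
        util.log(error1);
    }
    // run the service regardless of whether npm install reported an error
    return execAsync("node index.js", { cwd: serviceDirectory + gitRepo + "/" });
}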

How to execute multiple commands on SSH2 using NodeJS

I'm trying to deploy from GitHub using ssh2, and I want to execute more than one command, in the order of the array. The code I'm using now is included below.
async.series([
    ...
    // Deploy from GitHub
    function (callback) {
        // Console shizzle:
        console.log('');
        console.log('Deploying...'.red.bold);
        console.log();
        console.log();
        var deployFunctions = [
            {
                command: 'cd ' + envOptions.folder + ' && pwd',
                log: false
            },
            {
                command: 'pwd'
            },
            {
                command: 'su ' + envOptions.user,
                log: false
            },
            {
                command: 'git pull'
            },
            {
                command: 'chmod 0777 * -R',
                log: false
            }
        ];
        async.eachSeries(deployFunctions, function (item, callback) {
            deployment.ssh2.exec(item.command, function (err, stream) {
                deployment.logExec(item);
                stream.on('data', function (data, extended) {
                    console.log(data.toString().trim());
                    console.log();
                });
                function done() {
                    callback(err);
                }
                stream.on('exit', done);
                stream.on('end', done);
            });
        }, function () {
            callback();
        });
    },
    ...
);
But, after I cd'ed to the right directory, it forgets where it was and starts all over again.
$ cd /some/folder && pwd
/some/folder
$ pwd
/root
#robertklep is correct about why your cd doesn't persist. Each command invokes a distinct shell instance which starts in its initial state. You could prefix each command with cd /home/jansenstok/domains/alcoholtesterwinkel.com/public_html/ && as a quick fix, but really you are setting yourself up for pain. What you want is a shell script with all the power of multiple lines as opposed to a list of individual disconnected commands.
Look at using ssh2's sftp function to transfer a complete shell script to the remote machine as step 1, execute it via exec (/bin/bash /tmp/your_deploy_script.sh) as step 2, and then delete the script as step 3.
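A rough sketch of that three-step approach with the ssh2 client (file names, paths, and connection details are placeholders):
const { Client } = require('ssh2');
const fs = require('fs');

const conn = new Client();
conn.on('ready', () => {
    // step 1: upload the deploy script
    conn.sftp((err, sftp) => {
        if (err) throw err;
        sftp.fastPut('./deploy.sh', '/tmp/your_deploy_script.sh', (err) => {
            if (err) throw err;
            // step 2: run it in a single shell so cd/su state persists between lines
            conn.exec('/bin/bash /tmp/your_deploy_script.sh', (err, stream) => {
                if (err) throw err;
                stream.on('data', (data) => console.log(data.toString()))
                    .on('close', () => {
                        // step 3: remove the script and disconnect
                        conn.exec('rm /tmp/your_deploy_script.sh', () => conn.end());
                    });
            });
        });
    });
}).connect({
    host: 'example.com',
    username: 'deploy',
    privateKey: fs.readFileSync('/path/to/key')
});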
I know this is a super old question, but I ran into this problem while trying to manage an ACE through my Node server. The answer didn't work for me, but several searches later led me to a wrapper that worked really well for me. Just wanted to share here because this was the top link in my Google search. It's called ssh2shell and can be found here: https://www.npmjs.com/package/ssh2shell
It's very simple to use: just pass an array of commands, and they run one by one, waiting for each command to complete before moving on to the next.
A practical example:
const client = new Client();
const cmds = [
    'ls -lah \n',
    'cd /mnt \n',
    'pwd \n',
    'ls -lah \n',
    'exit \n',
];
client.on('ready', () => {
    console.log('Client :: ready');
    client.shell((err, stream) => {
        stream.on('close', (code) => {
            console.log('stream :: close\n', { code });
        }).on('data', (myData) => {
            console.log('stream :: data\n', myData.toString());
        }).on('exit', (code) => {
            console.log('stream :: exit\n', { code });
            client.end();
        }).on('error', (e) => {
            console.log('stream :: error\n', { e });
            rej(e);
        });
        for (let i = 0; i < cmds.length; i += 1) {
            const cmd = cmds[i];
            stream.write(`${cmd}`);
        }
    });
}).connect({
    host: '127.0.0.1',
    port: 22,
    username: 'root',
    password: 'root',
});
All the examples in the docs use stream.end(), which caused the creation of a new session instead of using the current one.
You shouldn't use shell in your program, because the shell command invokes a new terminal on the system to do your job. You should use the exec command instead; by default, exec emits 'exit' after the command you gave it has finished executing.
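For example, combining the commands into a single exec call (a sketch along the lines of the cd-prefix suggestion above; the directory is a placeholder) keeps them in one shell invocation, so the cd persists for the later commands:
client.exec('cd /some/folder && pwd && git pull && chmod -R 0777 .', (err, stream) => {
    if (err) throw err;
    stream.on('data', (data) => console.log(data.toString()))
        .on('close', () => client.end());
});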
