Kill a Child Process in Node.js while keeping the Server running - javascript

I am trying to kill a child process I have running within my server. The child process runs johnny-five code I've written in an online terminal in React and sent to my server. When I run the child process the code works great, but I can't kill the child process without also stopping the server. I've tried Control-C and .exit(), but neither seems to work.
codeRouter
    .post('/codeAPI', (req, res) => {
        console.log(req.body)
        let fileName = `johnnyFiles/${req.body.currentFile}`
        // writeFileSync is synchronous and throws on error; it takes no callback
        fs.writeFileSync(fileName, req.body.currentCode)
        let id = shortid.generate()
        let fileObject = {
            fileName: req.body.currentFile,
            fileContents: req.body.currentCode,
            ID: id
        }
        data = [fileObject, ...data]
        fs.writeFileSync('data/fileData.json', JSON.stringify(data))
        res.json(data)
        ///////////////////////////////////////////
        let nodeSpawn = spawn('node', [fileName], {
            //detached: true,
            shell: true
        })
        nodeSpawn.stdout.on('data', (data) => {
            console.log("OUTPUT", data.toString())
        })
        nodeSpawn.stderr.on('data', (data) => {
            console.log("ERRORS", data.toString())
        })
        nodeSpawn.on('exit', (code) => {
            console.log(`Child exited with code ${code}`)
            nodeSpawn.kill('SIGINT')
        })
    })

You can use the Linux command line.
To see the running node processes, use:
pgrep node
To kill a process, use:
kill <pid>
Or, to force the shutdown:
kill -9 <pid>
Or, if you want to kill all node processes:
kill $(pgrep node)
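If you want to stop the child from inside the server rather than from a separate terminal, another option is to keep a reference to the spawned process and expose a second route that kills it. A minimal sketch, assuming an Express-style codeRouter as in the question; the /codeAPI/stop route and the currentChild variable are made up for illustration:
const { spawn } = require('child_process')
const fs = require('fs')

let currentChild = null

codeRouter.post('/codeAPI', (req, res) => {
    let fileName = `johnnyFiles/${req.body.currentFile}`
    fs.writeFileSync(fileName, req.body.currentCode)
    // keep a handle on the child so another route can kill it later
    currentChild = spawn('node', [fileName], { shell: true })
    currentChild.on('exit', () => { currentChild = null })
    res.json({ started: true })
})

// Hypothetical extra route: hitting it kills only the child, the server keeps running
codeRouter.post('/codeAPI/stop', (req, res) => {
    if (currentChild) {
        currentChild.kill('SIGINT')
        res.json({ stopped: true })
    } else {
        res.json({ stopped: false, reason: 'no child running' })
    }
})
One caveat: with shell: true the signal is delivered to the wrapping shell rather than the node process it started, so if the child does not die you can spawn with detached: true and kill the whole group with process.kill(-currentChild.pid, 'SIGINT').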

Related

Spawning Java File

How can I spawn a java file while receiving stdout output?
I am trying to start a Minecraft server file using child_process' spawn function, and attempting to get the output that it sends.
Here's what I've tried:
var childP = require("child_process")
var server = childP.spawn("java", ["-jar", "Launch.jar", "nogui"])
server.on("spawn", () => {
    console.log("Server started")
})
server.on("message", (message) => {
    console.log(message)
})
server.on("error", (err) => {
    console.log(err)
})
server.on("disconnect", () => {
    console.log("Server disconnected");
})
server.on("exit", (code) => {
    console.log(`Server exited with code ${code}`)
})
Easily done by just using server.stdout.on("data"); I should've looked more into the spawn event before asking the question :p
var childP = require("child_process")
var server = childP.spawn("java", ["-jar", "Launch.jar", "nogui"])
server.stdout.on("data", (data) => {
    console.log(data.toString().trim())
})
To spawn a Java file and receive its standard output using the child_process module in Node.js, you can use the spawn function as follows:
const { spawn } = require('child_process');
const javaProcess = spawn('java', ['-jar', 'minecraft_server.jar']);
javaProcess.stdout.on('data', (data) => {
    console.log(`stdout: ${data}`);
});
javaProcess.stderr.on('data', (data) => {
    console.error(`stderr: ${data}`);
});
javaProcess.on('close', (code) => {
    console.log(`child process exited with code ${code}`);
});

Node child process spawn not executing command

I am trying to execute a child process command to convert a file from one format to another using FFmpeg. I have FFmpeg installed on my system, and in the same code I am able to successfully merge a video and a file using cp.spawn. The next step would be to convert the file to the desired format, but the process exits immediately with code 1, without giving any errors or output.
Here's the code:
const cp = require('child_process');
const ffmpeg = require('ffmpeg-static');
const process = cp.spawn(ffmpeg, [
    '-loglevel', '8', '-hide_banner',
    '-i', 'output.webm', "output.ogg",
], {
    windowsHide: false,
});
process.on('error', (err) => {
    console.log('Failed to start subprocess.', err);
});
process.stdout.on('data', (data) => {
    console.log(`stdout: ${data}`);
});
process.stderr.setEncoding("utf8")
process.stderr.on('data', (data) => {
    console.log(`stderr: ${data}`);
})
process.on('close', (code) => {
    console.log(`child process exited with code ${code}`);
});
The file "output.webm" is in the current working directory. The only output that I'm getting is: child process exited with code 1, and nothing else happens.

Is there a way to get 'live' output lines from a python script spawned by child_process.execFile without flushing stdout every time?

I am trying to get the lines that a ('never ending') python script writes to stdout. But currently my code only logs something to the console when the python process exits. Is there a way I can get the 'live' output of the python script line by line?
spawn_child.js:
let execFile = require("child_process").execFile;
var child = execFile("python3", ["PATH_TO_FILE"]);
child.stdout.on("data", data => {
    console.log(data.toString());
});
child.stderr.on("data", data => {
    console.log(data.toString());
});
child.on("exit", code => {
    console.log("Child exited with code " + code);
});
The python file:
from time import sleep

while True:
    sleep(3)
    print("test")
Edit: It works when using a nodejs script instead of a python script
Change the python script to:
import time
import sys

while True:
    time.sleep(1)
    print("test")
    sys.stdout.flush()
and increase the buffer size of the child process:
const child = execFile("python", ["./runner.py"], {
    detached: true,
    maxBuffer: 10 * 1024 * 1024 * 1024
});
Or you can do it without flushing stdout by using python-shell:
const { PythonShell } = require('python-shell');
let pyshell = new PythonShell('runner.py');
pyshell.on('message', function (message) {
    console.log(message);
});
pyshell.end(function (err, code, signal) {
    if (err) throw err;
    console.log('The exit code was: ' + code);
    console.log('The exit signal was: ' + signal);
    console.log('finished');
});
Use spawn instead of execFile, and don't forget the shell and stdio options.
const spawn = require("child_process").spawn;
// With stdio: 'inherit' the child's stdout/stderr go straight to this process's
// terminal, so the output appears live without any 'data' handlers.
const child = spawn("python3", ["file.py"], { shell: true, stdio: 'inherit' });
child.on('close', function (code) {
    console.log('Child process exited with exit code ' + code);
});
You can also add the cwd option.
I was trying to implement something similar inside a NextJS application and wanted live output from my python script. Using python-shell I had the same issue that it only gave me output when the process exited, so I ended up using node-pty instead, which worked as expected:
import { spawn } from "node-pty"

const pyProcess = spawn("python", ["path/to/python/script"], {
    name: 'xterm-color',
    cols: 80,
    rows: 30,
    cwd: process.cwd(),
});
pyProcess.on('data', function (data: { toString: () => any; }) {
    console.log(data.toString());
});
pyProcess.on('exit', (code: any) => {
    console.log(`child process exited with code ${code}`);
});

How to kill a child process and allow parent process to continue

I am using node.js to monitor a website's API for commands and choose the appropriate child process based on the said command. Only one child process should be running at a time. As of now, child process 1 begins on startup of the node.js script (good). Then, when a command is heard child process 2 starts up (also good), but along with child process 1 still running in the background (not good).
How can I get child process 1 to terminate when child process 2 starts? I am currently using require("child_process").exec and my code is mostly working; should I be using require("child_process").fork or require("child_process").spawn instead?
I have tried using execFile.kill('SIGTERM'); on startup of child process 2, as well as an else/if statement, without any luck. (I'll cut down on the non-essential code below.)
const tmi = require('tmi.js');
var exec = require("child_process").exec;
var execFile = require('child_process').exec;
.
.
.
(edited out)
.
.
.
// Register our event handlers (defined below)
client.on('connected', (address, port) => {
    client.action('account', 'phrase');
    execFile('/path/script.py', function (error, stdout, stderr) {
        if (error) {
            throw error;
        }
    });
});
client.on('chat', (channel, user, message, self) => {
    if (message === '!exec') {
        execFile.kill('SIGTERM'); //to try and terminate child process 1 above
        exec('/path/script.py', function (error, stdout, stderr) {
            if (error) {
                console.error(`exec error: ${error}`);
                return;
            }
            console.log(`stdout: ${stdout}`);
            console.log(`stderr: ${stderr}`);
        });
    }
});
I have worked on this issue for a while without much progress. Any and all insight is appreciated!
const tmi = require('tmi.js');
var exec = require("child_process").exec;
var execFile = require('child_process').exec;
var child;
.
.
.
(edited out)
.
.
.
// Register our event handlers (defined below)
client.on('connected', (address, port) => {
    client.action('account', 'phrase');
    child = execFile('/path/script.py', function (error, stdout, stderr) {
        if (error) {
            throw error;
        }
    });
});
client.on('chat', (channel, user, message, self) => {
    if (message === '!exec') {
        child.kill(); //kills the child process.
    }
});
Here is example code. It starts the mongo server in a child process and kills it after 10 seconds.
const child = require('child_process');
const ls = child.exec('mongod --dbpath ~/Documents/myown/learning/mongo', (err, stdout, stderr) => {
    if (err) {
        console.log(err);
    }
    else {
        console.log(stdout);
    }
});
setTimeout(() => {
    ls.kill();
}, 10000);

How to execute multiple commands over SSH2 using NodeJS

I'm trying to deploy from GitHub using ssh2. I want to execute more than one command, in the order of the array. The code I'm using now is included below.
async.series([
    ...
    // Deploy from GitHub
    function (callback) {
        // Console shizzle:
        console.log('');
        console.log('Deploying...'.red.bold);
        console.log();
        console.log();
        var deployFunctions = [
            {
                command: 'cd ' + envOptions.folder + ' && pwd',
                log: false
            },
            {
                command: 'pwd'
            },
            {
                command: 'su ' + envOptions.user,
                log: false
            },
            {
                command: 'git pull'
            },
            {
                command: 'chmod 0777 * -R',
                log: false
            }
        ];
        async.eachSeries(deployFunctions, function (item, callback) {
            deployment.ssh2.exec(item.command, function (err, stream) {
                deployment.logExec(item);
                stream.on('data', function (data, extended) {
                    console.log(data.toString().trim());
                    console.log();
                });
                function done() {
                    callback(err);
                }
                stream.on('exit', done);
                stream.on('end', done);
            });
        }, function () {
            callback();
        });
    },
    ...);
But, after I cd'ed to the right directory, it forgets where it was and starts all over again.
$ cd /some/folder && pwd
/some/folder
$ pwd
/root
#robertklep is correct about why your cd doesn't persist. Each command invokes a distinct shell instance which starts in its initial state. You could prefix each command with cd /home/jansenstok/domains/alcoholtesterwinkel.com/public_html/ && as a quick fix, but really you are setting yourself up for pain. What you want is a shell script with all the power of multiple lines as opposed to a list of individual disconnected commands.
Look at using ssh2's sftp function to transfer a complete shell script to the remote machine as step 1, execute it via exec (/bin/bash /tmp/your_deploy_script.sh) as step 2, and then delete the script as step 3.
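A minimal sketch of that three-step flow, assuming an already-connected ssh2 Client named conn and a local deploy.sh containing the cd / su / git pull / chmod steps (both names are placeholders, not from the question):
// Assumes `conn` is an ssh2 Client that has already emitted 'ready'
function deployViaScript(conn, done) {
    // Step 1: upload the script over sftp
    conn.sftp((err, sftp) => {
        if (err) return done(err);
        sftp.fastPut('deploy.sh', '/tmp/deploy.sh', (err) => {
            if (err) return done(err);
            // Step 2: run it in a single shell, so cd and su persist between lines
            conn.exec('/bin/bash /tmp/deploy.sh', (err, stream) => {
                if (err) return done(err);
                stream.on('data', (data) => console.log(data.toString().trim()));
                stream.stderr.on('data', (data) => console.error(data.toString().trim()));
                stream.on('close', () => {
                    // Step 3: remove the script again
                    conn.exec('rm /tmp/deploy.sh', (err) => done(err));
                });
            });
        });
    });
}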
I know this is a super old question, but I ran into this problem while trying to manage an ACE through my Node server. The accepted answer didn't work for me, but several searches later I found a wrapper that worked really well. Just wanted to share it here because this was the top link in my Google search. It's called ssh2shell and can be found here: https://www.npmjs.com/package/ssh2shell
It's very simple to use, just pass an array of commands and they run one by one waiting for each command to complete before moving on to the next.
A practical example:
const { Client } = require('ssh2');

const client = new Client();
const cmds = [
    'ls -lah \n',
    'cd /mnt \n',
    'pwd \n',
    'ls -lah \n',
    'exit \n',
];
client.on('ready', () => {
    console.log('Client :: ready');
    client.shell((err, stream) => {
        stream.on('close', (code) => {
            console.log('stream :: close\n', { code });
        }).on('data', (myData) => {
            console.log('stream :: data\n', myData.toString());
        }).on('exit', (code) => {
            console.log('stream :: exit\n', { code });
            client.end();
        }).on('error', (e) => {
            console.log('stream :: error\n', { e });
        });
        for (let i = 0; i < cmds.length; i += 1) {
            const cmd = cmds[i];
            stream.write(`${cmd}`);
        }
    });
}).connect({
    host: '127.0.0.1',
    port: 22,
    username: 'root',
    password: 'root',
});
All the examples in the docs use stream.end(), which caused the creation of a new session instead of using the current one.
You couldn't use "shell" in your program because the "shell" command invokes a new terminal on the system to do its job. You need to use the "exec" command and keep in mind that, by default, "exec" emits "exit" after the command you gave has been executed.
