My task is to compile a JS file and serve it at http://localhost:5000/final.js.
I have the following script.
Current issues:
The console.log output prints out of order.
The script can switch directories and run yarn build, but it does not seem to serve the file.
Here is the source code:
#!/usr/bin/env node
const frontendDir = "/my/frontend";
const jsDir = "/my/frontend/build/static/js";

// promisified exec so commands can be awaited
const util = require("util");
const exec = util.promisify(require("child_process").exec);

// run a shell command and log its output
async function runcmd(cmd) {
  try {
    const { stdout, stderr } = await exec(cmd);
    console.log("stdout:", stdout);
    console.log("stderr:", stderr);
  } catch (err) {
    console.error(err);
  }
}

try {
  // go to the frontend dir
  process.chdir(frontendDir);
  console.log("Switch to dir: " + process.cwd());
  // yarn build
  runcmd("yarn build");
  // go to the build output dir
  process.chdir(jsDir);
  console.log("Switch to dir: " + process.cwd());
  // find the hashed js file, copy it, and rename it
  runcmd("cp main.*.js final.js");
  // serve /my/frontend/build/static/js so the file is at http://localhost:5000/final.js
  runcmd("serve .");
} catch (err) {
  console.log("chdir: " + err);
}
Here is the output from the script above:
Switch to dir: /my/frontend
Switch to dir: /my/frontend/build/static/js
stdout:
stderr:
stdout: yarn run v1.21.1
$ react-app-rewired build && cpr ./build/ ../build/frontend/ -o
Creating an optimized production build...
Compiled successfully.
File sizes after gzip:
210.3 KB build/static/js/main.c1e6b0e9.js
The project was built assuming it is hosted at ./.
You can control this with the homepage field in your package.json.
The build folder is ready to be deployed.
Find out more about deployment here:
Done in 13.91s.
stderr:
The commands run out of order because runcmd is async but is never awaited, so yarn build, the copy, and serve all start before the previous step has finished. Chaining the exec promises so each step waits for the one before it fixes the ordering and lets serve run from the right directory:
// https://stackoverflow.com/questions/41037042/nodejs-wait-for-exec-in-function
const exec = require("child_process").exec;

// wrap exec in a promise so commands can be sequenced
function os_func() {
  this.execCommand = function (cmd) {
    return new Promise((resolve, reject) => {
      exec(cmd, (error, stdout, stderr) => {
        if (error) {
          reject(error);
          return;
        }
        resolve(stdout);
      });
    });
  };
}
const os = new os_func();

process.chdir(frontendDir);
os.execCommand("yarn buildlocal")
  .then(() => {
    console.log("# done yarn build");
    process.chdir(jsDir);
    os.execCommand("cp main.*.js docman.js").then(() => {
      console.log("# done copy and serve at port 5000");
      os.execCommand("serve -l 5000 .").then(() => {});
    });
  })
  .catch(err => {
    console.log("os >>>", err);
  });
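For reference, here is a minimal sketch of the same flow written with async/await instead of nested .then() chains; it assumes the same frontendDir and jsDir paths as above:

#!/usr/bin/env node
const util = require("util");
const exec = util.promisify(require("child_process").exec);

const frontendDir = "/my/frontend";
const jsDir = "/my/frontend/build/static/js";

(async () => {
  try {
    // build first and wait for it to finish
    process.chdir(frontendDir);
    await exec("yarn build");
    console.log("# done yarn build");
    // then copy the hashed bundle to a stable name
    process.chdir(jsDir);
    await exec("cp main.*.js final.js");
    console.log("# done copy, serving at port 5000");
    // serve runs until the process is stopped, so this await never resolves
    await exec("serve -l 5000 .");
  } catch (err) {
    console.error(err);
  }
})();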
Related
I'm trying to open explorer.exe from a Node.js script running inside WSL Ubuntu 20.04. The issue I've encountered is that explorer.exe never opens the folder I'd like it to. Instead of the WSL user's home directory, it opens my Windows user's Documents folder. What should I do to make explorer.exe open the folder I want?
Here's what I've tried:
The script first defines a function execShellCommand that promisifies exec. Then a self-invoking async function converts process.env.HOME to a Windows path with wslpath and executes explorer.exe with the converted path as a parameter.
#!/usr/bin/node
const execShellCommand = async cmd => {
  const exec = require('child_process').exec
  return new Promise((resolve, reject) => {
    exec(cmd, (error, stdout, stderr) => {
      if (error) {
        console.warn(error)
      }
      resolve(stderr ? stderr : stdout)
    })
  })
}

;(async () => {
  const path = await execShellCommand(`wslpath -w "${process.env.HOME}"`)
  console.log({ path })
  await execShellCommand(`explorer.exe ${path}`)
})()
The output I get when I run my script in WSL:
$ ./script.js
{ path: '\\\\wsl$\\Ubuntu-20.04\\home\\user\n' }
Error: Command failed: explorer.exe \\wsl$\Ubuntu-20.04\home\user
at ChildProcess.exithandler (child_process.js:308:12)
at ChildProcess.emit (events.js:315:20)
at maybeClose (internal/child_process.js:1048:16)
at Process.ChildProcess._handle.onexit (internal/child_process.js:288:5) {
killed: false,
code: 1,
signal: null,
cmd: 'explorer.exe \\\\wsl$\\Ubuntu-20.04\\home\\user\n'
}
explorer.exe does run regardless of the error shown in the output. The weird part is that if I run the same command my script tries to run (explorer.exe \\\\wsl$\\Ubuntu-20.04\\home\\user\n) directly in a WSL terminal, explorer.exe does open the folder I want. Trimming the newline at the end of the path doesn't help.
I think you have to do some additional escaping on the backslashes that wslpath produces. The code below works for me, meaning it opens the correct directory in Windows Explorer.
Note: it does still throw the error you mentioned, which I think is due to the way node exits rather than anything wrong with the execution of explorer.exe; I'm not a node expert by any stretch.
#!/usr/bin/node
const execShellCommand = async cmd => {
  const exec = require('child_process').exec
  return new Promise((resolve, reject) => {
    exec(cmd, (error, stdout, stderr) => {
      if (error) {
        console.warn(error)
      }
      resolve(stderr ? stderr : stdout)
    })
  })
}

;(async () => {
  let path = await execShellCommand(`wslpath -w "${process.env.HOME}"`)
  console.log("before", { path });
  // escape each backslash so the shell passes it through intact
  path = path.replace(/\\/g, "\\\\");
  console.log("after", { path });
  await execShellCommand(`explorer.exe ${path}`)
})()
Even cleaner than replacing backslashes: I think this will work for you by resolving the $HOME variable directly in the command line:
await execShellCommand(`explorer.exe "$(wslpath -w $HOME)"`);
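Putting that together, a short sketch of the whole script with the shell doing the path conversion (same execShellCommand helper as in the question); explorer.exe is still expected to exit with a non-zero code:

#!/usr/bin/node
const { exec } = require('child_process')

const execShellCommand = cmd =>
  new Promise(resolve => {
    exec(cmd, (error, stdout, stderr) => {
      // explorer.exe reports a non-zero exit code even on success
      if (error) console.warn(error)
      resolve(stderr ? stderr : stdout)
    })
  })

;(async () => {
  // let the shell expand $(wslpath -w $HOME); no backslash escaping needed
  await execShellCommand(`explorer.exe "$(wslpath -w $HOME)"`)
})()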
I am trying to get my Sequelize migration scripts to run automatically when my Node application starts. I have manually tested the migration scripts to make sure they run correctly, by running the db:migrate command.
Now, I have added this file to run the migration scripts:
index.js
const { exec } = require('child_process');
const Sequelize = require('sequelize');
const config = require("config");

const sequelize = new Sequelize(config.get('postgres'));

async function start() {
  await new Promise((resolve, reject) => {
    const migrate = exec(
      'npm run db:migrate',
      { env: 'development' },
      (err, stdout, stderr) => {
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      }
    );
    // Forward stdout+stderr to this process
    migrate.stdout.pipe(process.stdout);
    migrate.stderr.pipe(process.stderr);
  });
}

module.exports = {
  start: start
};
And in server.js:
async function start(appStarted) {
  logger.info('Initializing ...');
  // execute pending migrations
  logger.info('Migrating DB...');
  await require('../migrations').start();
  logger.info('DB Migration complete.');
When I start the app, it displays Migrating DB... and gets stuck there.
How can I resolve this?
You can listen for the console message and kill the child process, like this:
// Listen for the console.log message and kill the process to proceed to the next step in the npm script
migrate.stdout.on('data', (data) => {
  console.log(data);
  if (data.indexOf('No migrations were executed, database schema was already up to date.') !== -1) {
    migrate.kill();
  }
});
This will make sure that the child process is killed when you've already run your migrations.
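For context, a sketch of how that listener could fit into the start() function from the question (same exec import as above); note that killing the child makes exec report an error, so the promise has to treat that case as success:

async function start() {
  await new Promise((resolve, reject) => {
    const migrate = exec('npm run db:migrate', (err) => {
      // a kill() shows up here as an error, so only reject real failures
      if (err && !migrate.killed) reject(err);
      else resolve();
    });
    migrate.stdout.on('data', (data) => {
      console.log(data);
      if (data.indexOf('No migrations were executed, database schema was already up to date.') !== -1) {
        migrate.kill();
      }
    });
    migrate.stderr.pipe(process.stderr);
  });
}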
I'm trying to kill a background process using Node.js process.kill(pid, 'SIGTERM'), but the process is not getting killed.
I executed the node script below and later ran ps -efww | grep 19783 | grep -v grep from the prompt to confirm the process was still not killed.
I can confirm that the process it is trying to kill was started by the same user, so there is no permission issue.
Is there something I need to pass to get the process killed?
Node Version: 8.11.1
OS: Linux 3.10.0-327.10.1.e17.x86_64
Reference: Node process
Code:
'use strict';
const argv = require('yargs').argv;
const exec = require('child_process').exec;

function execute(command) {
  console.log("Executing Command : ", command);
  return new Promise((resolve, reject) => {
    exec(command, {
      maxBuffer: 1024 * 5000000
    }, (error, stdout, stderr) => {
      if (error) {
        console.log(`ERROR: Something went wrong while executing ${command}: ${error}`);
        reject(error);
      } else {
        resolve(stdout);
      }
    });
  });
}

function kill(pid) {
  try {
    console.log(`Killing Process : ${pid}`);
    process.kill(pid, 'SIGTERM');
    let command = `ps -efww | grep ${pid} | grep -v grep | grep -v dzdo `;
    let output = execute(command).then(res => {
      console.log(`output: ${res}`);
    }).catch(err => console.log(err));
  } catch (e) {
    console.log(`Invalid Process ID:${pid}, failed during kill, "ERROR: ${e}"`);
  }
}

function main() {
  if (argv.kill) {
    let allPIDs = argv.kill || undefined;
    // console.log(`ALL PID's: ${allPIDs}`);
    // remove all whitespace (note the g flag), then split on commas
    allPIDs = allPIDs.toString().replace(/\s/g, '').split(',');
    if (allPIDs.length > 0) {
      allPIDs.forEach(pid => {
        if (!isNaN(pid)) {
          // console.log(`Valid PID: ${pid}`);
          kill(pid);
        } else {
          console.log(`ERROR: Invalid Process ID : ${pid}, Skipped Kill `);
        }
      });
    }
  }
}

main();
Assuming this code is saved as killer.js
Usage: node killer.js --kill=19783
Try SIGKILL instead of SIGTERM.
The doc says:
'SIGKILL' cannot have a listener installed, it will unconditionally terminate Node.js on all platforms.
So I think it is worth trying.
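As a minimal sketch, the change inside the question's kill() function would look like this; the setTimeout is an extra assumption, added because signal delivery is asynchronous and an immediate ps check can still show the dying process:

function kill(pid) {
  try {
    console.log(`Killing Process : ${pid}`);
    // SIGKILL cannot be caught or ignored by the target process
    process.kill(pid, 'SIGKILL');
    // wait briefly before verifying, since the signal is delivered asynchronously
    setTimeout(() => {
      // note: grep exits non-zero when nothing matches, so a killed process lands in catch
      execute(`ps -efww | grep ${pid} | grep -v grep`)
        .then(res => console.log(`output: ${res}`))
        .catch(err => console.log(err));
    }, 1000);
  } catch (e) {
    console.log(`Invalid Process ID: ${pid}, failed during kill, "ERROR: ${e}"`);
  }
}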
I am executing shell commands from my Node.js script. One of these commands is npm install, followed by a command that runs the index file of a Node.js service.
The npm install command returns an error generated by node-gyp. In general, this error does not affect my service, but child_process.exec catches it and stops the script. My question is: how do I trigger the exec command and ignore the error returned?
Below is a fraction of the code snippet:
const exec = require('child_process').exec;

exec("npm install", {
  cwd: serviceDirectory + gitRepo
},
(error1, stdout, stderr) => {
  if (error1) {
    // this error is for testing purposes
    util.log(error1);
  }
  // run the service
  exec("node index.js", {
    cwd: serviceDirectory + gitRepo + "/"
  }, cb);
});
You can use try/catch to handle the errors, for example:
import { promisify } from 'util';
import child_process from 'child_process';

const exec = promisify(child_process.exec);

export default async function () {
  const dataFormat = {
    stdout: '',
    stderr: '',
  };
  // copies, so a failed command does not mutate the other result via a shared object
  let cpu = { ...dataFormat };
  let diskUsed = { ...dataFormat };
  try {
    cpu = await exec('top -bn1 | grep "Cpu(s)" | sed "s/.*, *\\([0-9.]*\\)%* id.*/\\1/"');
  } catch (error) {
    cpu.stderr = error.stderr;
  }
  try {
    diskUsed = await exec("df -h | awk 'NR==2{printf $3}'");
  } catch (error) {
    diskUsed.stderr = error.stderr;
  }
  const payload = {
    cpu,
    diskUsed,
  };
  return payload;
}
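A quick usage sketch, assuming the function above is saved as stats.js and the project runs as an ES module (top-level await):

import getStats from './stats.js';

const payload = await getStats();
// on failure, stderr is populated instead of the call throwing
console.log(payload.cpu.stdout, payload.diskUsed.stdout);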
I would like to git pull, commit, and push from Node.js with child_process - is this supposed to work?
var cmd = require('child_process');
var commandString = "cd c:\\xampp\\htdocs\\MenuMakerServer\\experiments\\editormenu && git commit -am 'menu.json changes' && git push origin main";

cmd.exec(commandString, function (error, stdout, stderr) {
  if (error) {
    callback(error.stack, null);
  }
});
EDIT:
OK, I managed to get this to work:
var sys = require('sys')
var exec = require('child_process').exec;
function puts(error, stdout, stderr) { sys.puts(stdout) }
var options = { cwd: "c:\\xampp\\htdocs\\MenuMakerServer\\projects\\editormenu" };
exec("git status && git pull && git commit -am 'menu changed' && git push", options, puts);
Define a Node.js module something like the code below:
var exec = require('child_process').exec;

// run a single command
exports.exec = function (cmd, callback) {
  exec(cmd, function (error, stdout, stderr) {
    callback(error);
  });
};

// run an array of commands one after another, stopping on the first error
exports.series = function (cmds, callback) {
  var execNext = function () {
    exports.exec(cmds.shift(), function (error) {
      if (error) {
        callback(error);
      } else {
        if (cmds.length) execNext();
        else callback(null);
      }
    });
  };
  execNext();
};
Then you can run it:
myProcessor.series([
  // note: each command runs in its own shell, so this cd does not carry over
  // to the git commands; passing a cwd option to exec is more reliable
  'cd c:\\xampp\\htdocs\\MenuMakerServer\\experiments\\editormenu',
  'git commit -am "menu.json changes"',
  'git push origin main'
], function (err) {
  console.log('executed many commands in a row');
});
NOTE: Here myProcessor is the require variable name (something like var myProcessor = require('./path/to/above/code/file');) for the above code snippet.
No, that won't work... it looks like you are combining DOS shell commands and Unix shell commands. Specifically, c:\ is a DOS path, while chaining commands with && is Unix shell syntax. Which environment are you using?
If you are using DOS, then you need to make a .bat file and call the batch file. This is nice because you can use parameters.
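For what it's worth, a small sketch that sidesteps the cd problem by passing cwd to exec; the repo path is the one from the question:

const { exec } = require('child_process');

const options = { cwd: 'c:\\xampp\\htdocs\\MenuMakerServer\\experiments\\editormenu' };

// cwd replaces the cd step; && chaining works in cmd.exe as well as Unix shells
exec('git commit -am "menu.json changes" && git push origin main', options,
  (error, stdout, stderr) => {
    if (error) {
      console.error(error);
      return;
    }
    console.log(stdout);
  });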