I would like to git pull, commit and push from Node.js with child_process - is this supposed to work?
var cmd = require('child_process');
var commandString = "cd c:\\xampp\\htdocs\\MenuMakerServer\\experiments\\editormenu && git commit -am 'menu.json changes' && git push origin main";
cmd.exec(commandString, function (error, stdout, stderr) {
    if (error) {
        // callback is assumed to be defined in the enclosing scope
        callback(error.stack, null);
    }
});
EDIT: OK, I managed to get this to work:
var exec = require('child_process').exec;
// Print the command's stdout when it finishes
function puts(error, stdout, stderr) { console.log(stdout); }
var options = { cwd: "c:\\xampp\\htdocs\\MenuMakerServer\\projects\\editormenu" };
exec("git status && git pull && git commit -am 'menu changed' && git push", options, puts);
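For reference, a minimal modern sketch of the same idea with util.promisify (available in Node 8+) and async/await:
const util = require('util');
const exec = util.promisify(require('child_process').exec);
async function pushMenuChanges() {
    const options = { cwd: 'c:\\xampp\\htdocs\\MenuMakerServer\\projects\\editormenu' };
    // Both git commands run with options.cwd as the working directory
    const { stdout } = await exec("git commit -am 'menu changed' && git push", options);
    console.log(stdout);
}
pushMenuChanges().catch(console.error);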
Define a Node.js module something like the code below:
var exec = require('child_process').exec;

exports.series = function (cmds, callback) {
    var execNext = function () {
        // Run the next command; recurse until the list is empty
        exec(cmds.shift(), function (error) {
            if (error) {
                callback(error);
            } else {
                if (cmds.length) execNext();
                else callback(null);
            }
        });
    };
    execNext();
};
Then you can run it:
myProcessor.series([
    'cd c:\\xampp\\htdocs\\MenuMakerServer\\experiments\\editormenu',
    'git commit -am "menu.json changes"',
    'git push origin main'
], function (err) {
    console.log('executed many commands in a row');
});
NOTE: Here myProcessor is the variable name from the require call (something like var myProcessor = require('./path/to/above/code/file');) for the above code snippet.
No, that won't work... it looks like you are combining DOS shell commands and Unix shell conventions. Specifically, c:\ is a DOS path, while chaining commands with && is Unix shell style. Which environment are you using?
If you are using DOS then you need to make a .bat file and call the batch. This is nice because you can use parameters.
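For example, a minimal sketch along those lines (deploy.bat and its contents are hypothetical, not from the original answer):
const { exec } = require('child_process');
// deploy.bat (hypothetical) might contain:
//   cd /d %1
//   git commit -am "menu.json changes"
//   git push origin main
exec('deploy.bat "c:\\xampp\\htdocs\\MenuMakerServer\\experiments\\editormenu"', (error, stdout, stderr) => {
    if (error) {
        console.error(error);
        return;
    }
    console.log(stdout);
});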
Basically, my task is to compile a JS file and serve it at http://localhost:5000/final.js
I have the following script.
Current issues:
The console.log output prints out of order.
It can switch directories and run yarn build, but it does not seem to serve the file.
Here is the source code:
#!/usr/bin/env node
const frontendDir = "/my/frontend";
const jsDir = "/my/frontend/build/static/js";
// util
const util = require("util");
// exec
const exec = util.promisify(require("child_process").exec);
// async func
async function runcmd(cmd) {
try {
const { stdout, stderr } = await exec(cmd);
// need output
console.log("stdout:", stdout);
// need error
console.log("stderr:", stderr);
} catch (err) {
console.error(err);
}
}
try {
// go to front end dir
process.chdir(frontendDir);
console.log("Switch to dir: " + process.cwd());
// yarn build
runcmd("yarn build");
// go to file dir
process.chdir(jsDir);
console.log("Switch to dir: " + process.cwd());
// find that js file and copy it, rename it
runcmd("cp main.*.js final.js");
// serve at /my/frontend/build/static/js with url http://localhost:5000/final.js
runcmd("serve .");
} catch (err) {
console.log("chdir: " + err);
}
Here is the output from the script above:
Switch to dir: /my/frontend
Switch to dir: /my/frontend/build/static/js
stdout:
stderr:
stdout: yarn run v1.21.1
$ react-app-rewired build && cpr ./build/ ../build/frontend/ -o
Creating an optimized production build...
Compiled successfully.
File sizes after gzip:
210.3 KB build/static/js/main.c1e6b0e9.js
The project was built assuming it is hosted at ./.
You can control this with the homepage field in your package.json.
The build folder is ready to be deployed.
Find out more about deployment here:
Done in 13.91s.
stderr:
The commands run out of order because runcmd is async but is never awaited, so all three steps start immediately. One fix is to wrap exec in a Promise and only start each step when the previous one has resolved (adapted from https://stackoverflow.com/questions/41037042/nodejs-wait-for-exec-in-function):
const exec = require("child_process").exec;
function os_func() {
this.execCommand = function(cmd) {
return new Promise((resolve, reject) => {
exec(cmd, (error, stdout, stderr) => {
if (error) {
reject(error);
return;
}
resolve(stdout);
});
});
};
}
const os = new os_func();
process.chdir(frontendDir);
os.execCommand("yarn buildlocal")
.then(() => {
console.log("# done yarn build");
process.chdir(jsDir);
os.execCommand("cp main.*.js docman.js").then(() => {
console.log("# done copy and serve at port 5000");
os.execCommand("serve -l 5000 .").then(() => {});
});
})
.catch(err => {
console.log("os >>>", err);
});
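With async/await the same sequencing reads more linearly; here is a sketch under the same assumptions (frontendDir and jsDir as defined in the question):
const util = require('util');
const exec = util.promisify(require('child_process').exec);
async function buildAndServe() {
    process.chdir(frontendDir);
    await exec('yarn buildlocal'); // wait for the build to finish
    console.log('# done yarn build');
    process.chdir(jsDir);
    await exec('cp main.*.js docman.js'); // copy before serving
    console.log('# done copy and serve at port 5000');
    await exec('serve -l 5000 .'); // resolves only when the server exits
}
buildAndServe().catch(err => console.log('os >>>', err));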
I am creating a single code base containing multiple projects, and each project can be published.
To create a streamlined continuous integration process, I want my build agent to run a single command that publishes all the projects that need to be published. Obviously this doesn't work, because a publish fails if it would publish over a previously published version, and that failure stops the command so that all following commands do not execute. Something like this:
"build-package1": "ng build package1",
"publish-package1": "npm publish ./dist/package1",
"build-package2": "ng build package2",
"publish-package2": "npm publish ./dist/package2",
"build-all": "npm run build-package1 && npm run build-package2",
"publish-all": "npm run publish-package1 && npm run publish-package2"
So in this scenario, if I run build-all and then publish-all, and package1 has not had a version change in its package.json file, the command will fail and package2 will not be published.
Is there a command I can run that checks whether a publish is needed before trying to publish?
I have seen this answer:
https://askubuntu.com/questions/334994/which-one-is-better-using-or-to-execute-multiple-commands-in-one-line/539293#539293
Ideally, though, I think finding out whether a publish would succeed before attempting it would be cleaner.
It is possible to install npm locally in the project and then use its programmatic API to check the latest published version, and the Node file system to check local versions, before calling an imperative npm publish command:
import * as fs from 'fs';
import * as npm from 'npm';
function getLocalPackageData(path): Promise<IPackageData> {
    return new Promise((resolve, reject) => {
        fs.readFile(path + '/package.json', (err, data) => {
            if (err) { reject(err); return; }
            const pData = JSON.parse(data as unknown as string);
            resolve({ version: pData.version, name: pData.name });
        });
    });
}
function getPublishedPackageData(packageName: string): Promise<IPackageData> {
    return new Promise((resolve, reject) => {
        npm.commands.show([packageName], function (err, result) {
            if (err) { reject(err); return; }
            const latestVersion = Object.keys(result)[0];
            resolve({ version: latestVersion, name: result[latestVersion].name });
        });
    });
}
function needsPublish(latestPackageSegments: IPackageVersionSegments, localPackageSegments: IPackageVersionSegments): boolean {
    const majorGreater = localPackageSegments.major > latestPackageSegments.major;
    const majorEqual = localPackageSegments.major === latestPackageSegments.major;
    const minorGreater = localPackageSegments.minor > latestPackageSegments.minor;
    const minorEqual = localPackageSegments.minor === latestPackageSegments.minor;
    const bugFixGreater = localPackageSegments.bug > latestPackageSegments.bug;
    // Publish only when the local version is strictly ahead of the published one
    return majorGreater ||
        (majorEqual && minorGreater) ||
        (majorEqual && minorEqual && bugFixGreater);
}
From there you can compare the local version against the published one and, for the packages that need updating, run:
npm.commands.publish(projectPathsToUpdate, (error, res) => {
    if (error) {
        throw error;
    }
    console.log('================= published: =================');
    packagesToUpdate.forEach(p => console.log(p.name + ' updated to ' + p.version));
});
Hope this helps someone in the future - possibly me again.
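Note that newer npm versions no longer ship the programmatic require('npm') API used above, so an alternative sketch (the paths and the plain inequality check are simplifications) is to shell out to npm view, which prints the latest published version, and compare it with the local package.json before publishing:
const { execSync } = require('child_process');
const fs = require('fs');
function publishIfNewer(projectPath) {
    const pkg = JSON.parse(fs.readFileSync(projectPath + '/package.json', 'utf8'));
    let published = '';
    try {
        // Latest version on the registry, e.g. "1.2.3"
        published = execSync('npm view ' + pkg.name + ' version', { encoding: 'utf8' }).trim();
    } catch (e) {
        // npm view fails if the package has never been published
    }
    if (pkg.version !== published) {
        execSync('npm publish ' + projectPath, { stdio: 'inherit' });
        console.log(pkg.name + ' published as ' + pkg.version);
    } else {
        console.log(pkg.name + '@' + pkg.version + ' already published, skipping');
    }
}
['./dist/package1', './dist/package2'].forEach(publishIfNewer);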
I am trying to get my Sequelize migration scripts to run automatically when my Node application starts. I have manually tested the migration scripts to make sure they run correctly, by running the db:migrate command.
Now, I have added this file to run the migration scripts:
index.js
const {exec} = require('child_process');
const Sequelize = require('sequelize');
const config = require("config");
const sequelize = new Sequelize(config.get('postgres'));
async function start() {
await new Promise((resolve, reject) => {
const migrate = exec(
'npm run db:migrate',
{env: 'development'},
(err, stdout, stderr) => {
if (err) {
reject(err);
} else {
resolve();
}
}
);
// Forward stdout+stderr to this process
migrate.stdout.pipe(process.stdout);
migrate.stderr.pipe(process.stderr);
});
}
module.exports = {
start: start
};
And in server.js:
async function start(appStarted) {
logger.info('Initializing ...');
// execute pending migrations
logger.info('Migrating DB...');
await require('../migrations').start();
logger.info('DB Migration complete.');
When I start the app, it displays Migrating DB... and gets stuck there.
How can I resolve this?
You can listen for the console message and kill the child process, like this:
// Listen for the console.log message and kill the process to proceed to the next step in the npm script
migrate.stdout.on('data', (data) => {
console.log(data);
if (data.indexOf('No migrations were executed, database schema was already up to date.') !== -1) {
migrate.kill();
}
});
This will make sure that the child process is killed when you've already run your migrations.
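One more observation (about the question's code, not part of the original answer): exec's options.env expects an object of environment variables, not a string, so {env: 'development'} does not do what it appears to. If the intent is to set NODE_ENV, something like this drop-in for the exec call inside the question's Promise is probably what was meant:
const migrate = exec(
    'npm run db:migrate',
    // Inherit the parent environment and override NODE_ENV
    { env: { ...process.env, NODE_ENV: 'development' } },
    (err, stdout, stderr) => (err ? reject(err) : resolve())
);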
I wrote a Node.js script which finds the last changed/modified file names.
For that I am using the find CLI command. I have one hidden file .change to compare the other files against (by modified time).
Here is the code:
const es6dir = 'es6';
const path2dir = './htdocs/';
const exec = require("child_process").exec;
exec(`find ${path2dir + es6dir}/ -type f -newer .change`, (error, stdout) => {
if(error){
console.log(`Error: ${error}`);
return;
}
console.log(stdout);
//update .change modified timestamp
exec('touch -c .change');
});
Everything works fine if I run this command in Git Bash, but if I use the Windows terminal it says the command is incorrect.
Is there a simple way that will work in both Linux and Windows terminals?
I would like to run this command on both platforms, because some of the team members work on Linux while others use Windows machines.
Consider using Node's built-in fs.Stats instead of platform-specific commands or utilities. The fs module's fs.stat method returns a Stats object whose mtime property holds the last modified time (mtimeMs is the same value in milliseconds).
Cross compatibility can be achieved either through child processes or by using fs.stat and fs.writeFile.
fs.stat returns a Stats object like this:
Stats {
dev: 16777220,
mode: 33188,
nlink: 1,
uid: 501,
gid: 20,
rdev: 0,
blksize: 4096,
ino: 5077219,
size: 11,
blocks: 8,
atimeMs: 1556271390822.264,
mtimeMs: 1556271389892.5886,
ctimeMs: 1556271389892.5886,
birthtimeMs: 1556270439285.706,
atime: 2019-04-26T09:36:30.822Z,
mtime: 2019-04-26T09:36:29.893Z,
ctime: 2019-04-26T09:36:29.893Z,
birthtime: 2019-04-26T09:20:39.286Z }
As suggested in the comments and in an answer, I agree this would be a better approach. Here is how you can check the file's last modified date and update the file:
const fs = require('fs');
// Directory containing the marker file
const PATH = '.';
// Get the file's stats
fs.stat(`${PATH}/.change`, function (error, stats) {
    if (error) { throw error; } // Throws if the file is not found
    let time = Date.now(); // Current time
    console.log(`Current .change: Modified: `, stats.mtime); // Last modified time
    // If current time > file modified time
    if (time > stats.mtime) {
        // writeFile with filename, content and callback
        fs.writeFile(`${PATH}/.change`, 'Inside File', function (error) {
            if (error) { throw error; }
            console.log('File is updated successfully.');
        });
    }
});
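To replicate the original find ... -newer .change behaviour portably, a sketch like the following (directory names taken from the question; assumes Node 10.10+ for withFileTypes) walks the tree with fs and compares modified times:
const fs = require('fs');
const path = require('path');
const es6dir = path.join('./htdocs', 'es6');
const markerMs = fs.statSync('.change').mtimeMs; // reference timestamp
function findNewerFiles(dir, newerThanMs, results = []) {
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
        const full = path.join(dir, entry.name);
        if (entry.isDirectory()) {
            findNewerFiles(full, newerThanMs, results); // recurse into subdirectories
        } else if (entry.isFile() && fs.statSync(full).mtimeMs > newerThanMs) {
            results.push(full);
        }
    }
    return results;
}
console.log(findNewerFiles(es6dir, markerMs));
// "touch" the marker portably by rewriting its timestamps
fs.utimesSync('.change', new Date(), new Date());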
I'm trying to deploy from GitHub. I want to execute more than one command, in the order of the array. The code I'm using now is included below.
async.series([
...
// Deploy from GitHub
function (callback) {
// Console shizzle:
console.log('');
console.log('Deploying...'.red.bold);
console.log();
console.log();
var deployFunctions = [
{
command: 'cd ' + envOptions.folder + ' && pwd',
log: false
},
{
command: 'pwd'
},
{
command: 'su ' + envOptions.user,
log: false
},
{
command: 'git pull'
},
{
command: 'chmod 0777 * -R',
log: false
}
];
async.eachSeries(deployFunctions, function (item, callback) {
deployment.ssh2.exec(item.command, function (err, stream) {
deployment.logExec(item);
stream.on('data', function (data, extended) {
console.log(data.toString().trim());
console.log();
});
function done() {
callback(err);
}
stream.on('exit', done);
stream.on('end', done);
});
}, function () {
callback();
});
},
...);
But after I cd into the right directory, it forgets where it was and starts all over again:
$ cd /some/folder && pwd
/some/folder
$ pwd
/root
#robertklep is correct about why your cd doesn't persist. Each command invokes a distinct shell instance which starts in its initial state. You could prefix each command with cd /home/jansenstok/domains/alcoholtesterwinkel.com/public_html/ && as a quick fix, but really you are setting yourself up for pain. What you want is a shell script with all the power of multiple lines as opposed to a list of individual disconnected commands.
Look at using ssh2's sftp function to transfer a complete shell script to the remote machine as step 1, execute it via exec (/bin/bash /tmp/your_deploy_script.sh) as step 2, and then delete the script as step 3.
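A minimal sketch of that three-step idea (assuming an established ssh2 connection conn, and a locally generated deploy.sh; the names are illustrative):
const fs = require('fs');
// Step 1: upload the script over SFTP
conn.sftp((err, sftp) => {
    if (err) throw err;
    sftp.fastPut('./deploy.sh', '/tmp/your_deploy_script.sh', (err) => {
        if (err) throw err;
        // Step 2: execute it in a single shell session
        conn.exec('/bin/bash /tmp/your_deploy_script.sh', (err, stream) => {
            if (err) throw err;
            stream.on('data', (data) => process.stdout.write(data.toString()));
            stream.on('close', () => {
                // Step 3: remove the script again
                conn.exec('rm /tmp/your_deploy_script.sh', () => conn.end());
            });
        });
    });
});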
I know this is a super old question, but I ran into this problem while trying to manage an ACE through my Node server. The answer didn't work for me, but several searches later led me to a wrapper that worked really well for me. Just wanted to share here because this was the top link in my Google search. It's called ssh2shell and can be found here: https://www.npmjs.com/package/ssh2shell
It's very simple to use, just pass an array of commands and they run one by one waiting for each command to complete before moving on to the next.
A practical example:
const { Client } = require('ssh2');
const client = new Client();
const cmds = [
'ls -lah \n',
'cd /mnt \n',
'pwd \n',
'ls -lah \n',
'exit \n',
];
client.on('ready', () => {
console.log('Client :: ready');
client.shell((err, stream) => {
stream.on('close', (code) => {
console.log('stream :: close\n', { code });
}).on('data', (myData) => {
console.log('stream :: data\n', myData.toString());
}).on('exit', (code) => {
console.log('stream :: exit\n', { code });
client.end();
}).on('error', (e) => {
console.log('stream :: error\n', { e });
client.end(); // close the connection on error
});
for (let i = 0; i < cmds.length; i += 1) {
const cmd = cmds[i];
stream.write(`${cmd}`);
}
});
}).connect({
host: '127.0.0.1',
port: 22,
username: 'root',
password: 'root',
});
All the examples in the docs use stream.end(), which caused the creation of a new session instead of using the current one.
You shouldn't use shell in your program, because the shell command invokes a new terminal on the system to do its job. You need to use the exec command instead; by default, exec emits an 'exit' event after the command you gave it has finished executing.
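A sketch of that exec approach with ssh2 (connection setup omitted; the command is just an example):
conn.exec('ls -lah /mnt', (err, stream) => {
    if (err) throw err;
    stream.on('data', (data) => process.stdout.write(data.toString()));
    // 'exit' fires once the remote command has finished
    stream.on('exit', (code) => {
        console.log('command exited with code', code);
        conn.end();
    });
});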