I am using gulp-exec (https://www.npmjs.com/package/gulp-exec). After reading some of its documentation, it mentions that if I just want to run a command I shouldn't use the plugin, but should instead use code like what I've tried below.
var exec = require('child_process').exec;

gulp.task('server', function (cb) {
    exec('start server', function (err, stdout, stderr) {
        .pipe(stdin(['node lib/app.js', 'mongod --dbpath ./data']))
        console.log(stdout);
        console.log(stderr);
        cb(err);
    });
})
I'm trying to get gulp to start my Node.js server and MongoDB; that is what I'm trying to accomplish. In my terminal window, it's complaining about my
.pipe
However, I'm new to gulp and I thought that is how you pass commands/tasks through. Any help is appreciated, thank you.
gulp.task('server', function (cb) {
    exec('node lib/app.js', function (err, stdout, stderr) {
        console.log(stdout);
        console.log(stderr);
        cb(err);
    });
    exec('mongod --dbpath ./data', function (err, stdout, stderr) {
        console.log(stdout);
        console.log(stderr);
        cb(err);
    });
})
For future reference, and in case anyone else comes across this problem:
The above code fixed my problem. Basically, I found out that exec is its own function and therefore doesn't need
.pipe
I thought that this line:
exec('start server', function (err, stdout, stderr) {
was the name of the task I am running; however, it is actually the command that will be run. Therefore, I changed it to point to app.js, which runs my server, and did the same to point to my MongoDB.
EDIT
As #N1mr0d mentioned below, since this approach gives no server output, a better method to run your server is to use nodemon. You can simply run nodemon server.js like you would run node server.js.
The code snippet below is what I now use in my gulp task to run my server with nodemon:
// requires for this task
var gulp = require('gulp');
var nodemon = require('gulp-nodemon');
var notify = require('gulp-notify');

// start our server and listen for changes
gulp.task('server', function() {
    // configure nodemon
    nodemon({
        // the script to run the app
        script: 'server.js',
        // this listens to changes in any of these files/routes and restarts the application
        watch: ['server.js', 'app.js', 'routes/', 'public/*', 'public/*/**'],
        ext: 'js'
        // Below I'm using an ES6 arrow function, but you can remove the arrow and use a normal .on('restart', function() { /* then place your stuff in here */ })
    }).on('restart', () => {
        gulp.src('server.js')
            // I've added notify, which displays a message on restart. It was more for me to test, so you can remove this
            .pipe(notify('Running the start tasks and stuff'));
    });
});
Link to install Nodemon : https://www.npmjs.com/package/gulp-nodemon
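For reference, installing the two plugins used above would be something like:
npm install --save-dev gulp-nodemon gulp-notify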
This solution shows stdout/stderr as they occur and does not use third-party libs:
var spawn = require('child_process').spawn;

gulp.task('serve', function() {
    spawn('node', ['lib/app.js'], { stdio: 'inherit' });
});
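The original question also wanted MongoDB started from the same task; a minimal sketch along the same lines (assuming mongod is on your PATH and the ./data directory exists):
var spawn = require('child_process').spawn;

gulp.task('serve', function() {
    // both children inherit stdio, so their output appears directly in the gulp console
    spawn('mongod', ['--dbpath', './data'], { stdio: 'inherit' });
    spawn('node', ['lib/app.js'], { stdio: 'inherit' });
});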
You can also create a gulp task that runs the node server like this:
const exec = require('child_process').exec;

gulp.task('server', (cb) => {
    // pass the gulp callback to exec so the task can signal completion
    exec('node server.js', err => cb(err));
});
If you want your console to show everything the child process outputs, and to pass all the environment variables you already have set through to the child process:
const exec = require('child_process').exec;

function runCommand(command, cb) {
    const child = exec(command, { env: process.env }, function (err) {
        cb(err);
    });
    child.stdout.on('data', (data) => {
        process.stdout.write(data);
    });
    child.stderr.on('data', (data) => {
        process.stdout.write(`Error: [${data}]`);
    });
}
Note that both out and err write to stdout; this is intentional in my case, but you can adapt it to whatever you need.
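As a usage sketch (the task name and command here are placeholders of mine, not from the answer), the helper could be wired into a gulp task like this:
// hypothetical gulp task passing gulp's callback straight through to the helper above
gulp.task('server', function (cb) {
    runCommand('node server.js', cb);
});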
Related
I want to execute the following shell command with exec from JavaScript:
"sed -i 4r<(sed '1,5!d' /etc/wpa_supplicant/wpa_template.template) /etc/wpa_supplicant/wpa_supplicant.conf"
When I try this command in the shell console it works perfectly, but after starting it from the .js program nothing happens. I just want to copy the first 5 lines of the template and add them to the config file.
You can do this with exec from child_process:
const exec = require('child_process').exec

// Note: process substitution <( ... ) is a bash feature and exec runs commands through /bin/sh by default,
// so the shell is set to bash explicitly here; otherwise /bin/sh rejects the <( ) syntax
exec("sed -i 4r<(sed '1,5!d' /etc/wpa_supplicant/wpa_template.template) /etc/wpa_supplicant/wpa_supplicant.conf", { shell: '/bin/bash' }, (err, stdout, stderr) => {
  if (err) {
    return console.error(err)
  }
  console.log(`stdout: ${stdout}`)
  console.log(`stderr: ${stderr}`)
})
You can use the code below to execute any shell command and write its output to a new file (creating the file) every time you run it. Then you can use the line-reader module to read the lines from that file.
npm install --save line-reader
const fs = require('fs');
const lineReader = require('line-reader');
const { exec } = require('child_process');
const cmd = "sed -i 4r<(sed '1,5!d' /etc/wpa_supplicant/wpa_template.template) /etc/wpa_supplicant/wpa_supplicant.conf"
exec(cmd, (err, stdout, stderr) => {
    if (err) {
        // some err occurred
        console.error(err)
    } else {
        // the *entire* stdout and stderr (buffered)
        fs.writeFile('file path', stdout, (err) => {
            // throws an error, you could also catch it here
            if (err) throw err;
            // success case, the file was saved
            console.log('file saved');
            readFile1();
        });
    }
});
const readFile1 = () => lineReader.eachLine('file path', function(line) {
    // add your own logic here to take the number of lines you want
    console.log(line);
});
I'm trying to open an application using Node.js (I'd like this to work cross-platform: macOS/Windows, etc.).
Here's the code:
const { exec } = require('child_process')
const fs = require('fs')

// giving the required permissions, not really sure if 777 is the right choice
fs.chmodSync(appPath, 777);

exec("open -a " + path, (err, stdout, stderr) => {
    console.log(err);
    console.log(stdout);
    console.log(stderr);
});
console.log(err); // logs this error
{ Error: Command failed: open -a /location.app
LSOpenURLsWithRole() failed for the application /location.app with error -10810
When executing the same command in the terminal (outside of Node), it raises the same issue.
I have a Node.js app based on a microservice architecture, which I am planning to run on a Kubernetes cluster by containerizing it.
I need this app to query the DB for all the cron entries and add these entries to the crontab on the same server where my application is running.
When I query for all my cron entries, I get a list of cron jobs, as below:
1 * * * * root node /home/project/app.js 103
1 * * * * root node /home/project/app.js 104
1 * * * * root node /home/project/app.js 105
I need to add these cron jobs dynamically to the server (the server here is my app container, which is based on an Ubuntu image) whenever I do a DB query to list the jobs.
How do I add these cron jobs from my application to my server?
Is there a direct solution to this, or can it be done with a library in Node.js?
I looked through the shelljs lib for Node.js, but I found it a bit complex, so I couldn't try it.
Any help is appreciated.
I wouldn't recommend writing multiple cron entries for one application, because if you have thousands of entries for multiple apps the crontab will get very large, and adding entries by hand will become very hard.
If the required execution time never changes, just create a "cron.sh" file in your Node.js root directory and write all the commands into it.
crontab -e: 1 * * * * root /home/project/cron.sh > /home/project/cron.log
Content of /home/project/cron.sh:
#!/bin/bash
# Change directory:
cd /home/project/
# Run the script with all parameters
node ./app.js 103
node ./app.js 104
node ./app.js 105
Don't forget to chmod 0770 cron.sh so that it is executable and, because you seem to run this cron as root, so that other users are prevented from executing commands as root.
Edit: for changing the crontab file directly, you can use the following Node code to first export, then modify, and finally import the jobs again:
const exec = require('child_process').exec;
const fs = require('fs');

// Read content of the crontab
function ReadCronFile(cb) {
    exec("crontab -l", function(err, stdout, stderr) {
        cb(stdout);
    });
}

// Write a crontab file
function WriteCronFile(data, cb) {
    fs.writeFile("tmp.txt", data, function(err) {
        exec("crontab tmp.txt", function(err, stdout, stderr) {
            fs.unlink("tmp.txt", function(err) {
                cb();
            });
        });
    });
}

// Example usage
ReadCronFile(function(data) { console.log(data); });
WriteCronFile("...", function() { console.log("Done!"); });
Note: code untested
Or, with Promises (tested by the OP):
const exec = require('child_process').exec;
const fs = require('fs');

// Read content of the crontab
function ReadCronFile() {
    return new Promise(function(resolve, reject) {
        exec("crontab -l", function(err, stdout, stderr) {
            if (err) return reject(err);
            else return resolve(stdout);
        });
    });
}

// Write a crontab file
function WriteCronFile(data) {
    // wrapped in a Promise so that resolve/reject are defined
    return new Promise(function(resolve, reject) {
        fs.writeFile("tmp.txt", data, function(err) {
            if (err) return reject(err);
            exec("crontab tmp.txt", function(err, stdout, stderr) {
                fs.unlink("tmp.txt", function(err) {
                    if (err) return reject(err);
                    else return resolve();
                });
            });
        });
    });
}
// Example usage
ReadCronFile().then(function(data) {
    console.log(data);
}).catch(function(err) {
    console.log(err);
});

WriteCronFile("...").then(function() {
    console.log("Done!");
}).catch(function(err) {
    console.log(err);
});
For parsing the crontab file you can use https://github.com/harrisiirak/cron-parser
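A minimal sketch of parsing one of the schedules above with it (parseExpression and next() are its documented API; the expression is taken from the entries in the question):
const parser = require('cron-parser');

// parse the "1 * * * *" schedule used by the entries above and inspect the upcoming run times
const interval = parser.parseExpression('1 * * * *');
console.log(interval.next().toString());
console.log(interval.next().toString());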
Try the cron npm module.
It's very easy to start with and has a lot of features for implementing complex jobs.
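For example, a small sketch of scheduling one of the jobs from the question with it (the schedule and script path come from the question; check the cron package docs for the exact options you need):
const { exec } = require('child_process');
const { CronJob } = require('cron');

// run the app script at minute 1 of every hour, like the crontab entries above
const job = new CronJob('1 * * * *', function() {
    exec('node /home/project/app.js 103', (err, stdout, stderr) => {
        if (err) return console.error(err);
        console.log(stdout);
    });
});
job.start();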
I want to concatenate two audio files. I used an npm package known as audioconcat, but when I installed it and configured the code below, I am confronted with the following error:
Error: Error: Cannot find ffmpeg
at E:\VoiceMan\registercheck\node_modules\fluent-ffmpeg\lib\processor.js:136:22
at E:\VoiceMan\registercheck\node_modules\fluent-ffmpeg\lib\capabilities.js:123:9
at E:\VoiceMan\registercheck\node_modules\async\dist\async.js:356:16
at nextTask (E:\VoiceMan\registercheck\node_modules\async\dist\async.js:5057:29)
at E:\VoiceMan\registercheck\node_modules\async\dist\async.js:5064:13
at apply (E:\VoiceMan\registercheck\node_modules\async\dist\async.js:21:25)
at E:\VoiceMan\registercheck\node_modules\async\dist\async.js:56:12
at E:\VoiceMan\registercheck\node_modules\async\dist\async.js:840:16
at E:\VoiceMan\registercheck\node_modules\fluent-ffmpeg\lib\capabilities.js:116:11
at E:\VoiceMan\registercheck\node_modules\fluent-ffmpeg\lib\utils.js:223:16
ffmpeg stderr: undefined
Here is the code I am using (all the audio files are in the same folder):
var audioconcat = require('audioconcat')

var songs = [
    'a(1).mp3',
    'a(2).mp3',
    'a(3).mp3'
]

audioconcat(songs)
    .concat('all.mp3')
    .on('start', function (command) {
        console.log('ffmpeg process started:', command)
    })
    .on('error', function (err, stdout, stderr) {
        console.error('Error:', err)
        console.error('ffmpeg stderr:', stderr)
    })
    .on('end', function (output) {
        console.error('Audio created in:', output)
    })
It does not look like you have ffmpeg installed; it is a requirement for that package.
Requirements
ffmpeg with additional compilation flags --enable-libass --enable-libmp3lame
https://www.npmjs.com/package/audioconcat
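If ffmpeg is actually installed but just not on the Node process's PATH, one workaround to try (hedged: it relies on fluent-ffmpeg honouring the FFMPEG_PATH environment variable, and the Windows path below is only an example) is to point it at the binary before requiring audioconcat:
// example path only; adjust to wherever ffmpeg.exe actually lives on your machine
process.env.FFMPEG_PATH = 'E:\\ffmpeg\\bin\\ffmpeg.exe';

var audioconcat = require('audioconcat')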
I am using the recipe from the official gulp repo to make browserify work for multiple entry points. It worked out fine when I followed the recipe for a single file, but when I try to run the task now, it prints
the following tasks did not complete: browserify.
Did you forget to signal async completion?
Unfortunately, I am using Gulp 4 for this. This is my adapted task:
gulp.task('browserify', function() {
    var bundledStream = through();

    bundledStream.pipe(source('./public/static/js-dev/bundles/*.js'))
        .pipe(buffer())
        .pipe(sourcemaps.init({loadMaps: true}))
        .on('error', gutil.log)
        .pipe(sourcemaps.write('.'))
        .pipe(gulp.dest(local.jsDist + '/bundles'));

    globby(['./public/static/js-dev/bundles/*.js'], function(err, entries) {
        if (err) {
            bundledStream.emit('error', err);
            return;
        }
        var b = browserify({
            entries: entries,
            debug: true
        });
        b.bundle().pipe(bundledStream);
    });

    return bundledStream;
});
I don't know what I am doing wrong - I just want this to work.
You need to signal completion in the task, so add done to the function as an argument and call it at the end of your function as done();
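Applied to the task above, a sketch of that change could look like the following (the answer only says to add done; where to call it is my choice here, hooking it to the end of the write stream so the task waits for the bundle to finish, and also calling it on a globby error):
gulp.task('browserify', function(done) {
    var bundledStream = through();

    bundledStream.pipe(source('./public/static/js-dev/bundles/*.js'))
        .pipe(buffer())
        .pipe(sourcemaps.init({loadMaps: true}))
        .on('error', gutil.log)
        .pipe(sourcemaps.write('.'))
        .pipe(gulp.dest(local.jsDist + '/bundles'))
        // signal async completion once the last stream has finished writing
        .on('end', done);

    globby(['./public/static/js-dev/bundles/*.js'], function(err, entries) {
        if (err) {
            bundledStream.emit('error', err);
            // also end the task if globbing fails
            done(err);
            return;
        }
        var b = browserify({
            entries: entries,
            debug: true
        });
        b.bundle().pipe(bundledStream);
    });
});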