I'm building a Yeoman generator, and after it has finished I want to perform some command-line actions like 'npm install', 'bower install' and 'grunt less'. I'm using spawnCommand for this, and I nested all actions using event listeners so they run sequentially. However, to avoid this endless nesting, I'm looking for a cleaner, easily expandable implementation.
Ideally, I would like to have an array of commands (like ['npm install', 'bower install', 'grunt less']) and have it processed sequentially with proper error detection.
// Install npm packages
this.spawnCommand('npm', ['install'])
  .on('exit', function (err) {
    if (err) {
      this.log.error('npm package installation failed. Please run \'npm install\' and \'bower install\'. Error: ' + err);
    } else {
      // Install bower packages
      this.spawnCommand('bower', ['install'])
        .on('exit', function (err) {
          if (err) {
            this.log.error('bower package installation failed. Please run \'bower install\'. Error: ' + err);
          } else {
            // Compile Less
            this.spawnCommand('grunt', ['less'])
              .on('exit', function (err) {
                if (err) {
                  this.log.error('Less compilation failed. Please run \'grunt less:dev\'. Error: ' + err);
                }
              }.bind(this));
          }
        }.bind(this));
    }
  }.bind(this));
Something like this? (untested though):
this.processTask = function (task) {
  this.spawnCommand(task.cmd, task.args)
    .on('exit', function (err) {
      if (err) {
        this.log.error('task failed. Error: ' + err);
      } else {
        this.emit('nextTask');
      }
    }.bind(this)); // bind so `this` is still the generator inside the handler
};

this.on('nextTask', function () {
  var next = this.tasks.shift();
  if (next) {
    this.processTask(next);
  } else {
    console.log('we are done');
  }
}.bind(this));

// Prepare the list of tasks:
this.tasks = [];
this.tasks.push({cmd: 'npm', args: ['install']});
this.tasks.push({cmd: 'bower', args: ['install']});
this.tasks.push({cmd: 'grunt', args: ['less']});

// Start the first task
this.processTask(this.tasks.shift());
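If you'd rather avoid the event-based queue, the same sequencing can be expressed as a promise chain. A minimal, untested sketch, assuming spawnCommand returns the spawned child process (whose 'exit' event receives the exit code), as it does in yeoman-generator versions of that era:
var self = this;

// Wrap one task in a promise that settles when the process exits.
function runTask(task) {
  return new Promise(function (resolve, reject) {
    self.spawnCommand(task.cmd, task.args)
      .on('exit', function (code) {
        if (code === 0) {
          resolve();
        } else {
          reject(new Error(task.cmd + ' exited with code ' + code));
        }
      });
  });
}

// Chain the tasks one after another; the first failure skips the rest.
self.tasks.reduce(function (chain, task) {
  return chain.then(function () { return runTask(task); });
}, Promise.resolve())
  .then(function () { console.log('we are done'); })
  .catch(function (err) { self.log.error(err.message); });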
I used execSync from Node.js and it seems to work, e.g.:
var child_process = require('child_process');
var result = child_process.execSync('grunt less');
Node.js 0.12 and io.js 1.10 support execSync:
child_process.execSync(command[, options])
It returns "Buffer|String The stdout from the command"; if the command exits with a non-zero code, execSync throws an Error rather than returning one.
API documentation.
The back story about the synchronous API.
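Applied to the original question, the whole command list can then be run in order with a plain loop. A minimal sketch (the stdio option just forwards each child's output to the console):
var child_process = require('child_process');

// Run each command in order; execSync throws on non-zero exit,
// so the first failure stops the loop.
['npm install', 'bower install', 'grunt less'].forEach(function (cmd) {
  try {
    child_process.execSync(cmd, { stdio: 'inherit' });
  } catch (err) {
    console.error('Command failed: ' + cmd);
    process.exit(1);
  }
});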
You can make a script like init.sh and put your commands that need to be run in order in it, like:
#!/usr/bin/env bash
npm install
your-funky-command
gulp something-special
gulp
...then wherever you need to put the spawnCommand code (I do it in the end method), add something like this:
var done = this.async();
this.spawnCommand('sh', ['init.sh'], /* maybe cwd? {cwd: 'src'} */)
.on('close', done);
Ain't pretty or anything but it works, and it's obvious.
Optionally, if you need one command to only run if the prev succeeded, do this:
#!/usr/bin/env bash
npm install \
&& your-funky-command \
&& gulp something-special \
&& gulp
(Bonus advantage is that now your app init logic is no longer tied to Yo.)
Related
I am working on building multiple language versions of an Angular application from a single Gradle command.
Below is the code which is used to build all the localized files.
The buildAllLanguageVersion() method builds the languages one by one by iterating over all the available build languages.
function buildAllLanguageVersion() {
  return new Promise(async (accept, reject) => {
    const doneLanguages = [];
    const applicationBase = "APPNAME";
    for (let language of buildLanguages) {
      const ret = await buildingLanguage(language);
      doneLanguages.push(ret);
    }
    accept(doneLanguages);
  });
}
function buildingLanguage(buildLanguage) {
  return new Promise(accept => {
    console.log("Language building: " + buildLanguage);
    const languagePath = path.resolve(`./src/i18n/messages.${buildLanguage}.xlf`);
    const languageFile = fs.existsSync(languagePath) ? buildLanguage : "en";
    if (languageFile !== buildLanguage && !fs.existsSync(languagePath)) {
      console.error("Language file does not exist: " + buildLanguage);
    }
    exec.exec(`${folder} build --configuration=dynamic`, (error, stdout, stderror) => {
      if (error) {
        throw "Build failed: " + error;
      }
      accept(buildLanguage);
    });
  });
}
The above code works fine when building the languages sequentially, but each language takes almost 3-5 minutes to build, and I am currently using 7 different languages in the application.
My question is: how can I call the method in parallel so that I can reduce the build time?
So far I have tried Promise.all() in buildAllLanguageVersion() to call buildingLanguage() in parallel. The methods are indeed invoked at the same time, but the build fails with the error below.
14:31:17 D:\ls\workspace\Generic_commit\APP\app-name\tools\build-all.js:129
14:31:17 throw "Build failed: " + error;
14:31:17 ^
14:31:17 Build failed: Error: Command failed: D:\ls\workspace\Generic_commit\APP\app-name\node_modules\.bin\ng.cmd build --configuration=dynamic
14:31:17
14:31:17 ERROR in ngcc is already running at process with id 5252.
14:31:17 If you are running multiple builds in parallel then you should pre-process your node_modules via the command line ngcc tool before starting the builds;
14:31:17 See https://v9.angular.io/guide/ivy#speeding-up-ngcc-compilation.
14:31:17 (If you are sure no ngcc process is running then you should delete the lock-file at D:/ls/workspace/Generic_commit/APP/app-name/node_modules/@angular/compiler-cli/ngcc/__ngcc_lock_file__.)
I tried removing the __ngcc_lock_file__ but that didn't help.
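For what it's worth, the error message itself points at a fix: pre-process node_modules with the command-line ngcc tool once, before starting the parallel builds. A sketch of what that could look like (the plain `npx ngcc` invocation is an assumption; check the ngcc options for your Angular version):
const { execSync } = require("child_process");

async function buildAllLanguageVersion() {
  // Run ngcc once up front so the parallel ng builds don't race
  // for the ngcc lock file, as the error message suggests.
  execSync("npx ngcc", { stdio: "inherit" });

  // With node_modules pre-processed, the builds can run concurrently.
  return Promise.all(buildLanguages.map(language => buildingLanguage(language)));
}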
I am learning clustering in Node.js. I have two tasks, one for node-sass and one for uglify-js, which I want to run in two different workers using the cluster module. The code works fine, creating the CSS file from SASS and main.min.js from main.js.
But I am not sure whether the tasks are handled by separate workers or not. Let me know where I can make amendments so that:
SASS -> CSS is handled by one worker
the UglifyJS task is handled by a second worker
once both tasks complete, the master logs a success message
Following is my code:
const cluster = require('cluster');
const http = require('http');
const numCPUs = require('os').cpus().length;
var fs = require('fs');
var UglifyJS = require("uglify-js");
var sass = require('node-sass');

if (cluster.isMaster) {
  console.log(`Master ${process.pid} is running`);

  // Fork workers.
  for (let i = 0; i < 2; i++) {
    cluster.fork();
  }

  cluster.on('exit', (worker, code, signal) => {
    console.log(`worker ${worker.process.pid} died`);
  });
} else {
  var result = UglifyJS.minify("js/main.js");
  fs.writeFile(__dirname + '/js/main.min.js', result.code, function(err){
    if (err) throw err;
  });

  sass.render({
    file: './css/main.scss',
    outFile: 'css',
  }, function(err, result) {
    if (err) throw err;
    fs.writeFile(__dirname + '/css/main.css', result.css, function(err){
      if (err) throw err;
    });
  });

  console.log(`Worker ${process.pid} started`);
}
I think this will help
const cluster = require('cluster');
const http = require('http');
const numCPUs = require('os').cpus().length;
var fs = require('fs');
var UglifyJS = require("uglify-js");
var sass = require('node-sass');

if (cluster.isMaster) {
  console.log(`Master ${process.pid} is running`);

  // Note: a worker's own 'exit' event passes (code, signal), so listen
  // on `cluster` to get the worker object in the callback.
  cluster.on('exit', (worker, code, signal) => {
    console.log(`worker ${worker.process.pid} died`);
  });

  cluster.fork();
  cluster.fork();
} else if (cluster.worker.id === 1) {
  console.log(`Worker ${process.pid} started`);
  sass.render({
    file: './css/main.scss',
    outFile: 'css',
  }, function (err, result) {
    if (err) throw err;
    fs.writeFile(__dirname + '/css/main.css', result.css, function (err) {
      if (err) throw err;
      process.exit(); // exit only after the CSS file has been written
    });
  });
} else {
  var result = UglifyJS.minify("js/main.js");
  fs.writeFile(__dirname + '/js/main.min.js', result.code, function (err) {
    if (err) throw err;
    process.exit(); // exit only after the minified file has been written
  });
}
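Neither version makes the master log a success message once both tasks are done (the third requirement above). One way to do that, sketched under the assumption that each worker calls process.exit() with code 0 on success as above, is to replace the exit logging in the `if (cluster.isMaster)` branch with a counter:
// Inside the `if (cluster.isMaster)` branch:
var finished = 0;
cluster.on('exit', (worker, code, signal) => {
  if (code !== 0) {
    console.log(`worker ${worker.process.pid} died`);
  } else if (++finished === 2) {
    console.log('Both the SASS and UglifyJS tasks completed successfully');
  }
});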
In cluster master-worker scenarios the basic code, as in many parts of core Node.js, is sometimes more complicated than simply declaring a master and workers. This is one of those cases where I would strongly advise spending a couple of hours searching npm for a module that fits your scheme. I have tested cluster-master, but in your case you may actually need more than one npm module. Keep in mind that clusters usually map to CPU cores, which is what you fork against in the cluster.fork() calls above.
You want to implement the cluster master-worker paradigm correctly, get a result back from each worker, and know that each process is running as you think it should. That means either delving deep into the Node.js cluster documentation and implementation, or researching the various npm modules available, which usually hide the hard work for you.
For the proof-of-concept part I tried both the Node cluster module and pm2; pm2 turned out to be very easy to set up, and it can also keep the Node project running in the background.
Add the pm2 command to your build script, or just try this and see how it works:
pm2 start app.js -i max
Refer to:
http://pm2.keymetrics.io/docs/usage/cluster-mode/
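If you prefer to keep the configuration in the repository, the same command can be expressed as a pm2 ecosystem file. A sketch (see the pm2 docs above for the full option list):
// ecosystem.config.js — equivalent of `pm2 start app.js -i max`
module.exports = {
  apps: [{
    name: 'app',
    script: 'app.js',
    instances: 'max',      // one instance per CPU core
    exec_mode: 'cluster'   // run the instances through Node's cluster module
  }]
};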
I'm trying to push to my remote repository using the gulp-git module from npm. The add & commit portion runs fine, but it runs into a stream error when trying to perform the remote push.
bump: function () {
  var branch = argv.branch || 'development';
  fs.readFile('./package.json', function (err, data) {
    if (err) { return; }
    return gulp.src(['./package.json', './bower.json'])
      .pipe(git.add())
      .pipe(git.commit('chore(core): bump to ' + JSON.parse(data).version))
      .pipe(git.push('origin', branch, function (err) {
        if (err) throw err;
      }));
  });
}
The stack trace:
C:\src\git\ig\node_modules\gulp-git\node_modules\through2\node_modules\readable-stream\lib\_stream_readable.js:623
      var written = dest.write(chunk);
                         ^
TypeError: undefined is not a function
    at write (C:\src\git\ig\node_modules\gulp-git\node_modules\through2\node_modules\readable-stream\lib\_stream_readable.js:623:24)
    at flow (C:\src\git\ig\node_modules\gulp-git\node_modules\through2\node_modules\readable-stream\lib\_stream_readable.js:632:7)
    at DestroyableTransform.pipeOnReadable (C:\src\git\ig\node_modules\gulp-git\node_modules\through2\node_modules\readable-stream\lib\_stream_readable.js:664:5)
    at DestroyableTransform.emit (events.js:104:17)
    at emitReadable_ (C:\src\git\ig\node_modules\gulp-git\node_modules\through2\node_modules\readable-stream\lib\_stream_readable.js:448:10)
    at emitReadable (C:\src\git\ig\node_modules\gulp-git\node_modules\through2\node_modules\readable-stream\lib\_stream_readable.js:444:5)
    at readableAddChunk (C:\src\git\ig\node_modules\gulp-git\node_modules\through2\node_modules\readable-stream\lib\_stream_readable.js:187:9)
    at DestroyableTransform.Readable.push (C:\src\git\ig\node_modules\gulp-git\node_modules\through2\node_modules\readable-stream\lib\_stream_readable.js:149:10)
    at DestroyableTransform.Transform.push (C:\src\git\ig\node_modules\gulp-git\node_modules\through2\node_modules\readable-stream\lib\_stream_transform.js:145:32)
    at Array.forEach (native)
I'm running gulp-git version 1.6.0. It looks like they are at 1.7.0 now. Maybe upgrading would help; however, this seems like a pretty standard usage of the command, so I think it's something I'm doing wrong.
With help from stevelacy (the project admin) I was able to make it work with this code change:
.pipe(git.commit('chore(core): bump to ' + JSON.parse(data).version))
.on('end', function() {
  git.push('origin', branch, function(err) {
    if (err) throw err;
  });
});
It turns out that the git push command cannot be done from a stream as of yet.
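For completeness, an untested sketch of the whole bump task with that change folded in (same variables as in the question):
bump: function () {
  var branch = argv.branch || 'development';
  fs.readFile('./package.json', function (err, data) {
    if (err) { return; }
    return gulp.src(['./package.json', './bower.json'])
      .pipe(git.add())
      .pipe(git.commit('chore(core): bump to ' + JSON.parse(data).version))
      .on('end', function () {
        // Push after the commit stream has finished instead of piping into it.
        git.push('origin', branch, function (err) {
          if (err) throw err;
        });
      });
  });
}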
Update: This seems to be a bug in gulp-protractor. It has been filed as a bug on their GitHub page and they will look into it. Source: https://github.com/mllrsohn/gulp-protractor/issues/64
The only possible workaround until the bug is resolved is to move your project to a directory that doesn't include spaces.
So I'm trying to get an Aurelia project started, including front-end unit testing. Here is where the problem starts. When I try to run the e2e gulp task, I get the following error:
[10:45:44] using gulpfile ~\Documents\Visual Studio 2013\Projects\ProjectX\ProjectX\gulpfile.js
[10:45:44] Starting 'build-e2e'...
[10:45:44] Finished 'build-e2e' after 207 ms
[10:45:44] Starting 'e2e'...
'C:\Users\jorisd\Documents\Visual' is not recognized as an internal or external command, operable program or batch file.
C:\Users\jorisd\Documents\Visual Studio 2013\Projects\ProjectX\ProjectX\build\tasks\e2e.js:34
.on('error', function(e) {throw e; });
Error: protractor exited with code 1
Basically it's the highlighted code that has the problem: since my path includes a space, it stops there for some reason.
Here's what my e2e.js file looks like right now:
var gulp = require('gulp');
var paths = require('../paths');
var to5 = require('gulp-babel');
var plumber = require('gulp-plumber');
var webdriverUpdate = require('gulp-protractor').webdriver_update;
var webdriverStandalone = require("gulp-protractor").webdriver_standalone;
var protractor = require('gulp-protractor').protractor;

// for full documentation of gulp-protractor,
// please check https://github.com/mllrsohn/gulp-protractor
gulp.task('webdriver-update', webdriverUpdate);
gulp.task('webdriver-standalone', ['webdriver-update'], webdriverStandalone);

// transpiles files in
// /test/e2e/src/ from es6 to es5
// then copies them to test/e2e/dist/
gulp.task('build-e2e', function() {
  return gulp.src(paths.e2eSpecsSrc)
    .pipe(plumber())
    .pipe(to5())
    .pipe(gulp.dest(paths.e2eSpecsDist));
});

// runs build-e2e task
// then runs end to end tasks
// using Protractor: http://angular.github.io/protractor/
gulp.task('e2e', ['build-e2e'], function(cb) {
  return gulp.src(paths.e2eSpecsDist + '/*.js')
    .pipe(protractor({
      configFile: '/protractor.conf.js',
      args: ['--baseUrl', 'http://127.0.0.1:9000']
    }))
    .on('end', function() { process.exit(); })
    .on('error', function(e) { throw e; });
});
The problem is in the e2e task, with the configFile option.
I tried changing the line to the following:
configFile: __dirname + '/protractor.conf.js',
But this was also without result. If any of you know a workaround for spaces in the configFile path, I'll be happy to hear it.
For me it's working fine:
var angularProtractor = require('gulp-angular-protractor');

gulp.task('test', function (callback) {
  gulp
    .src([__dirname + '/public/apps/adminapp/**/test/**_test.js'])
    .pipe(angularProtractor({
      'configFile': 'public/apps/adminapp/app.test.config.js',
      'debug': false,
      'args': ['--suite', 'adminapp'],
      'autoStartStopServer': true
    }))
    .on('error', function(e) {
      console.log(e);
    })
    .on('end', callback);
});
I am creating a gulp task which might fail under certain circumstances.
var gulp = require('gulp');
var shell = require('gulp-shell');

gulp.task('favicon', function () {
  try {
    require('child_process').execSync('icotool --version');
  } catch (e) {
    var err = new Error('Unix bash and icotool required for generating favicon');
    throw err;
  }
  return gulp.src('', {read: false})
    .pipe(shell([
      './generate-favicon.sh'
    ]));
});
When running my task via gulp and hitting the error, the error output is rather ugly.
I would like to present the error the way it is done by e.g. jslint, using gulp-util's PluginError.
It actually works to just create a PluginError there and throw it, but that doesn't seem quite right.
Another, less nice, solution would be to set
err.showStack = false;
for at least slightly nicer error output. A built-in gulp.task.Error would be nice.
From what I've seen, it's not great to throw an error from gulp. But I found this blog entry with an approach that worked for me:
http://gotofritz.net/blog/geekery/how-to-generate-error-in-gulp-task/
Edit: gulp-util has been deprecated. Instead, use the plugin-error package.
My Example:
var gulp = require('gulp');
var PluginError = require('plugin-error');

gulp.task('deploy', function(cb) {
  if (typeof specialId === 'undefined') {
    var err = new PluginError({
      plugin: 'deploy',
      message: 'specialId is empty.'
    });
    return cb(err); // hand the error to gulp instead of throwing
  }
  cb();
});