I am learning clustering in NodeJS. I have two tasks, one is node-sass and the other is uglifyjs, which I want to run by two different workers using cluster in NodeJS. Though the code is working fine and creating the SASS -> CSS file and main.js to main.min.js file.
But I am not sure whether it is handled by separate workers or not. Let me know where I can make the amendments to make -
SASS -> CSS handled by one worker
UglifyJS task by second worker
Once both tasks complete Master console a successful message
Following is my code:
// Question code: forks two workers, but EVERY worker runs the same
// else-branch, so BOTH workers perform BOTH tasks — the work is
// duplicated rather than split between workers.
const cluster = require('cluster');
const http = require('http');
const numCPUs = require('os').cpus().length;
var fs = require('fs');
var UglifyJS = require("uglify-js");
var sass = require('node-sass');
if (cluster.isMaster) {
console.log(`Master ${process.pid} is running`);
// Fork workers.
for (let i = 0; i < 2; i++) {
cluster.fork();
}
cluster.on('exit', (worker, code, signal) => {
console.log(`worker ${worker.process.pid} died`);
});
} else {
// Each forked worker minifies js/main.js AND renders the SCSS;
// there is no per-worker dispatch (e.g. on cluster.worker.id).
var result = UglifyJS.minify("js/main.js");
fs.writeFile(__dirname + '/js/main.min.js', result.code, function(err){
if(err)
throw err;
});
sass.render({
file: './css/main.scss',
outFile: 'css',
}, function(err, result) {
if(err)
throw err;
// Async write of the rendered CSS; errors are simply rethrown.
fs.writeFile(__dirname + '/css/main.css', result.css, function(err){
if(err)
throw err;
});
});
console.log(`Worker ${process.pid} started`);
}
I think this will help
const cluster = require('cluster');
const http = require('http');
const numCPUs = require('os').cpus().length;
var fs = require('fs');
var UglifyJS = require("uglify-js");
var sass = require('node-sass');

if (cluster.isMaster) {
  console.log(`Master ${process.pid} is running`);

  // Count clean worker exits so the master can announce overall success
  // once BOTH tasks are done (the third requirement from the question).
  let finished = 0;
  const onExit = (worker, code, signal) => {
    if (code === 0) {
      finished += 1;
      if (finished === 2) {
        console.log('Both tasks completed successfully.');
      }
    } else {
      console.log(`worker ${worker.process.pid} died`);
    }
  };

  // Worker 1 handles SASS, worker 2 handles UglifyJS (see dispatch below).
  cluster.fork().on('exit', onExit);
  cluster.fork().on('exit', onExit);
} else if (cluster.worker.id === 1) {
  // Worker 1: SASS -> CSS.
  console.log(`Worker ${process.pid} started`);
  sass.render({
    file: './css/main.scss',
    outFile: 'css',
  }, function (err, result) {
    if (err)
      throw err;
    fs.writeFile(__dirname + '/css/main.css', result.css, function (err) {
      if (err)
        throw err;
      // BUG FIX: exit only after the async write finishes. The original
      // called process.exit() synchronously right after sass.render(),
      // killing the worker before the render callback could ever run.
      process.exit(0);
    });
  });
} else {
  // Worker 2: UglifyJS minification. minify() itself is synchronous.
  console.log(`Worker ${process.pid} started`);
  var result = UglifyJS.minify("js/main.js");
  fs.writeFile(__dirname + '/js/main.min.js', result.code, function (err) {
    if (err)
      throw err;
    // BUG FIX: exit inside the write callback, not immediately after
    // scheduling it, so main.min.js is actually flushed to disk.
    process.exit(0);
  });
}
In the cluster --> master-slave scenarios the real basic code, as in many parts of the original Node.js structure, is sometimes a bit more complicated than just declaring a master and slave. Here is one of the cases where I would strongly advise a couple of hours searching NPM and finding a module that will work for your schema. I have tested cluster-master but in your case you may actually need more than one NPM module. It would be well to keep in mind that clusters usually mean cores where you are going to fork -- above code cluster.fork();.
You want to implement the cluster master-worker paradigm correctly and you want a return from each worker and know the process is running as you think it should. Either that means delving deep into the Node.js cluster documentation and implementation, or researching the various NPM modules available which will usually obfuscate the hard work for you.
For poc part I tried node cluster and pm2, pm2 looks very easy to setup. pm2 also can keep the node project running in background.
Add pm2 command in your build script or just try this and see how it works
pm2 start app.js -i max
Refer
http://pm2.keymetrics.io/docs/usage/cluster-mode/
Related
Anyone know how to run Node Cluster on windows? I haven't been able to find any articles on the web and cannot seem to solve this problem:
events.js:160
throw er; // Unhandled 'error' event
^
Error: write ENOTSUP
at exports._errnoException (util.js:1007:11)
at ChildProcess.target._send (internal/child_process.js:634:20)
at ChildProcess.target.send (internal/child_process.js:521:19)
at sendHelper (cluster.js:751:15)
at send (cluster.js:534:12)
at cluster.js:509:7
at SharedHandle.add (cluster.js:99:3)
at queryServer (cluster.js:501:12)
at Worker.onmessage (cluster.js:449:7)
at ChildProcess.<anonymous> (cluster.js:765:8)
And the code...
// Question code: fork one worker per CPU; every worker binds the SAME UDP
// port, which requires cluster handle-sharing — unsupported for dgram on
// Windows, hence the "write ENOTSUP" crash shown above.
if (cluster.isMaster) {
for (let i = 0; i < numCPUs; i++) {
cluster.fork();
}
cluster.on('online', (worker) => {
console.log('Worker ' + worker.process.pid + ' is online');
});
cluster.on('exit', (worker, code, signal) => {
console.log(`Worker ${worker.process.pid} died with code ${code} and signal ${signal}`);
});
} else {
console.log('else part ');
openPort();
}
// Bind a UDP socket and route every datagram to processMessage.
// NOTE(review): `dgram`, `port`, `host` and `processMessage` are defined
// outside this snippet — confirm against the full file.
function openPort() {
let server = dgram.createSocket('udp4');
server.bind(port, host);
server.on('message', processMessage);
}
Support for UDP clustering was added in v0.11.14 (for Linux and OSX).
Check file on node.js master, which says "dgram clustering is currently not supported on windows"
In the current node js version I am using below code to create cluster on windows.
var cluster = require('cluster');
var numCPUs = require('os').cpus().length;
if (cluster.isMaster) {
  // Fork one worker per CPU core.
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
  cluster.on('exit', function(worker, code, signal) {
    // BUG FIX: template literals need backticks. With double quotes the
    // original printed the text "${worker.process.pid}" literally
    // instead of interpolating the dead worker's pid.
    console.log(`worker ${worker.process.pid} died`);
    // Respawn so the pool stays at numCPUs workers.
    cluster.fork();
  });
} else {
  var express = require('express');
  var http = require('http');
  // init app
  var app = express();
  // Wrap the express app in a plain HTTP server.
  function createServer(app) {
    return http.createServer(app);
  }
  app.locals.server = createServer(app);
  // NOTE(review): `port` is not defined in this snippet — it must come
  // from surrounding code or configuration; confirm before running.
  app.locals.server.listen(port, function() {
    console.info("server online");
  });
}
This will create clusters on same port.
So, in order to use UDP with Node cluster on Windows, you have to call server.bind like this:
// Bind with `exclusive: true` so the cluster module does NOT attempt to
// share the UDP handle between workers — handle sharing is exactly what
// fails with ENOTSUP on Windows.
server.bind({port: 1900, exclusive: true}, function () {
console.log('PORT BIND SUCCESS');
server.setBroadcast(true);
server.setMulticastTTL(128);
// NOTE(review): `server`, `multicastAddress` and `myIp` are defined in
// surrounding code not shown here — confirm before reuse.
server.addMembership(multicastAddress, myIp);
});
The key part is to pass in the object {port: PORT, exclusive: true} to the bind function. I found the answer here: https://github.com/misterdjules/node/commit/1a87a95d3d7ccc67fd74145c6f6714186e56f571
This gulp task hangs on exec('node config/app') line. first exec works fine but the second just hangs.
// Question code: demonstrates the hang. exec() buffers output and invokes
// its callback only when the child process EXITS — a server never exits,
// so the second callback (and everything inside it) never runs.
gulp.task('test', function(cb) {
var exec = require('child_process').exec;
exec('echo 3', function(err, stdout) {
console.log(stdout); // prints "3": echo terminates, so exec calls back
});
exec('node config/app', function(err, stdout, stderr) {
// Never reached while the server keeps running.
console.log(stdout);
var testemOptions = {
file: 'testem.json'
};
var t = new testem();
return t.startCI(testemOptions, function() {
cb();
});
});
});
I can see the output 3 but no output is shown for the second console.log.
I am trying to run my server before running the tests with testem.
I've tried this similar solution but it doesn't work: Exec not returning anything when trying to run git shortlog with nodejs.
Also I've recently asked a hanging testem gulp task question: Testem gulp task hangs after finished.
Edit:
My current solution is:
// Working approach: spawn the server (non-blocking, unlike exec) and wait
// for its startup banner on stdout before launching testem.
gulp.task('test', /*['build'],*/ function(cb) {
var spawn = require('child_process').spawn;
var proc = spawn('node', ['config/app']);
proc.stdout.on('readable', function() {
// read() may return null, and 'readable' can fire more than once.
var output = proc.stdout.read();
if (output && output.toString().match('express listening')) {
var testemOptions = {
file: 'testem.json'
};
var t = new testem();
t.startCI(testemOptions, function() {
proc.kill(); // shut the server down once the test run finishes
cb();
});
}
});
});
If you want to use testem to test the "node config/app" server, you cannot use exec.
Exec is supposed to callback when the command is finished so in your case it will never callback.
try with
gulp.task('test', function (cb) {
  var spawn = require('child_process').spawn;
  // spawn (not exec): exec would wait for the child to terminate, and a
  // server never terminates, so its callback would never fire.
  var proc = spawn('node', ['config/app']);
  var testStarted = false;
  proc.stdout.on('readable', function () {
    // 'readable' fires repeatedly; the guard ensures testem is launched
    // exactly once, on the server's first stdout output.
    if (testStarted) return;
    testStarted = true;
    var testemOptions = {
      file: 'testem.json'
    };
    var t = new testem();
    t.startCI(testemOptions, function () {
      proc.kill();
      cb();
    });
  }); // BUG FIX: the original omitted the ")" closing .on(...), a syntax error
});
Note that I did not test this code and that it probably does not handle all the corner cases you might encounter (if the server stops unexpectedly for example)
you may also want to check the plugin https://github.com/sargentsurg/gulp-testem
There is a testem plugin on GitHub.
I'm building a Yeoman generator and after it has finished I want to perform some command line actions like 'npm install', 'bower install' and 'grunt less'. I'm using spawnCommand for this and I nested all actions using event listeners to perform them synchronously. However, to avoid this endless nesting, I'm looking for a cleaner implementation, to make it easily expandable.
Perfectly, I would like to have an array with commands (like ['npm install', 'grunt install', 'less:dev']) and have this processed synchronously with proper error detection.
// Question code: each task is chained inside the previous one's 'exit'
// handler, producing ever-deeper nesting as tasks are added.
// NOTE(review): the 'exit' event's first argument is the child's exit
// code (0 = success), not an Error object — the `err` name is misleading,
// though truthy-checking it still detects nonzero exits.
// Install npm packages
this.spawnCommand('npm', ['install'])
.on('exit', function (err) {
if (err) {
this.log.error('npm package installation failed. Please run \'npm install\' and \'bower install\'. Error: ' + err);
} else {
// Install bower packages
this.spawnCommand('bower', ['install'])
.on('exit', function (err) {
if (err) {
this.log.error('bower package installation failed. Please run \'bower install\'. Error: ' + err);
} else {
this.spawnCommand('grunt', ['less'])
.on('exit', function (err) {
if (err) {
this.log.error('Less compilation failed. Please run \'grunt less:dev\'. Error: ' + err);
} else {
}
}.bind(this));
}
}.bind(this));
}
}.bind(this));
Something like this? (untested though):
// Run one task; on success, pull the next task from the queue via the
// 'nextTask' event so the commands execute strictly one after another.
this.processTask = function (task) {
  this.spawnCommand(task.cmd, task.args)
    .on('exit', function (err) {
      // 'exit' passes the child's exit code (0 = success) as `err`.
      if (err) {
        this.log.error('task failed. Error: ' + err);
      } else {
        this.emit('nextTask');
      }
    }.bind(this)); // BUG FIX: without bind, `this` is not the generator
                   // inside the handler, so this.log/this.emit would fail.
};

// BUG FIX: the original was missing the comma between the event name and
// the listener ("this.on('nextTask' function(){") — a syntax error.
this.on('nextTask', function () {
  var next = this.tasks.shift();
  if (next) {
    this.processTask(next);
  } else {
    console.log('we are done');
  }
}.bind(this));

// preparing the list of tasks:
this.tasks = [];
this.tasks.push({cmd: 'npm', args: ['install']});
this.tasks.push({cmd: 'bower', args: ['install']});
this.tasks.push({cmd: 'grunt', args: ['less']});

// start first task
this.processTask(this.tasks.shift());
I used execSync from Node.js and it seems to work, eg:
var child_process = require('child_process');
// BUG FIX: execSync is a method on the child_process module; calling the
// bare name `execSync` (as the original did) throws a ReferenceError.
var result = child_process.execSync('grunt less');
Node.js 0.12 and io.js 1.10 support execSync:
child_process.execSync(command[, options])
and returns, "Buffer|String The stdout from the command", which may be an error code.
API documentation.
The back story about the synchronous API.
You can make a script like init.sh and put your commands that need to be run in order in it, like:
#!/usr/bin/env bash
# Project bootstrap: install dependencies, run custom setup, then build.
# Commands run unconditionally in order (failures do not stop the script).
npm install
your-funky-command
gulp something-special
gulp
...then wherever you need to put the spawnCommand code (I do it in end method), add somehting like this:
// Suspend the Yeoman run loop until the spawned script's stdio closes.
var done = this.async();
this.spawnCommand('sh', ['init.sh'], /* maybe cwd? {cwd: 'src'} */)
.on('close', done);
Ain't pretty or anything but it works, and it's obvious.
Optionally, if you need one command to only run if the prev succeeded, do this:
#!/usr/bin/env bash
# Chained with && so each command runs only if the previous one succeeded.
npm install \
&& your-funky-command \
&& gulp something-special \
&& gulp
(Bonus advantage is that now your app init logic is no longer tied to Yo.)
These are my first steps with node.js. I will watch a directory with chokidar for added files. When the copy process is finished, a script should be called. But I don't know how I can identify when the copy process is finished and the file is completely available in my directory. The console log command with the word "finish" never appears.
var fs = require('fs');
var chokidar = require('chokidar');
// Watch the directory, ignoring dotfiles; persistent keeps the process alive.
var watcher = chokidar.watch('/root/Documents/gw/', {ignored: /^\./, persistent: true});
watcher
.on('add', function(path) {
console.log('File', path, 'has been added');
// fs.watchFile fires only on CHANGES, and a change where the size stays
// identical (curr.size == prev.size) practically never occurs during a
// plain copy — which is why "finish" is never printed.
fs.watchFile(path, function(curr, prev) {
if (curr.size == prev.size) {
console.log('finish');
// TODO start a shell script
} else {
console.log(curr.size);
}
});
});
I use node.js version 0.10.25 on a Linux system.
Thanks in advance for help!
The reason your test will never be positive is that on a simple copy operation fs.watchFile won't generate an event where the previous size is going to be equal to the current size. For this to happen, there would have to be a change to the file, because fs.watchFile fire only on changes. But in a straightforward copy operation there typically won't be a file change occurring where the previous size and the current size are the same.
Here's a solution that detects when a file has stopped changing after a set timeout:
var fs = require('fs');
var chokidar = require('chokidar');
var watcher = chokidar.watch('dir', {ignored: /^\./, persistent: true});
// How long (ms) the file's mtime must remain unchanged before we consider
// the copy finished.
var end_timeout = 30000;
watcher
.on('add', function(path) {
console.log('File', path, 'has been added');
// Take an initial stat snapshot, then start polling until it stabilizes.
fs.stat(path, function (err, stat) {
// Replace error checking with something appropriate for your app.
if (err) throw err;
setTimeout(checkEnd, end_timeout, path, stat);
});
});
// Poll the file's stats; once the mtime stops changing between polls,
// declare the copy finished, otherwise schedule another check.
function checkEnd(path, prev) {
  fs.stat(path, function (err, stat) {
    // Replace error checking with something appropriate for your app.
    if (err) throw err;
    var stillChanging = stat.mtime.getTime() !== prev.mtime.getTime();
    if (stillChanging) {
      // File grew or was touched since last poll — keep waiting.
      setTimeout(checkEnd, end_timeout, path, stat);
      return;
    }
    console.log("finished");
    // Move on: call whatever needs to be called to process the file.
  });
}
Note that if the directory is prepopulated with files, this code will start watching them right away. I can imagine use-case scenarios where this is desirable and some where it is not.
I'm trying to use Node.js to create a zip file from an existing folder, and preserve the structure.
I was hoping there would be a simple module to allow this kind of thing:
archiver.create("../folder", function(zipFile){
console.log('et viola');
});
but I can't find anything of the sort!
I've been googling around, and the best I've found so far is zipstream, but as far as I can tell there's no way to do what I want. I don't really want to call into command-line utilities, as the app has to be cross-platform.
Any help would be greatly appreciated.
Thanks.
It's not entirely code free, but you can use node-native-zip in conjunction with folder.js. Usage:
/**
 * Zip the contents of `dir` into "<dir>.zip", preserving relative paths.
 * Invokes callback(err) on failure or callback(null, zipPath) on success.
 */
function zipUpAFolder (dir, callback) {
  var archive = new zip();
  // Walk every file under dir and describe it for archive.addFiles.
  folder.mapAllFiles(dir, function (path, stats, describe) {
    describe({
      // Path relative to dir (strip the dir prefix and leading separator).
      name: path.replace(dir, "").substr(1),
      path: path
    });
  }, function (err, fileList) {
    if (err) return callback(err);
    // Add the collected files to the archive, then flush it to disk.
    archive.addFiles(fileList, function (err) {
      if (err) return callback(err);
      var zipPath = dir + ".zip";
      fs.writeFile(zipPath, archive.toBuffer(), function (err) {
        if (err) return callback(err);
        callback(null, zipPath);
      });
    });
  });
}
This can be done even simpler using node's built-in execfile function. It spawns a process and executes the zip command through the os, natively. Everything just works.
// Shell out to the OS `zip` binary; -r recurses so the folder tree is kept.
// NOTE(review): zipName, pathToFolder, logZipFile and localPath are defined
// elsewhere — confirm against the full file before reuse.
var execFile = require('child_process').execFile;
execFile('zip', ['-r', '-j', zipName, pathToFolder], function(err, stdout) {
console.log(err);
logZipFile(localPath);
});
The -j flag 'junks' the file path, which is useful if you are zipping a subdirectory and don't want excessive nesting within the zip file.
Here's some documentation on execfile.
Here's a man page for zip.
Using Easy-zip, npm install easy-zip, you can do:
// Zip the ../easy-zip directory and persist the archive as folderall.zip.
var zip5 = new EasyZip();
zip5.zipFolder('../easy-zip',function(){
zip5.writeToFile('folderall.zip');
});