I'm learning node, its events API and trying to make a simple example.
So here's my code:
var fs = require('fs');
var util = require('util');
var events = require('events');
var FileLoader = function () {
events.EventEmitter.call(this);
this.load = function (url) {
fs.readFile(url, function (err, data) {
if (err) {
throw err;
} else {
this.emit('loaded', data.toString());
}
});
};
};
util.inherits(FileLoader, events.EventEmitter);
module.exports = FileLoader;
And I want to load() a text file and when it is loaded, catch it with .on('loaded',function(){...}), but this is undefined, so program crashes.
I'm definitely missing something, how to make it work?
This is not an issue with Node, it is an issue with JavaScript. The this in this.emit is not a FileLoader instance.
I recommend you read up on the behavior of this in JavaScript. The MDN docs for this may be helpful for you.
In this case, you need to bind the this inside your readFile callback so that the inner this is the outer this by adding .bind(this) to your callback.
this.load = function (url) {
fs.readFile(url, function (err, data) {
if (err) {
throw err;
} else {
this.emit('loaded', data.toString());
}
}.bind(this));
};
Related
This is my module1:
var fs = require('fs');
var obj;
exports.module1= function(ret)
{
fs.readFile('source.json', 'utf8', function (err, data)
{
if (err) {
return console.error(err);
}
obj=JSON.parse(data);
console.log(obj);
return obj;
});
}
Module2:
var module1 = require('./module1.js');
var obj=module1.module1();
var callback = function () {
console.log(obj);
};
setTimeout(callback, 10000);
The obj of module2 is not getting updated with returned value of module1. I am newbie btw.
I believe ur problem is that fs.readFile is an async call and its return value will not be passed to the obj defined in Module2.
Just for reference u may pass an callback function to module1's export and then call when the file reading completes.
Module1:
var fs = require('fs');
var obj;
exports.module1= function(callback)
{
fs.readFile('source.json', 'utf8', function (err, data)
{
if (err) {
return console.error(err);
}
obj=JSON.parse(data);
console.log(obj);
callback(obj)
});
}
Module2:
var module1 = require('./module1.js');
var obj;
module1.module1(function(result){
obj = result;
});
var callback = function () {
console.log(obj);
};
setTimeout(callback, 10000);
You can share the object by passing it the the global context, which means the object will be usable from any file at any time, but the main JavaScript code (which requires the module) will be able to access it too. If you don't want it to access the code, comment this post and I'll make a script to do that.
The most simple way is to share the object :
global.__module_object = obj;
And anywhere you'll be able to access the object by doing global.__module_object.data = "Hello world !"; for example.
I use the following code which is working OK
var ncp = require('ncp').ncp;
function load(folderPath) {
ncp.limit = 16;
var path = require('path');
var localPath = path.join(__dirname, '../pl');
ncp(folderPath, localPath, {dereference: false}, function (err) {
if (err) {
return console.error(err);
}
console.log('done to save the files!');
});
};
I want to use promise instead of callback but when using the following I got error
var Promise = require('bluebird');
var ncp = Promise.promisifyAll(require('ncp').ncp);
function load(folderPath) {
ncp.limit = 16;
var localPath = path.join(__dirname, '../pl');
ncp(folderPath, localPath, {dereference: false})
.then(function (result) {
console.log('done to save the files!');
})
.catch(function (err) {
console.err(err);
});
};
The error is :
TypeError: Cannot read property 'then' of undefined
Promise.promisifyAll() is used to promisify objects. It will iterate the object and promisify all of the function properties on that object.
However, ncp is a function itself, it doesn't have properties, so promisifyAll() won't do. You're looking for promisify() which takes a function as an argument, and returns a promisified function.
So all you need to do is change
var ncp = Promise.promisifyAll(require('ncp').ncp);
to
var ncp = Promise.promisify(require('ncp').ncp);
I've used the following code to call two modules, but the invoke action is called before the validate file (I saw in debug). What I should do to verify that validateFile is called before appHandler.invokeAction? Should I use a promise?
var validator = require('../uti/valid').validateFile();
var appHandler = require('../contr/Handler');
appHandler.invokeAction(req, res);
Update
this is the validate file code
var called = false;
var glob = require('glob'),
fs = require('fs');
module.exports = {
validateFile: function () {
glob("myfolder/*.json", function (err, files) {
var stack = [];
files.forEach(function (file) {
fs.readFile(file, 'utf8', function (err, data) { // Read each file
if (err) {
console.log("cannot read the file", err);
}
var obj = JSON.parse(data);
obj.action.forEach(function (crud) {
for (var k in crud) {
if (_inArray(crud[k].path, stack)) {
console.log("duplicate founded!" + crud[k].path);
break;
}
stack.push(crud[k].path);
}
})
});
});
});
}
};
Because glob and fs.readFile are async functions and appHandler.invokeAction is invoked during i/o from disk.
Promise is a good solution to solve this but an old school callback could do the job.
validator.validateFile().then(function() {
appHandler.invokeAction(req, res);
});
and for validate
var Promise = require("bluebird"), // not required if you are using iojs or running node with `--harmony`
glob = require('mz/glob'),
fs = require('mz/fs');
module.exports = {
validateFile: function () {
return glob("myfolder/*.json").then(function(files) {
return Promise.all(files.map(function(file) {
// will return an array of promises, if any of them
// is rejected, validateFile promise will be rejected
return fs.readFile(file).then(function (content) {
// throw new Error(''); if content is not valid
});
}));
})
}
};
If you want working with promise mz could help :)
As the fs.fileRead is async, you should put the code that you want to execute after validateFile to its callback.
The origin could be:
var validator = require('../uti/valid').validateFile();
var appHandler = require('../contr/Handler');
// create a new function that when execute, will call appHandler.invokeAction with req and res given to its arguments.
validator.validateFile(appHandler.invokeAction.bind(null, req, res));
The validator part should be:
var called = false;
var glob = require('glob'),
fs = require('fs');
module.exports = {
validateFile: function (callback) {
glob("myfolder/*.json", function (err, files) {
var stack = [];
// Use it to decide whether all files processed or not.
var filesToLoad = files.length;
files.forEach(function (file) {
fs.readFile(file, 'utf8', function (err, data) { // Read each file
--filesToLoad;
if (err) {
console.log("cannot read the file", err);
// If the invoke action doesn't depend on the result. You may want to call it here too.
}
var obj = JSON.parse(data);
obj.action.forEach(function (crud) {
for (var k in crud) {
if (_inArray(crud[k].path, stack)) {
console.log("duplicate founded!" + crud[k].path);
break;
}
stack.push(crud[k].path);
}
})
// Only called the callback after all files processed.
if (filesToLoad === 0) {
callback();
}
});
});
});
}
};
Edit: Thanks for Bergi's remind that there's the files are an array and you have to call the callback when all files is processed, so we have to further use a variable to decide how many files are not processed yet.
I'm using the async module's forEachOf method to print the end result after iterating through an object. Here is a shortened version of what I'm doing:
var async = require('async'),
cheerio = require('cheerio'),
request = require('request');
var returnArray = [];
async.forEachOf(myObj, function (value, key, callback) {
var anotherObj = {};
anotherObj.type = "val1";
request(someurl, function(err, res, body) {
if (err) {
return callback(err);
}
var $ = cheerio.load(body);
anotherObj.name = "val2";
var miniObj = {};
$('#some-element', "#an-id").each(function(i, value) {
var val = value.value;
miniObj[size] = val;
});
anotherObj.miniObj = miniObj;
returnArray.push(anotherObj);
return callback();
});
}, function (err) {
if (err) {
console.error(err.message);
}
console.log(returnArray);
});
However, when I run the program, nothing (namely, 'returnArray') gets printed to the console like it should be.
For reference, I have looked at these other, similar posts:
Using async module to fire a callback once all files are read (seems outdated, is using the wrong method, and doesn't address the issue)
Last callback not being called using async (doesn't address the issue)
I'm not sure what I'm doing wrong here. Could anyone please point out what I'm doing wrong?
Thanks!
EDIT: So I think I finally figured out what I was doing wrong. In a different example I provided earlier HERE, using Runnable I forgot to add a 'return callback()' statement. Then it worked. The only difference between that and this example being that my 'return callback()' statement in the above-provided example is itself called within another asynchronous method. I think in order to fix my problem I will somehow have to make sure (probably using some control flow function in async's library) to call 'return callback()' at the correct 'scope/level' after the second asynchronous method has finished. I think I will attribute this, my mistake, to the lack of documentation on proper usage of the 'return callback()' statement in the async docs. I will update this post with a solution once I figure it out (and catch up on some sleep)!
Your statement:
if (err) {
return callback(err);
}
is not valid for asynchronous programming. Instead, you should do:
if(err) callback(err);
This is why you aren't getting anything returned. I rewrote your code with async concepts applied:
var async = require('async'),
var cheerio = require('cheerio'),
var request = require('request');
var returnArray = [];
async.forEachOf(myObj, function (value, key, next) {
var anotherObj = {};
anotherObj.type = "val1";
request(someurl, function(err, res, body) {
if (err) next(err);
var $ = cheerio.load(body);
anotherObj.name = "val2";
var miniObj = {};
async.each($('#some-element', "#an-id"), function (value, next) {
var val = value.value;
miniObj[size] = val;
});
anotherObj.miniObj = miniObj;
returnArray.push(anotherObj);
next();
});
}, function (err) {
if (err) console.error(err.message);
console.log(returnArray);
callback(returnArray);
});
Notice that you have two different named callbacks. The outer function callback is called callback. The inner function callback is called next.
I've been trying to wrap my head around this issue for the last hours but can't figure it out. I guess I still have to get used to the functional programming style ;)
I wrote a recursive function that traverses through a directory structure and does things to certain files. This functions uses the asynchronous IO methods. Now I want to perform some action when this whole traversing is done.
How would I make sure that this action is performed after all parse calls have been performed but still use the asynchronous IO functions?
var fs = require('fs'),
path = require('path');
function parse(dir) {
fs.readdir(dir, function (err, files) {
if (err) {
console.error(err);
} else {
// f = filename, p = path
var each = function (f, p) {
return function (err, stats) {
if (err) {
console.error(err);
} else {
if (stats.isDirectory()) {
parse(p);
} else if (stats.isFile()) {
// do some stuff
}
}
};
};
var i;
for (i = 0; i < files.length; i++) {
var f = files[i];
var p = path.join(dir, f);
fs.stat(p, each(f, p));
}
}
});
}
parse('.');
// do some stuff here when async parse completely finished
Look for Step module. It can chain asynchronous functions calls and pass results from one to another.
You could use async module . Its auto function is awesome . If you have function A() and function B() and function C() . Both function B() and C() depend of function A() that is using value return from function A() . using async module function you could make sure that function B and C will execute only when function A execution is completed .
Ref : https://github.com/caolan/async
async.auto({
A: functionA(){//code here },
B: ['A',functionB(){//code here }],
C: ['A',functionC(){//code here }],
D: [ 'B','C',functionD(){//code here }]
}, function (err, results) {
//results is an array that contains the results of all the function defined and executed by async module
// if there is an error executing any of the function defined in the async then error will be sent to err and as soon as err will be produced execution of other function will be terminated
}
})
});
In above example functionB and functionC will execute together once function A execution will be completed . Thus functionB and functionC will be executed simultaneously
functionB: ['A',functionB(){//code here }]
In above line we are passing value return by functionA using 'A'
and functionD will be executed only when functionB and functionC execution will be completed .
if there will be error in any function , then execution of other function will be terminated and below function will be executed .where you could write your logic of success and failure .
function (err, results) {}
On succesfull execution of all function "results" will contain the result of all the functions defined in async.auto
function (err, results) {}
Take a look at modification of your original code which does what you want without async helper libs.
var fs = require('fs'),
path = require('path');
function do_stuff(name, cb)
{
console.log(name);
cb();
}
function parse(dir, cb) {
fs.readdir(dir, function (err, files) {
if (err) {
cb(err);
} else {
// cb_n creates a closure
// which counts its invocations and calls callback on nth
var n = files.length;
var cb_n = function(callback)
{
return function() {
--n || callback();
}
}
// inside 'each' we have exactly n cb_n(cb) calls
// when all files and dirs on current level are proccessed,
// parent cb is called
// f = filename, p = path
var each = function (f, p) {
return function (err, stats) {
if (err) {
cb(err);
} else {
if (stats.isDirectory()) {
parse(p, cb_n(cb));
} else if (stats.isFile()) {
do_stuff(p+f, cb_n(cb));
// if do_stuff does not have async
// calls inself it might be easier
// to replace line above with
// do_stuff(p+f); cb_n(cb)();
}
}
};
};
var i;
for (i = 0; i < files.length; i++) {
var f = files[i];
var p = path.join(dir, f);
fs.stat(p, each(f, p));
}
}
});
}
parse('.', function()
{
// do some stuff here when async parse completely finished
console.log('done!!!');
});
Something like this would work -- basic change to your code is the loop turned into a recursive call that consumes a list until it is done. That makes it possible to add an outer callback (where you can do some processing after the parsing is done).
var fs = require('fs'),
path = require('path');
function parse(dir, cb) {
fs.readdir(dir, function (err, files) {
if (err)
cb(err);
else
handleFiles(dir, files, cb);
});
}
function handleFiles(dir, files, cb){
var file = files.shift();
if (file){
var p = path.join(dir, file);
fs.stat(p, function(err, stats){
if (err)
cb(err);
else{
if (stats.isDirectory())
parse(p, function(err){
if (err)
cb(err);
else
handleFiles(dir, files, cb);
});
else if (stats.isFile()){
console.log(p);
handleFiles(dir, files, cb);
}
}
})
} else {
cb();
}
}
parse('.', function(err){
if (err)
console.error(err);
else {
console.log('do something else');
}
});
See following solution, it uses deferred module:
var fs = require('fs')
, join = require('path').join
, promisify = require('deferred').promisify
, readdir = promisify(fs.readdir), stat = promisify(fs.stat);
function parse (dir) {
return readdir(dir).map(function (f) {
return stat(join(dir, f))(function (stats) {
if (stats.isDirectory()) {
return parse(dir);
} else {
// do some stuff
}
});
});
};
parse('.').done(function (result) {
// do some stuff here when async parse completely finished
});
I've been using syncrhonize.js with great success. There's even a pending pull request (which works quite well) to support async functions which have multiple parameters. Far better and easier to use than node-sync imho. Added bonus that it has easy-to-understand and thorough documentation, whereas node-sync does not.
Supports two different methods for wiring up the sync, a defered/await model (like what #Mariusz Nowak was suggesting) and a slimmer though not-as-granular function-target approach. The docs are pretty straightforward for each.
Recommend to use node-seq
https://github.com/substack/node-seq
installed by npm.
I'm using it, and I love it..
Look for node-sync, a simple library that allows you to call any asynchronous function in synchronous way. The main benefit is that it uses javascript-native design - Function.prototype.sync function, instead of heavy APIs which you'll need to learn. Also, asynchronous function which was called synchronously through node-sync doesn't blocks the whole process - it blocks only current thread!