Calling modules in a specific order in Node - JavaScript

I've used the following code to call two modules, but invokeAction is called before validateFile (I saw this while debugging). What should I do to ensure that validateFile is called before appHandler.invokeAction? Should I use a promise?
var validator = require('../uti/valid').validateFile();
var appHandler = require('../contr/Handler');
appHandler.invokeAction(req, res);
Update
This is the validateFile code:
var called = false;
var glob = require('glob'),
fs = require('fs');
module.exports = {
validateFile: function () {
glob("myfolder/*.json", function (err, files) {
var stack = [];
files.forEach(function (file) {
fs.readFile(file, 'utf8', function (err, data) { // Read each file
if (err) {
console.log("cannot read the file", err);
}
var obj = JSON.parse(data);
obj.action.forEach(function (crud) {
for (var k in crud) {
if (_inArray(crud[k].path, stack)) {
console.log("duplicate founded!" + crud[k].path);
break;
}
stack.push(crud[k].path);
}
})
});
});
});
}
};

Because glob and fs.readFile are async functions, appHandler.invokeAction is invoked while the I/O from disk is still in progress.
A promise is a good solution here, but an old-school callback could also do the job.
validator.validateFile().then(function() {
appHandler.invokeAction(req, res);
});
and for the validator:
var Promise = require("bluebird"), // not required if you are using iojs or running node with `--harmony`
glob = require('mz/glob'),
fs = require('mz/fs');
module.exports = {
validateFile: function () {
return glob("myfolder/*.json").then(function(files) {
return Promise.all(files.map(function(file) {
// will return an array of promises, if any of them
// is rejected, validateFile promise will be rejected
return fs.readFile(file).then(function (content) {
// throw new Error(''); if content is not valid
});
}));
})
}
};
If you want to work with promises, mz could help :)
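Side note: for this to work, validator must be the module itself, i.e. var validator = require('../uti/valid'); without calling validateFile() at require time. If you'd rather not pull in mz, roughly the same shape can be built with Node's util.promisify (a sketch, assuming Node 8+ and a callback-style glob, v8 or earlier):
var promisify = require('util').promisify;
var glob = promisify(require('glob'));            // callback-style glob wrapped into a promise
var readFile = promisify(require('fs').readFile);
module.exports = {
    validateFile: function () {
        return glob("myfolder/*.json").then(function (files) {
            return Promise.all(files.map(function (file) {
                return readFile(file, 'utf8').then(function (content) {
                    // throw new Error('...') here if content is not valid;
                    // the rejection propagates to the caller's .catch()
                });
            }));
        });
    }
};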

As fs.readFile is async, you should put the code that you want to execute after validateFile into its callback.
The calling code could be:
var validator = require('../uti/valid'); // note: require the module itself, don't call validateFile() here
var appHandler = require('../contr/Handler');
// create a new function that when execute, will call appHandler.invokeAction with req and res given to its arguments.
validator.validateFile(appHandler.invokeAction.bind(null, req, res));
The validator part should be:
var called = false;
var glob = require('glob'),
fs = require('fs');
module.exports = {
validateFile: function (callback) {
glob("myfolder/*.json", function (err, files) {
var stack = [];
// Used to decide whether all files have been processed or not.
var filesToLoad = files.length;
files.forEach(function (file) {
fs.readFile(file, 'utf8', function (err, data) { // Read each file
--filesToLoad;
if (err) {
console.log("cannot read the file", err);
// If the invoke action doesn't depend on the result, you may want to call the callback here too.
}
var obj = JSON.parse(data);
obj.action.forEach(function (crud) {
for (var k in crud) {
if (_inArray(crud[k].path, stack)) {
console.log("duplicate founded!" + crud[k].path);
break;
}
stack.push(crud[k].path);
}
})
// Only call the callback after all files have been processed.
if (filesToLoad === 0) {
callback();
}
});
});
});
}
};
Edit: Thanks to Bergi for the reminder that files is an array and the callback must only be called once all files have been processed, so we need an extra variable to track how many files are still unprocessed.
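One edge case worth guarding against: if glob matches no files, filesToLoad starts at 0, the forEach body never runs, and callback() is never reached. A minimal guard (a sketch, placed right after filesToLoad is computed):
if (filesToLoad === 0) {
    // no JSON files matched the pattern; nothing to validate
    return callback();
}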

Related

How to save the file path to a variable

I have tried things and looked at Stack Overflow, but the other posts don't answer my question. How can I get the path of the most recently uploaded file saved to a variable so it can be used later?
Code:
var pathtocheck = "C:\Users\user1\Downloads";
var path = require('path');
var fs = require('fs');
var getMostRecent = function (dir, cb) {
var dir = path.resolve(dir);
var files = fs.readdir(dir, function (err, files) {
var sorted = files.map(function(v) {
var filepath = path.resolve(dir, v);
return {
name:v,
time:fs.statSync(filepath).mtime.getTime()
};
})
.sort(function(a, b) { return b.time - a.time; })
.map(function(v) { return v.name; });
if (sorted.length > 0) {
cb(null, sorted[0]);
} else {
cb('Y U NO have files in this dir?');
}
})
}
await getMostRecent(pathtocheck, function (err, recent) {
if (err) console.error(err);
console.log(recent);
});
var lastdownloadedimage = ;
With callbacks, you have to write your code in the callback passed to getMostRecent, so
// Note: awaiting this takes no effect unless getMostRecent returns a promise.
getMostRecent(pathtocheck, function (err, recent) {
if (err) console.error(err);
console.log(recent);
var lastdownloadedimage = recent;
// Or just directly use `recent`
});
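Alternatively, if you do want the await from your original snippet to take effect, one option is to wrap the callback-style function with util.promisify (a sketch; getMostRecentP is just a name I picked, and this works because getMostRecent takes an error-first callback as its last argument):
const { promisify } = require('util');
const getMostRecentP = promisify(getMostRecent);   // now returns a promise

(async () => {
  const lastdownloadedimage = await getMostRecentP(pathtocheck);
  console.log(lastdownloadedimage);
})();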
Or,
Async-await and Promise can also solve your issue, though I'm not sure how much you're familiar with them.
You can use the promisified version of the file system API in Node.js v10 or above, by requiring require('fs').promises instead of require('fs') (see the fs.promises documentation).
Declare the functions like this:
// Also it's a good practice to use `const` or `let`, instead of `var`
const pathtocheck = "C:\Users\user1\Downloads";
const path = require('path');
const fs = require('fs');
const fsp = require('fs').promises;
// Declare the function with `async` to use `await` in it
const getMostRecent = async function (dir) {
dir = path.resolve(dir);
const files = await fsp.readdir(dir)
const sorted = files.map(function(v) {
const filepath = path.resolve(dir, v);
return {
name:v,
time:fs.statSync(filepath).mtime.getTime()
// maybe you can use fsPromises.stat here
};
})
.sort(function(a, b) { return b.time - a.time; })
.map(function(v) { return v.name; });
if (sorted.length > 0) {
return sorted[0];
} else {
// Now you have no callbacks, so there are two options to return the error state.
// 1. Throw an Error with an error message
// 2. Return a special value such as `null` or `false` that the caller can check for.
}
}; // <-- perhaps you need to place a semicolon here.
And you call the function in an async IIFE, wrapping it in an anonymous async function so you can use await:
(async function() {
const lastdownloadedimage = await getMostRecent(pathtocheck);
console.log(lastdownloadedimage)
})();
Or use Promise.then:
getMostRecent(pathtocheck).then(function(recent) {
var lastdownloadedimage = recent;
// Again, you can just directly use `recent`
})
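For completeness, here is a rough sketch of the fsPromises.stat variant hinted at in the comment above, gathering the stat calls with Promise.all so nothing blocks (getMostRecentAsync is a name chosen for this sketch; it reuses path and fsp from the snippet above):
const getMostRecentAsync = async function (dir) {
  dir = path.resolve(dir);
  const files = await fsp.readdir(dir);
  const entries = await Promise.all(files.map(async (name) => ({
    name,
    time: (await fsp.stat(path.resolve(dir, name))).mtime.getTime(),
  })));
  entries.sort((a, b) => b.time - a.time);
  return entries.length > 0 ? entries[0].name : null; // null signals "no files"
};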

Async await in a node script

I'm trying to execute a node script that downloads some urls. It looks like this:
const https = require('https');
const fs = require('fs');
const async = require('async');
function download_cb(err) {
if (err) {
console.log("Download error:"+err);
}
}
function download_file(task) {
const url = task.url;
const dest = task.dest;
var file = fs.createWriteStream(dest);
var request = https.get(url, function(response) {
response.pipe(file);
file.on('finish', function() {
file.close(download_cb); // close() is async, call cb after close completes.
});
}).on('error', function(err) { // Handle errors
fs.unlink(dest); // Delete the file async. (But we don't check the result)
download_cb(err.message);
});
};
function get_urls() {
var queue = async.queue(download_file, 10);
const urls = []; // some array of urls
for (var i=0; i<urls.length; i++) {
queue.push({url: urls[i], dest: dest/*whatever*/});
}
return queue.drain();
}
(async () => {
await get_urls().then(()=>{
console.log("All done");
})
})();
This ends up downloading only the first 10 urls and then exits, and the "All done" message is never shown. It seems that the promise returned by queue.drain() is never resolved, even though it is being awaited. What am I missing?
I also tried:
queue.drain = function() {
console.log("All files are downloaded");
};
inside the get_urls function, but it doesn't change anything and the code in it never gets executed either.
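For reference, a minimal sketch of how a queue worker signals completion (assuming async v3, where a plain worker function receives a completion callback as its last argument and queue.drain() called with no arguments returns a promise):
const async = require('async');

const queue = async.queue(function worker(task, done) {
  setTimeout(function () {      // stand-in for the real download
    console.log('finished', task.url);
    done();                     // the queue only counts the task as complete once done() is called
  }, 100);
}, 10);

(async () => {
  ['a', 'b', 'c'].forEach((u) => queue.push({ url: u }));
  await queue.drain();          // resolves only after every task has called done()
  console.log('All done');
})();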

Node.js emitter null data on callback

There is a function that I use to read all files in a directory and then send an object to the client with an emitter.
This is my code, which works fine:
const getFilesList = (path, emitter) => {
fs.readdir(path, (err, files) => {
emitter('getFileList', files);
});
};
But when I want to filter hidden files with this code, 'standardFolders' is sent empty in the emitter.
const getFilesList = (path, emitter) => {
let standardFolders = [];
fs.readdir(path, (err, files) => {
if (files) {
files.map((file) => {
winattr.get(path + file, function (err, attrs) {
if (err == null && attrs.directory && (!attrs.hidden && !attrs.system)) {
standardFolders.push(file)
}
});
});
} else {
standardFolders = null;
}
emitter('getFileList', standardFolders);
});
};
What is wrong with my code in the second part?
winattr.get(filepath, callback) is asynchronous, so your code "starts" the files.map() line and then immediately moves on to emitter('getFileList', standardFolders), at which point standardFolders is still empty because the work hasn't finished yet!
You can use a library like async.io to handle your callback functions, or you can use a counter and keep track yourself of when all of the callbacks (one per file) have finished.
Example:
// an asynchronous function because setTimeout
function processor(v,cb){
let delay = Math.random()*2000+500;
console.log('delay',delay);
setTimeout(function(){
console.log('val',v);
cb(null,v);
},delay);
}
const main = function(){
const list = ['a','b','c','d'];
let processed = [];
let count = 0;
console.log('starting');
list.map(function(v,i,a){
console.log('calling processor');
processor(v,function(err,value){
processed.push(v);
count+=1;
console.log('count',count);
if(count>=list.length){
// all are finished, continue on here.
console.log('done');
}
})
})
console.log('not done yet!');
};
main();
Similarly, for your code:
const getFilesList = (path, emitter) => {
let standardFolders = [];
fs.readdir(path, (err, files) => {
if (files) {
let count = 0;
files.map((file) => {
winattr.get(path + file, function (err, attrs) {
if (err == null && attrs.directory && (!attrs.hidden && !attrs.system)) {
standardFolders.push(file)
}
count+=1;
if(count>=files.length){
// finally done
emitter('getFileList', standardFolders);
}
});
});
} else {
standardFolders = null;
emitter('getFileList', standardFolders);
}
});
};
As already said in the other answer, winattr.get is async, so the loop finishes before any of the winattr.get callbacks is called.
You could convert your code using async/await and promisify into code that looks almost like a synchronous version, and completely get rid of the callbacks and counters:
const {promisify} = require('util')
const readdir = promisify(require('fs').readdir)
const winattrget = promisify(require('winattr').get)
const getFilesList = async (path, emitter) => {
let standardFolders = [];
try {
let files = await readdir(path);
for (let file of files) {
try {
let attrs = await winattrget(path + file)
if (attrs.directory && (!attrs.hidden && !attrs.system)) {
standardFolders.push(file)
}
} catch (err) {
// do nothing if an error occurs
}
}
} catch (err) {
standardFolders = null;
}
emitter('getFileList', standardFolders);
};
An additional note: in your code you write files.map, but map is used to transform the values of an array and store them in a new array, which your code doesn't do, so in this case you should use a forEach loop instead of map.
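If the serial for...of loop above feels slow for large directories, the same promisified calls can run in parallel with Promise.all (a sketch using the same readdir and winattrget helpers; getFilesListParallel is just a name for this variant):
const getFilesListParallel = async (path, emitter) => {
  let standardFolders = null;
  try {
    const files = await readdir(path);
    const checks = await Promise.all(files.map(async (file) => {
      try {
        const attrs = await winattrget(path + file);
        return attrs.directory && !attrs.hidden && !attrs.system ? file : null;
      } catch (err) {
        return null;                 // skip files whose attributes cannot be read
      }
    }));
    standardFolders = checks.filter((file) => file !== null);
  } catch (err) {
    standardFolders = null;          // readdir itself failed
  }
  emitter('getFileList', standardFolders);
};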

Error while using bluebird with ncp

I use the following code, which works OK:
var ncp = require('ncp').ncp;
function load(folderPath) {
ncp.limit = 16;
var path = require('path');
var localPath = path.join(__dirname, '../pl');
ncp(folderPath, localPath, {dereference: false}, function (err) {
if (err) {
return console.error(err);
}
console.log('done to save the files!');
});
};
I want to use a promise instead of a callback, but when using the following I get an error:
var Promise = require('bluebird');
var ncp = Promise.promisifyAll(require('ncp').ncp);
function load(folderPath) {
ncp.limit = 16;
var localPath = path.join(__dirname, '../pl');
ncp(folderPath, localPath, {dereference: false})
.then(function (result) {
console.log('done to save the files!');
})
.catch(function (err) {
console.err(err);
});
};
The error is :
TypeError: Cannot read property 'then' of undefined
Promise.promisifyAll() is used to promisify objects. It will iterate the object and promisify all of the function properties on that object.
However, ncp is itself a function rather than an object of functions, so promisifyAll() won't do. You're looking for promisify(), which takes a function as an argument and returns a promisified function.
So all you need to do is change
var ncp = Promise.promisifyAll(require('ncp').ncp);
to
var ncp = Promise.promisify(require('ncp').ncp);
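With that change, load would look something like this (a sketch; note that console.err in the original snippet is not a function, console.error is, and I'm not certain the limit property carries over to the promisified wrapper, so treat that line as an assumption):
var Promise = require('bluebird');
var path = require('path');
var ncp = Promise.promisify(require('ncp').ncp);

function load(folderPath) {
    ncp.limit = 16;   // assumption: setting limit on the promisified wrapper may not reach the underlying ncp
    var localPath = path.join(__dirname, '../pl');
    return ncp(folderPath, localPath, { dereference: false })
        .then(function () {
            console.log('done to save the files!');
        })
        .catch(function (err) {
            console.error(err);   // console.error, not console.err
        });
}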

Executing asynchronous calls in a synchronous manner

I've been trying to wrap my head around this issue for the last few hours but can't figure it out. I guess I still have to get used to the functional programming style ;)
I wrote a recursive function that traverses a directory structure and does things to certain files. This function uses the asynchronous IO methods. Now I want to perform some action when the whole traversal is done.
How do I make sure that this action is performed only after all the parse calls have completed, while still using the asynchronous IO functions?
var fs = require('fs'),
path = require('path');
function parse(dir) {
fs.readdir(dir, function (err, files) {
if (err) {
console.error(err);
} else {
// f = filename, p = path
var each = function (f, p) {
return function (err, stats) {
if (err) {
console.error(err);
} else {
if (stats.isDirectory()) {
parse(p);
} else if (stats.isFile()) {
// do some stuff
}
}
};
};
var i;
for (i = 0; i < files.length; i++) {
var f = files[i];
var p = path.join(dir, f);
fs.stat(p, each(f, p));
}
}
});
}
parse('.');
// do some stuff here when async parse completely finished
Look at the Step module. It can chain asynchronous function calls and pass results from one to another.
You could use the async module. Its auto function is awesome. Say you have functionA(), functionB() and functionC(), where functionB() and functionC() depend on functionA() because they use the value returned from functionA(). Using the async module's auto function you can make sure that functionB and functionC execute only when functionA has completed.
Ref: https://github.com/caolan/async
async.auto({
    A: function (callback) { /* code here */ },
    B: ['A', function (results, callback) { /* code here, can use results.A */ }],
    C: ['A', function (results, callback) { /* code here, can use results.A */ }],
    D: ['B', 'C', function (results, callback) { /* code here */ }]
}, function (err, results) {
    // `results` contains the results of all the tasks defined above.
    // If any task passes an error to its callback, `err` is set here and
    // the execution of the remaining tasks is terminated.
});
In the above example, functionB and functionC will execute once functionA has completed; they run simultaneously.
B: ['A', function (results, callback) { /* code here */ }]
In the above line the result of functionA is made available through the 'A' dependency,
and functionD will be executed only when both functionB and functionC have completed.
If there is an error in any task, the execution of the remaining tasks is terminated and the final callback below is executed, where you can write your success and failure logic:
function (err, results) {}
On successful execution of all tasks, results will contain the results of all the tasks defined in async.auto:
function (err, results) {}
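A tiny runnable sketch of the same idea, with hypothetical values, assuming async v2 or later where dependent tasks receive (results, callback):
var async = require('async');

async.auto({
    A: function (callback) {
        callback(null, 40);                    // task A produces a value
    },
    B: ['A', function (results, callback) {
        callback(null, results.A + 2);         // task B consumes results.A
    }]
}, function (err, results) {
    if (err) return console.error(err);
    console.log(results.B);                    // 42
});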
Take a look at this modification of your original code, which does what you want without async helper libs.
var fs = require('fs'),
path = require('path');
function do_stuff(name, cb)
{
console.log(name);
cb();
}
function parse(dir, cb) {
fs.readdir(dir, function (err, files) {
if (err) {
cb(err);
} else {
// cb_n creates a closure
// which counts its invocations and calls callback on nth
var n = files.length;
var cb_n = function(callback)
{
return function() {
--n || callback();
}
}
// inside 'each' we have exactly n cb_n(cb) calls
// when all files and dirs on the current level are processed,
// parent cb is called
// f = filename, p = path
var each = function (f, p) {
return function (err, stats) {
if (err) {
cb(err);
} else {
if (stats.isDirectory()) {
parse(p, cb_n(cb));
} else if (stats.isFile()) {
do_stuff(p+f, cb_n(cb));
// if do_stuff does not have async
// calls in itself, it might be easier
// to replace line above with
// do_stuff(p+f); cb_n(cb)();
}
}
};
};
var i;
for (i = 0; i < files.length; i++) {
var f = files[i];
var p = path.join(dir, f);
fs.stat(p, each(f, p));
}
}
});
}
parse('.', function()
{
// do some stuff here when async parse completely finished
console.log('done!!!');
});
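One caveat with the counting approach: if a (sub)directory is empty, n is 0, no cb_n closure ever fires, and the chain of callbacks stalls. A minimal guard, placed right after n is computed (a sketch, not in the code above):
if (n === 0) {
    cb();   // nothing on this level, report completion to the parent immediately
    return;
}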
Something like this would work -- the basic change to your code is that the loop is turned into a recursive call that consumes the list until it is done. That makes it possible to add an outer callback (where you can do some processing after the parsing is done).
var fs = require('fs'),
path = require('path');
function parse(dir, cb) {
fs.readdir(dir, function (err, files) {
if (err)
cb(err);
else
handleFiles(dir, files, cb);
});
}
function handleFiles(dir, files, cb){
var file = files.shift();
if (file){
var p = path.join(dir, file);
fs.stat(p, function(err, stats){
if (err)
cb(err);
else{
if (stats.isDirectory())
parse(p, function(err){
if (err)
cb(err);
else
handleFiles(dir, files, cb);
});
else if (stats.isFile()){
console.log(p);
handleFiles(dir, files, cb);
}
}
})
} else {
cb();
}
}
parse('.', function(err){
if (err)
console.error(err);
else {
console.log('do something else');
}
});
See the following solution; it uses the deferred module:
var fs = require('fs')
, join = require('path').join
, promisify = require('deferred').promisify
, readdir = promisify(fs.readdir), stat = promisify(fs.stat);
function parse (dir) {
return readdir(dir).map(function (f) {
return stat(join(dir, f))(function (stats) {
if (stats.isDirectory()) {
return parse(join(dir, f)); // recurse into the subdirectory, not the same dir
} else {
// do some stuff
}
});
});
};
parse('.').done(function (result) {
// do some stuff here when async parse completely finished
});
I've been using synchronize.js with great success. There's even a pending pull request (which works quite well) to support async functions which have multiple parameters. Far better and easier to use than node-sync imho. Added bonus that it has easy-to-understand and thorough documentation, whereas node-sync does not.
It supports two different methods for wiring up the sync: a deferred/await model (like what @Mariusz Nowak was suggesting) and a slimmer, though not as granular, function-target approach. The docs are pretty straightforward for each.
I recommend node-seq
https://github.com/substack/node-seq
installed via npm.
I'm using it, and I love it.
Look at node-sync, a simple library that allows you to call any asynchronous function in a synchronous way. The main benefit is that it uses a JavaScript-native design - the Function.prototype.sync function - instead of heavy APIs you would need to learn. Also, an asynchronous function called synchronously through node-sync doesn't block the whole process - it blocks only the current thread!
