I'm currently trying to learn a bit about Node.js.
At the moment I'm trying to understand the principles behind callbacks.
I've written a module that should give me the files from a given directory, filtered by a specified file extension. But it won't work. I've tested a bit and noticed that the function getFiles gets called more than once. But why? I can't find the mistake; can someone help me understand my problem? If someone thinks my code is ugly, please give me a better example, thanks.
So here's my code:
//Returns a list of filtered files from a specified directory and extension
function getFilteredFiles(dir, ext, callback)
{
    getFiles(dir, function(error, data){
        if(error)
        {
            callback(error);
        }
        else
        {
            var result = getFilteredFiles(data, ext);
            callback(null, result);
        }
    });
}
//Reading files from a given directory
function getFiles(dir, callback)
{
    var fs = require('fs');
    console.log(typeof dir);
    fs.readdir(dir, function(err, data){
        if(err)
        {
            callback(err);
        }
        else
        {
            callback(null, data);
        }
    });
}
//Filters a file by a specified extension
function filterFiles(data, extension)
{
    var path = require('path');
    return data.filter(function(file){
        return path.extname(file) == '.' + extension;
    });
}
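For reference, the symptom described above is what you would expect if the else branch calls getFilteredFiles(data, ext) again instead of the filterFiles helper defined above. A minimal sketch of that branch with filterFiles used instead (assuming that was the intent):
function getFilteredFiles(dir, ext, callback)
{
    getFiles(dir, function(error, data){
        if(error)
        {
            callback(error);
        }
        else
        {
            // filter the directory listing instead of recursing into getFilteredFiles
            var result = filterFiles(data, ext);
            callback(null, result);
        }
    });
}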
I need to read a file from a directory structure like the following:
application/userxxx.txt/manifest.txt
The path is constant except for the userxxx part, which can be any alphanumeric string.
/application/
    user12.txt
    newfile.txt
    newFile2.txt
There is only one file which starts with user...
I thought of using the following, which is currently not working:
fs.readdir('c://application', function (err, files) {
    if (err) {
        throw err;
    }
    // ...
});
and then get all the files under application, search for the file which starts with user (e.g. userabcd1234.txt), and when I find it, read the file like this:
readFile('application/userabcd1234/manifest.txt')
There are never two files inside application that start with user, just one, but what comes between user and the trailing '/manifest.txt' can be any random alphanumeric string.
You can do something like
var filePath = path.join(__dirname, '../your path to application folder');
fs.readdir(filePath, function (err, files) {
    if (err) {
        return console.error(err);
    }
    files.forEach(function (file) {
        if (file.indexOf('user') === 0) {
            var relFilePath = filePath + '/' + file + '/manifest.txt';
            fs.readFile(relFilePath, 'utf8', function read(err, data) {
                if (err) {
                    throw err;
                }
                console.log(data);
            });
        }
    });
});
I have a gulpfile.js which has a bunch of tasks.
I noticed that my gulp tasks run, but after finishing they don't exit/terminate. I have to use control + c to exit.
I went through a thread that talked about a similar problem, but it was using gulp-mocha and there was an issue in the plug-in.
In my case, I also know what is causing this issue. I have required a JS file that I've written (edgecast.js) into my gulpfile (one of the tasks uses a function from that file). It is a simple file with one function that interacts with a REST API. If I don't require that file, everything works great.
I'm exporting just one function from the edgecast.js file to be used in the gulpfile. It has a call to the setTimeout function (could that be the problem?)
Can anyone help me find out the problem?
var main = function () {
    console.log(config.edgecast.username);
    ftp.put(config.ftp.local_path, config.ftp.remote_path, function(hadError) {
        if (!hadError) {
            console.log('File transferred successfully');
            rest.put(config.edgecast.purge_url, purgeOptions).on('complete', function(data, response) {
                if (response.statusCode != 200)
                    console.log('Purge failed');
                else {
                    console.log('Purge request Issued');
                    purgeID = data.Id;
                    setTimeout(function() {
                        rest.get(config.edgecast.get_purge_url + purgeID, purgeOptions).on('complete', function(data) {
                            if (data.CompleteDate) {
                                console.log('Purge completed on ' + data.CompleteDate);
                                rest.put(config.edgecast.load_url, loadOptions).on('complete', function(data, response) {
                                    if (response.statusCode == 200)
                                        console.log('Asset Loaded');
                                    else {
                                        console.log('Load Failed');
                                        console.log(response.statusCode);
                                    }
                                });
                            }
                        });
                    }, 180000);
                }
            });
        } else console.log('File Transfer Failed. ' + hadError);
        ftp.raw.quit(function(err) {
            if (err) return console.error(err);
        });
    });
};
Above is the function I'm exporting from the edgecast.js file. This is the only function in the file. The rest is only variables. And then in the gulpfile.js there is
var edgecast = require('./edgecast.js');
And this require statement causes all the problems.
The edgecast.js file has the following libraries imported.
var rest = require('restler'),
    config = require('./config'),
    JSFtp = require('jsftp');
Found the problem. It turns out the gulp tasks don't exit because the FTP connection was left open; they wait until it is closed.
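For illustration, a minimal sketch of that idea, with the REST/purge chain collapsed into a placeholder (doWork here is hypothetical): the FTP session is quit explicitly once everything has finished, in every code path, so no open connection keeps the gulp process alive.
var main = function () {
    ftp.put(config.ftp.local_path, config.ftp.remote_path, function(hadError) {
        // quit the FTP session when we are completely done; otherwise the open
        // connection keeps the event loop (and therefore the gulp task) alive
        function quit() {
            ftp.raw.quit(function(err) {
                if (err) return console.error(err);
            });
        }
        if (hadError) {
            console.log('File Transfer Failed. ' + hadError);
            return quit();
        }
        console.log('File transferred successfully');
        doWork(function() { // hypothetical stand-in for the purge/load REST calls above
            quit();
        });
    });
};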
In my routes.js file, I have this:
var pages = require('./pages')();
...
app.get('/:page', function(req, res, next) {
    var p = req.params.page;
    if (p in pages) {
        res.render('page', pages[p]);
    } else {
        next();
    }
});
pages.js:
module.exports = function() {
    var fs = require('fs'),
        ret = [],
        dir = './pages',
        files = fs.readdirSync(dir);
    files.forEach(function(file) {
        var text = fs.readFileSync(dir + '/' + file, 'utf-8'),
            fileName = file.substr(0, file.lastIndexOf('.'));
        ret[fileName] = {content: text};
    });
    return ret;
};
This code runs only once, when I start node. This is how I can make it async:
require('./pages')(function(pages) {
    app.get('/:page', function(req, res, next) {
        var p = req.params.page;
        if (p in pages) {
            res.render('page', pages[p]);
        } else {
            next();
        }
    });
});
pages.js:
module.exports = function(callback) {
    var fs = require('fs'),
        ret = [],
        dir = './pages';
    fs.readdir(dir, function(err, files) {
        if (err) throw err;
        files.forEach(function(file, i) {
            fs.readFile(dir + '/' + file, 'utf-8', function(err, text) {
                if (err) throw err;
                var fileName = file.substr(0, file.lastIndexOf('.'));
                ret[fileName] = {content: text};
                if ( i === (files.length - 1) ) callback(ret);
            });
        });
    });
};
Assuming the total pages will not exceed 1 MB in size, I can cache the text in memory indefinitely without node crashing from running out of memory.
Should I be using the async code?
According to what I've learnt, the async version will make node start listening on localhost faster, but /:page URLs will only work when the files have been loaded into memory.
Is the async code in the right pattern?
What if I need to reuse the pages object in another file? Right now it is only accessible in routes.js.
Can I rewrite pages.js to execute only once like this:
var ret = [];
module.exports = function(callback) {
    var fs = require('fs'),
        dir = './pages';
    if (ret.length < 1) {
        fs.readdir(dir, function(err, files) {
            if (err) throw err;
            files.forEach(function(file, i) {
                fs.readFile(dir + '/' + file, 'utf-8', function(err, text) {
                    if (err) throw err;
                    var fileName = file.substr(0, file.lastIndexOf('.'));
                    ret[fileName] = {content: text};
                    if ( i === (files.length - 1) ) callback(ret);
                });
            });
        });
    } else {
        callback(ret);
    }
};
What if require('./pages')(function(pages) {}) is called multiple times together? Is there a chance of the if condition failing? I can't wrap my mind around this.
Should I be using the async code?
If you want to, why not. But there's no real need for it; synchronous IO during startup is fine, and require does it as well.
Is the async code in the right pattern?
No. As written it can invoke the callback before every file has been read, because the fs.readFile callbacks complete in no particular order and only the one for the last index triggers it. Registering app.get('/:page', …) against a pages object that is still being populated is not what you want.
What if I need to reuse the pages object in another file? Right now it is only accessible in routes.js.
You could pass it from routes.js to the other modules. Or just rewrite pages.js to store it statically and execute the async things only once, so that you can require it multiple times.
What if require('./pages')(function(pages) {}) is called multiple times together? Is there a chance of the if condition failing?
Yes, it will most certainly fail, because you are populating ret only asynchronously.
I can't wrap my mind around this.
Use promises. They act as asynchronous, immutable values, which is just what you need here. They guarantee that callbacks are only invoked once, that every callback is invoked with the same ret value, and they provide many more useful things (like managing the parallel file reads for you).
You'll want to export a promise from pages.js.
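A minimal sketch of that approach, assuming a Promise-capable Node version (the promise is created once when pages.js is first required, so every caller sees the same result; ret is a plain object here):
// pages.js
var fs = require('fs');
var dir = './pages';

module.exports = new Promise(function(resolve, reject) {
    fs.readdir(dir, function(err, files) {
        if (err) return reject(err);
        var ret = {};
        var pending = files.length;
        if (pending === 0) return resolve(ret);
        files.forEach(function(file) {
            fs.readFile(dir + '/' + file, 'utf-8', function(err, text) {
                if (err) return reject(err);
                ret[file.substr(0, file.lastIndexOf('.'))] = {content: text};
                if (--pending === 0) resolve(ret); // resolve once, after the last read
            });
        });
    });
});

// routes.js
require('./pages').then(function(pages) {
    app.get('/:page', function(req, res, next) {
        var p = req.params.page;
        if (p in pages) {
            res.render('page', pages[p]);
        } else {
            next();
        }
    });
});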
I'm pretty new to Node... I need to hammer Node's async behavior and callback structure into my mind. Here is where I struggle right now:
// REQUIRE --------------------------------------------------------------------
fs = require('fs');
path = require('path');
// FUNCTION readAllDirs -------------------------------------------------------
function readAllDirs(dir, result) {
    if (!result) {
        result = function() {};
    }
    fs.readdir(dir, function(err, list) {
        if (err) { return result(err); }
        list.forEach(function(file) {
            var fullpath = path.resolve(dir, file);
            fs.stat(fullpath, function(err, stat) {
                if (err) { return result(err); }
                if (stat && stat.isDirectory()) {
                    readAllDirs(fullpath);
                    //console.log('In: ' + fullpath);
                    result(null, fullpath);
                }
            });
        });
    });
}
// MAIN -----------------------------------------------------------------------
readAllDirs('/somedir', function(err, dirs) {
    console.log(dirs);
});
I'm trying to traverse a tree of directories. In principle the function works nicely... as long as I don't use the callback but just print the directory names to the console. All folders and sub-folders come up as expected. BUT when I use the callback, it is not called for recursion deeper than the first level.
Pleeeaaassee help! Thanks guys!
Your problem is here, inside the if (stat ...) branch:
readAllDirs(fullpath);
You need to pass the supplied callback again back into the recursion:
readAllDirs(fullpath, result);
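For completeness, the stat callback from the question would then read as follows (only the recursive call changes, as shown above):
fs.stat(fullpath, function(err, stat) {
    if (err) { return result(err); }
    if (stat && stat.isDirectory()) {
        // pass the original callback down so deeper levels report back too
        readAllDirs(fullpath, result);
        result(null, fullpath);
    }
});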
I'm trying to use Node.js to create a zip file from an existing folder, and preserve the structure.
I was hoping there would be a simple module to allow this kind of thing:
archiver.create("../folder", function(zipFile){
    console.log('et viola');
});
but I can't find anything of the sort!
I've been googling around, and the best I've found so far is zipstream, but as far as I can tell there's no way to do what I want. I don't really want to call into command-line utilities, as the app has to be cross-platform.
Any help would be greatly appreciated.
Thanks.
It's not entirely code free, but you can use node-native-zip in conjunction with folder.js. Usage:
function zipUpAFolder (dir, callback) {
    var archive = new zip();
    // map all files in the approot thru this function
    folder.mapAllFiles(dir, function (path, stats, callback) {
        // prepare for the .addFiles function
        callback({
            name: path.replace(dir, "").substr(1),
            path: path
        });
    }, function (err, data) {
        if (err) return callback(err);
        // add the files to the zip
        archive.addFiles(data, function (err) {
            if (err) return callback(err);
            // write the zip file
            fs.writeFile(dir + ".zip", archive.toBuffer(), function (err) {
                if (err) return callback(err);
                callback(null, dir + ".zip");
            });
        });
    });
}
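A hypothetical usage sketch, assuming fs, node-native-zip and folder.js have been required under the names the snippet above expects:
var fs = require('fs');
var zip = require('node-native-zip');  // assumed module name, as published on npm
var folder = require('./folder');      // assumed local copy of folder.js

zipUpAFolder('../folder', function (err, zipPath) {
    if (err) return console.error(err);
    console.log('created ' + zipPath);
});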
This can be done even more simply using Node's built-in execFile function. It spawns a process and executes the zip command through the OS, natively. Everything just works.
var execFile = require('child_process').execFile;
execFile('zip', ['-r', '-j', zipName, pathToFolder], function(err, stdout) {
    console.log(err);
    logZipFile(localPath);
});
The -j flag 'junks' the file path; use it if you are zipping a subdirectory and don't want excessive nesting within the zip file.
Here's some documentation on execFile.
Here's a man page for zip.
Using easy-zip (npm install easy-zip), you can do:
// assuming: var EasyZip = require('easy-zip').EasyZip;
var zip5 = new EasyZip();
zip5.zipFolder('../easy-zip', function(){
    zip5.writeToFile('folderall.zip');
});