Whenever readDirectory is invoked, I check whether any file's created date is more than 30 days old, and if so I remove that file from the directory. This works as expected and removes the files, but I don't think it's efficient. So I want to move the compareDates method into a separate file, check the logs/St directory every night, and remove any file that has passed the 30-day period. Is that doable?
service.js
var fs = require('fs');
var path = require('path');
var async = require('async');

function readDirectory(callback) {
    var dirPath = './logs/St';
    var files = fs.readdirSync(dirPath);
    var objToReturn = [];
    async.eachSeries(files, function(file, next) {
        var filePath = path.join(dirPath, file);
        var fileInfo = {};
        fs.stat(filePath, function(err, stats) {
            if (err) {
                console.info("File doesn't exist:", filePath);
            } else {
                fileInfo.fileDate = stats.birthtime;
                fileInfo.filename = file;
                compareDates(fileInfo, filePath);
                objToReturn.push(fileInfo);
            }
            // always continue with the next file, even on error
            next();
        });
    }, function(err) {
        // final callback when all files are done; send objToReturn to the client
        callback(objToReturn);
    });
}
function compareDates(file, path) {
    var currentDate = new Date();
    var timeDiff = Math.abs(currentDate.getTime() - file.fileDate.getTime());
    var dayDifference = Math.ceil(timeDiff / (1000 * 3600 * 24));
    console.log('Days', dayDifference);
    console.log('FileName', file.filename);
    if (dayDifference >= 30) {
        fs.unlink(path, function(err) {
            if (err) {
                // file doesn't exist
                console.info("File doesn't exist, won't remove it.");
            } else {
                console.log('removed file', file);
            }
        });
    }
}
See one of these modules:
https://www.npmjs.com/package/node-schedule
https://www.npmjs.com/package/node-cron
https://www.npmjs.com/package/node-cron-job
and more modules here:
https://www.npmjs.com/browse/keyword/schedule
https://www.npmjs.com/browse/keyword/cron
https://www.npmjs.com/browse/keyword/job
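For example, with node-cron the cleanup could run every night at midnight. A rough sketch, assuming compareDates and the directory scan are moved into their own module (the cleanup.js module and its removeExpiredFiles function below are hypothetical names, not part of the original code):
// scheduler.js (sketch only)
var cron = require('node-cron');
var cleanup = require('./cleanup'); // hypothetical module exporting removeExpiredFiles()

// '0 0 * * *' = every day at 00:00
cron.schedule('0 0 * * *', function() {
    // scan ./logs/St and delete anything older than 30 days
    cleanup.removeExpiredFiles('./logs/St', 30);
});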
Can someone please help me figure out why I am failing the mocha test even after setting the timeout to 15000ms? I know it could be a memory leak etc., but I cannot find one anywhere in my code. I used the Chrome debugger and checked that the JS heap uses 48MB of memory at most (which is not too bad?). Here's the function I'm testing; it's in a file called xmlParser.js:
var fs = require('fs'),
xml2js = require('xml2js'),
parser = new xml2js.Parser();
/**
* [parse - parses an xml file to json]
* @param {[string]} filename [name of the file to parse]
* @param {Function} callback [callback function]
* @return {[Function]} [returning callback function]
*/
function parse(filename, callback){
fs.readFile(filename, function(err, data){
if(err){
console.log('cannot read file.');
return callback(err);
} else {
parser.parseString(data, function(err, result){
if(err) {
console.log('cannot parse file.');
return callback(err);
}
else {
return callback(null, result);
}
});
}
});
}
module.exports = {
parse
}
What does it do? Parses an XML file to JSON. As simple as that!
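For context, a minimal usage sketch (the file path here is just an example):
var xmlParser = require('./xmlParser.js');

xmlParser.parse('example.xml', function(err, result) {
    if (err) return console.error(err);
    console.log(JSON.stringify(result, null, 2));
});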
Here's how I'm testing it,
var assert = require('chai').assert,
mocha = require('mocha'),
xmlParser = require('../utils/xmlParser.js'),
extractInformation = require('../utils/extractInformation.js');
//data-structures for parsed XML data (lists)
var logJSON = [], listJSON = [];
describe('parse xml files', function(){
this.timeout(150000);
it('should parse correctly', function(done){
this.timeout(150000);
setTimeout(done, 150000);
xmlParser.parse(__dirname + "/../xml/svn_log_test.xml", function(err, log) {
if(err) {
return done(err);
// assert.equal(true, false);
}
this.logJSON = log["log"]["logentry"];
xmlParser.parse(__dirname + "/../xml/svn_list_test.xml", function(err, list) {
if(err) {
return done(err);
// assert.equal(true, false);
}
this.listJSON = list["lists"]["list"][0]["entry"];
});
});
});
});
Now, before you point out that I'm not using any assert statements as of now: yes, I'm not. But it should pass anyway, right? It's not passing; it fails with a timeout error.
Why don't you try decreasing the setTimeout to 140 seconds?
setTimeout(function () {
    done();
}, 140000);
Timers are not quite precise in JavaScript.
I am quite new to jQuery, Node.js, PhantomJS and JavaScript in general, and I'm having a few problems wrapping my head around all of it.
I'm working on a simple script using jquery.go.js (https://github.com/travist/jquery.go.js) that should log me in to a site, then access a specific resource and parse the response (JSON) that it returns.
So far I've managed to get the login working, but I'm really lost on the second part.
What I have so far is based on https://github.com/travist/makemeasandwich.js
var async = require('async'),
$ = require('jquerygo'),
path = require('path'),
fs = require('fs');
// Add some default configs.
$.config.site = 'https://host.com';
$.config.addJQuery = false;
var login = function(done) {
async.series([
$.go(false, 'visit', '/login'),
$.go(false, 'waitForElement', '#email'),
debugCapture('login1'),
$('#email').go('val', 'email@example.com'),
$('#password').go('val', 'securepassword'),
debugCapture('login2'),
$("form").go('attr','id','validForm'),
$('#validForm').go('submit'),
sleep(3000),
debugCapture('login3'),
print('Successfully logged in.'),
sleep(3000),
], done);
};
var viewJournalEntries = function(done){
$.config.addJQuery = true;
async.series([
$.go(false, 'visit', '/api/journals/show/546'),
$.getPage(function(page) {
// Is this right, what do I need to do here??
}),
debugCapture('step2'),
],done);
}
var debugCapture = function(fileName) {
// if (config.get('debug')) {
return capture(fileName);
// }
return function(done) { done(); }
};
/**
* Method to capture and ensure the screenshots directory exists.
*/
var capture = function(fileName) {
// Return the async function.
return function(done) {
// The directory to store the screenshots.
var dir = __dirname + '/screenshots-ls';
// Check that the directory exists.
fs.exists(dir, function(exists) {
if (exists) {
$.capture(dir + '/' + fileName + '.png', done);
}
else {
fs.mkdir(dir, function(err) {
if (err) return done(err);
$.capture(dir + '/' + fileName + '.png', done);
});
}
});
}
}
/**
* Helper to print something when it is executed.
*
* @param {type} text
* @returns {unresolved}
*/
var print = function(text) {
return function(done) {
console.log(text);
done();
};
};
/**
* Helper function to take a pause...
*
* @param {type} time
* @returns {unresolved}
*/
var sleep = function(time) {
return function(done) {
setTimeout(done, time);
};
};
async.series([
login,
viewJournalEntries,
], function() {
$.close();
});
The login works like a charm, and I can call the viewJournalEntries function, which requests the URL /api/journals/show/546 and returns a typical JSON string like this:
{"data":{"id":546,"user_id":1,[etc...]
The screen capture proves that it works, but I am just not sure how to proceed to get the JSON into a variable that I can parse.
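For what it's worth, once I have the raw response text in a variable, I assume the parsing itself is just JSON.parse; a hypothetical sketch, where rawBody is a placeholder for whatever text I manage to pull out of the page:
// Hypothetical: rawBody is assumed to hold the raw response text from the page.
var journal;
try {
    journal = JSON.parse(rawBody);
} catch (e) {
    console.log('Response was not valid JSON:', e);
}
if (journal) {
    console.log(journal.data.id); // 546 in the example response above
}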
Any help/directions are appreciated.
Thanks,
Ben
I am getting confused about how to read a list of files recursively.
Assume I have 3 text files in my filesystem API root directory:
text1.txt
text2.txt
text3.txt
My goal is to read each text file one by one and then concatenate all the entries from each file into one string, but I am currently at a loss how to do this with the JavaScript FileSystem API.
window.requestFileSystem(window.TEMPORARY, 1024*1024, onInitFs, errorHandler);
function onInitFs(fs) {
var dirReader = fs.root.createReader();
var entries = [];
// Call the reader.readEntries() until no more results are returned.
var readEntries = function() {
dirReader.readEntries (function(results) {
if (!results.length) {
readAllFiles(results);
} else {
entries = entries.concat(toArray(results));
readEntries();
}
}, errorHandler);
};
readEntries(); // Start reading dirs.
}
function readAllFiles(entries){
//Loop thru all the files and read each entries
}
I have seen how to read one text file, but I don't know how to implement reading all the files and concatenating their values.
They all use callback functions, so I am getting confused about how to handle it. Any pointers please?
I have actually been basing all my work on this: http://www.html5rocks.com/en/tutorials/file/filesystem
UPDATE 2
As per @Johan, I changed my code to make use of a callback:
window.requestFileSystem(window.TEMPORARY, 1024*1024, onInitFs, errorHandler);
function onInitFs(fs) {
var dirReader = fs.root.createReader();
var entries = [];
// Call the reader.readEntries() until no more results are returned.
var readEntries = function() {
dirReader.readEntries (function(results) {
if (!results.length) {
readAllFiles(results, concatMessages);
} else {
entries = entries.concat(toArray(results));
readEntries();
}
}, errorHandler);
};
readEntries(); // Start reading dirs.
}
var concatMessage = '';
function concatMessages(message){
concatMessage += message;
}
function readAllFiles(logs, callBack) {
logs.forEach(function(entry, iCtr) {
var message;
entry.file(function(file) {
var reader = new FileReader();
reader.onloadend = function(e) {
//message += this.result;
if(callBack)
callBack('==================' + iCtr + '==========================');
callBack(this.result);
};
reader.readAsText(file); // Read the file as plaintext.
}, function(err) {
console.log(err);
});
});
}
My only problem is that the callback function is not sequential.
It reads text2.txt first, then text3.txt, then text1.txt, so the end result is not in order, which is not what I want. Any more hints?
I highly recommend you consider using something like caolan's async library to accomplish this.
You can do something like this:
async.each(openFiles, function(file, callback) {
// Perform operation on file here.
console.log('Processing file ' + file);
callback();
}, function(err) {
// if any of the file processing produced an error, err would equal that error
if (err) {
// One of the iterations produced an error.
// All processing will now stop.
console.log('A file failed to process');
} else {
// do your concatenation here
}
});
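If the order of the concatenation matters, as in the question, a series variant keeps the reads sequential. A rough sketch combining async.eachSeries with the FileReader code from the update (variable names are illustrative):
// Read the entries one at a time so the concatenation stays in directory order.
// `entries` are the FileEntry objects collected by readEntries().
function readAllFiles(entries, callback) {
    var combined = '';
    async.eachSeries(entries, function(entry, next) {
        entry.file(function(file) {
            var reader = new FileReader();
            reader.onloadend = function() {
                combined += this.result; // append in order
                next();
            };
            reader.readAsText(file); // read the file as plain text
        }, next); // pass FileEntry errors on to async
    }, function(err) {
        if (err) return console.log(err);
        callback(combined); // all files read, in order
    });
}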
I'm trying to delete some files in a folder using the fs module.
I need to delete a file only if it is older (by modified time) than 1 minute.
But I get a strange error.
var fs = require("fs");
var files = fs.readdirSync("./folder");
for (var i in files) {
fs.stat("./folder/" + files[i], function (err, stats) {
var currentTimestamp = Math.round(new Date().getTime() / 1000);
var fileTimestamp = Math.round((new Date(stats.mtime).getTime() / 1000));
if ((currentTimestamp - fileTimestamp) > 60 * 1) {
//Elimino il file di cache
fs.unlinkSync("./folder/" + files[i], function (err) {
if (err) {
console.log(err);
} else {
console.log("File deleted");
}
});
}
});
}
Unfortunately, I get this error:
fs.js:765
return binding.unlink(pathModule._makeLong(path));
^
Error: ENOENT, no such file or directory
Why does this happen?
How can I solve it?
Thanks
EDIT
I added a log before the fs.unlinkSync("./folder/" + files[i], function (err) call and I see that it is called twice with the same file... but there is only one file with that name in the folder.
Below is the problem:
You are calling fs.stat, an asynchronous function, inside a for loop, and the for loop itself is synchronous. The loop finishes before the callbacks run, so every callback ends up seeing the last value of i.
Since the value of i is the same in each callback, the code tries to unlink the same file more than once and ends up throwing an error.
There are two solutions to your problem:
1) Use fs.statSync & fs.unlinkSync so everything is synchronous; each iteration then finishes before i advances. It works, but it's not a good way of doing it (a sketch of this variant appears after the forEach version below).
2) Use Array.forEach, so that each file gets its own callback scope and the asynchronous fs.stat callback closes over the correct fileName. I prefer this method.
I've modified your code and got it working with Array.forEach.
var fs = require("fs");
var files = fs.readdirSync("./Folder");
var deleteFile = function (fileName) {
fs.stat("./Folder/" + fileName, function (err, stats) {
var currentTimestamp = Math.round(new Date().getTime() / 1000);
var fileTimestamp = Math.round((new Date(stats.mtime).getTime() / 1000));
if ((currentTimestamp - fileTimestamp) > 60 * 1) {
//Elimino il file di cache
console.log('deleting ',fileName)
fs.unlink("./Folder/" + fileName, function (err) {
if (err) {
console.log(err);
} else {
console.log("File deleted");
}
});
}
});
}
files.forEach(deleteFile);
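For completeness, the fully synchronous variant from option 1 might look roughly like this (a sketch only, not the recommended approach):
var fs = require("fs");

fs.readdirSync("./Folder").forEach(function (fileName) {
    var stats = fs.statSync("./Folder/" + fileName);
    var ageInSeconds = (Date.now() - stats.mtime.getTime()) / 1000;
    if (ageInSeconds > 60) {
        // delete files older than 1 minute
        fs.unlinkSync("./Folder/" + fileName);
    }
});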
I am trying to write a newer watch module that uses the fs.watch method instead of the watchFile approach.
So far it works beautifully, but only when I run it outside of mocha. I can't figure out why my unit test is throwing a tantrum; maybe someone here can?
Here is my class code:
/**
* requirements
*/
var fs, path, events;
fs = require('fs');
path = require('path');
events = require('events');
/**
* private
*/
var Monitor = function(directory, options) {
this.directory = directory;
this.options = options || {};
(this.options.lazy && this.empty()) || this.walk(this.directory);
this.watch(this.directory);
};
Monitor.prototype = new events.EventEmitter();
Monitor.prototype.watch = function(directory, stats) {
var stats = stats || {};
if (!this.directories[directory]) {
var w = fs.watch(directory, this.options, this.justlookatit.bind(this));
}
this.directories[directory] = { 'stats': stats, 'w': w };
};
Monitor.prototype.directories = function() {
if (!Object.keys(this.directories).length) {
this.walk(this.directory);
}
return this.directories;
};
Monitor.prototype.files = function() {
if (!Object.keys(this.files).length) {
this.walk(this.directory);
}
return this.files;
};
Monitor.prototype.unwatch = function() {
if (!Object.keys(this.directories).length) {
for (var dir in this.directories) {
dir.w.close();
}
}
};
Monitor.prototype.empty = function() {
this.unwatch();
this.files = {};
this.directories = {};
};
Monitor.prototype.walk = function(directory) {
var monitor = this;
this.empty();
fs.readdir(directory, function(err, files) {
if (err) return;
for (var file in files) {
var fullname = path.resolve(files[file]);
if (!monitor.options.filter || monitor.options.filter(fullname)) {
fs.stat(fullname, function(err, stats) {
if (err) return;
if (stats.isDirectory()) {
monitor.walk(fullname);
monitor.watch(fullname, stats);
} else {
monitor.files[fullname] = stats;
}
});
}
}
});
};
Monitor.prototype.justlookatit = function(action, file) {
var monitor = this;
var fullname = path.resolve(file);
if (this.options.filter && !this.options.filer(fullname)) return;
fs.exists(fullname, function(exists) {
if (exists) {
fs.stat(fullname, function(err, stats) {
if (stats.isDirectory()) {
monitor.watch(fullname, stats);
} else {
if (monitor.files[fullname]) {
if (stats.mtime.getTime() > monitor.files[fullname].mtime.getTime()) {
monitor.emit('modified', fullname, stats);
}
} else {
monitor.emit('added', fullname, stats);
}
monitor.files[fullname] = stats;
}
});
} else {
if (monitor.files[fullname]) {
delete monitor.files[fullname];
monitor.emit('deleted', fullname);
} else if (monitor.directories[fullname]) {
monitor.directories[fullname].w.close();
delete monitor.directories[fullname];
}
}
});
};
/**
* exports
*/
exports.start = function(directory, options) {
return new Monitor(directory, options);
};
Here is my working external test code:
var watch = require("./watch.js");
var fs = require('fs');
monitor = watch.start(__dirname);
monitor.on('added', function(file, stats) {
console.log("Caught Added: " + file);
});
monitor.on('modified', function(file, stats) {
console.log("Caught Modified: " + file);
});
monitor.on('deleted', function(file) {
console.log("Caught deleted: " + file);
});
// try creating a file immediately
fs.openSync('v.md', 'w');
The first test file runs perfectly fine, and I've tried both openSync and open. Finally, here is a version of the same test code, wrapped in a mocha unit test which is timing out:
/**
* requirements
*/
var watch, Q, fs, path, mocha, chai, assert;
watch = require('../lib/watch.js');
Q = require('q');
fs = require('fs');
path = require('path');
mocha = require('mocha');
chai = require('chai');
assert = chai.assert;
/**
* variables
*/
var watch_directory = path.join(__dirname, './watch');
/**
* tests
*/
describe('test watch', function() {
it('should create a monitor and run callbacks after fs changes', function(done) {
// I had planned to implement promises that chained the three callbacks
// but couldn't get one of them working in general
var added = function(file, stats) {
console.log("added");
done();
};
var modified = function(file, stats) {
console.log("modified");
};
var deleted = function(file, stats) {
console.log("deleted");
};
// create our service
var monitor = watch.start(watch_directory);
// assert it is defined
assert.isDefined(monitor);
// establish a listener
monitor.on('added', added);
monitor.on('modified', modified);
monitor.on('deleted', deleted);
// here is a file name using the current date to prevent duplication during tests
var file = path.join(watch_directory, (new Date()).getTime() + '.md');
// let's create the file, then delete it
fs.open(file, 'w+', function(err, fileDescriptor) {
// this prints before console output from the watch.js's `justlookatit` method
console.log(err);
console.log("writing to file");
// we probably don't want to try closing the fileDescriptor if the open failed
if (err) return;
// close the file descriptor
fs.close(fileDescriptor, function() {
// delete the file we just created
// fs.unlink(file, function() { /* not a big deal */ });
});
});
// modify a known-existing test file
fs.open('test.md', 'w+', function() {/* we don't care about this */});
})
});
I checked with console.log(fullname) inside the justlookatit method on the watch code, and it spits out the correct file name, matching the one generated by the unit test.
However, it then proceeds to return false when I run fs.exists. As I understand it, that means the file system is notifying me that a file exists before it actually exists, which doesn't really make sense. So I tried adding an additional delay by wrapping my fs.exists call in a setTimeout, and that didn't change the results. I have also tried using both openSync and existsSync, and that made no difference.
I'm stumped, does anyone have any ideas why the mocha code isn't working?
So, the solution was to go for a walk. I came back, looked at the code again and figured out the cause of the problem with mocha, and also identified many other bugs.
The problem was the lack of context. The justlookatit method does not have a context, and in the test.js scenario it is watching the current directory, while the mocha test is watching a sub-directory.
path.resolve was receiving only the file name, not the directory, and therefore resolved it against the default (the executable's) directory, i.e. the level of test.js, or of the mocha test file. It then failed to locate any of the files in the mocha test case because they were all one level below the executable.
I won't go into detail about all the other bugs, but I may come back and post the repository link when I get to a point that I want to push it online.
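A minimal sketch of the kind of fix described, resolving relative to the watched directory instead of the process's working directory (the exact change in the final code may differ):
// In Monitor.prototype.watch: pass the watched directory along to the handler,
// so relative file names reported by fs.watch can be resolved against it.
var w = fs.watch(directory, this.options, this.justlookatit.bind(this, directory));

// In Monitor.prototype.justlookatit: accept that directory and use it.
Monitor.prototype.justlookatit = function(directory, action, file) {
    var fullname = path.resolve(directory, file);
    // ... rest unchanged ...
};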
You're missing a call to the done() callback at the end of your test. Unless you call that callback, Mocha will time out every time.