Currently i'm reading and writing files asynchronously, the thing is that i'm not sure that all the line from the file were read before the rest of my code is executed, here is my code:
var fs = require('fs');
//var str = fs.readFileSync("stockststs.json");
function updateJson(ticker) {
//var stocksJson = JSON.parse(fs.readFileSync("stocktest.json"));
fs.readFile('stocktest.json', function(error, file) {
stocksJson = JSON.parse(file);
if (stocksJson[ticker]!=null) {
console.log(ticker+" price : " + stocksJson[ticker].price);
console.log("changin the value...")
stocksJson[ticker].price = 989898;
console.log("Price after the change has been made -- " + stocksJson[ticker].price);
fs.writeFile('stocktest.json',JSON.stringify(stocksJson, null, 4) , function(err) {
if(!err) {
console.log("File successfully written");
}
});
}
else {
console.log(ticker + " doesn't exist on the json");
}
});
}
updateJson("APPL");
I'm wondering if there is any better way to to implement ?
its always a good practice to check in your callback for error. For example, i have written a small getCache function that checks if the file exists first, then tries to read that file and fire a callback afterwards
Cache.prototype.getCache = function(cache_filename, callback) {
cache_filename = this.cache_foldername + cache_filename;
fs.exists(cache_filename, function(exists) {
if (exists) {
fs.readFile(cache_filename, 'utf8', function (error, data) {
if (!error) {
console.log('File: '+ cache_filename +' successfully Read from Cache!');
_.isObject(data) ? callback(null,data) : callback(null,JSON.parse(data));
} else console.log("Error Reading Cache from file: " + cache_filename + " with Error: " + error);
});
} else callback(null,false);
});
}
note that in here what you need to do is to write the file after it has been read and checked with the if (!error)
i hope this helps
Related
Here is my piece of my router who manage my uploads :
fs.readFile(files.photo.path, function (err, data) {
// Here is the futur name of my file (ex: SERVER_PATH/images/moka/moka22/11/2016_1.jpg)
var newPath = __dirname + "/images/moka/moka" + new Date().toLocaleDateString() + "_" + Math.floor(Math.random() * 10) + 1 + "." + ext;
fs.writeFile(newPath, data, function (err) {
if(err) {
res.render('error', { error: err, message: "Erreur lors de l'upload"});
} else {
// insctructions
});
}
});
});
When the code is triggered I have this error :
Error: ENOENT: no such file or directory, open 'D:\projects\raspberry\routes\images\moka\moka2016-11-22_91.jpg'
at Error (native)
If I well understood the fs doc (https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback) :
fs.writeFile(theNameOfTheFuturFile, theDataToPutIn, callback);
So I'm a bit confused.
Sorry for my english, it may be bad, i hope that you guess what i mean :)
Thanks.
The problem could be the directory you are writing to does not exist.
so make sure it exists:
fs.readFile(files.photo.path, function (err, data) {
var dirPath = __dirname + "/images/moka/moka";
if (!fs.existsSync(dirPath)){
fs.mkdirSync(dirPath);
}
...
or do it by hand.
I have the following function to look for changes in a file, open the file and read the said changes. For this I use fs.statSync to calculate the length of my buffer.
var allFiles=[logfile]
allFiles.forEach (function (file) {
var fName = file;
var fNameStat = new Object;
fNameStat = fs.statSync(fName);
if (!fNameStat.isFile()) {
console.log(fName + ' is not a file');
process.exit(1);
}
console.log('watching ' + fName + ' bytes: ' + fNameStat.size);
fs.watch(fName, function (event, filename) {
var fNameStatChanged = fs.statSync(fName);
console.log('file changed from ' + fNameStat.size + ' to ' + fNameStatChanged.size);
fs.open(fName, 'r', function(err, fd) {
var newDataLength = fNameStatChanged.size - fNameStat.size;
var buffer = new Buffer(newDataLength, 'utf-8');
fs.readSync(fd, buffer, 0, newDataLength, fNameStat.size, function (err, bytesRead, newData) {
if (err) {
console.log(err);
};
logline = newData.toString()
console.log(logline);
socket.emit("log-tail",logline);
});
fNameStat = fs.statSync(fName);
});
}); // fs.watch
}); // allFiles.forEach
But I get a buffer outofbounds error. This happens when the log file gets two messages one after the other.
file changed from 40756 to 40789
file changed from 40756 to 40844
fs.js:620
binding.read(fd, buffer, offset, length, position, req);
^
Error: Offset is out of bounds
I am assuming its to do with the asynchronous calls. The file changes before the first read operation is complete and as such the buffer is wrong. But even after using the corresponding synchronous calls the code doesn't work. Any ideas?
I'm doing this
gm(jpgName).setFormat('jpg')
.resize(160,158)
.compress('JPEG')
.write(fs.createWriteStream(jpgName),function(err){
if(err){
console.log(err,jpgName);
res.send(400);
}else{
console.log('file formated to jpg' + jpgName);
And I get
{ [Error: Command failed: gm convert: Empty input file (/home/ubuntu/node/uploads/icon.jpg). ] code: 1, signal: null }
If I stop the code right before the first line written here, and then look at the file, it has a file length. If I let this process happen (error out) then look at the file, then it has a size of 0 bytes.
I also tried this:
fs.stat(jpgName, function (err, stats) {
console.log('file size:',stats.size);
gm(jpgName).setFormat('jpg')
.resize(160,158)
.compress('JPEG')
.write(fs.createWriteStream(jpgName),function(err){
if(err){
console.log('console.log error writing jpg file',err,jpgName);
fs.unlink(jpgName);
callback(400);
return;
}else{//...
And I'm getting a file size back, so I know it's not empty before I start this process.
EDIT: I now start this process on form.on('end') so I could be sure the entire upload process was done.
The entire function:
function formatProfileImage(req,form,file,callback){
console.log('formatting profile image');
var ext = file.name.split('.')[1].toLowerCase();
var name = encodeURIComponent(file.name.split('.')[0].toLowerCase());
var smallName = form.uploadDir+"/"+"small_"+name+".jpg";
var jpgName = form.uploadDir + "/" + name+'.jpg';
console.log('extension:',ext);
console.log('name:',name);
console.log('smallName:',smallName);
console.log('jpgName:',jpgName);
if(!(ext == "png" || ext == "jpeg"|| ext == "jpg"|| ext == "gif")){
fs.unlink(file.path);
console.log("extension rejected");
callback(415);
return;
}
console.log('renaming file from ' + file.path + ' to ' + jpgName);
fs.rename(file.path,jpgName,function(err){
if(err){
console.log('error renaming file',err);
callback(400);
return;
}
console.log('file renamed');
fs.stat(jpgName, function (err, stats) {
console.log('file size:',stats.size);
if(!(typeof req.query.tenant === 'undefined')){
//rename to jpg
gm(jpgName).setFormat('jpg')
.resize(160,158)
.compress('JPEG')
.write(fs.createWriteStream(jpgName),function(err){
if(err){
console.log('console.log error writing jpg file',err,jpgName);
fs.unlink(jpgName);
callback(400);
return;
}else{
console.log('file formated to jpg' + jpgName);
//make "thumbnail"
gm(jpgName).setFormat('jpg')
.resize(50)
.write(fs.createWriteStream(smallName),function(err){
if(err){
fs.unlink(smallName);
console.log('error writing thumbnail',err,smallName);
callback(400);
return;
}else{
console.log('thumbnail created' + smallName);
//upload everything
uploadS3(jpgName,req.query.tenant,function(back){
if(back.success){
console.log('success ' +back.url);
callback(back.url);
fs.unlink(jpgName);
}else{
console.log('error uploading' + jpgName, back);
callback(400);
fs.unlink(jpgName);
return;
}
});
uploadS3(smallName,req.query.tenant,function(back){
if(back.success){
console.log('success ' +back.url);
fs.unlink(smallName);
}else{
console.log('error uploading ' + smallName, back);
callback(400);
fs.unlink(smallName);
return;
}
});
}
});
}
});
}
});
});
}
.write() accept string as path. So you need replace your stream with string (location, where resulting image must me saved).
GM support 3 ways of saving files.
1) Save to file. Simple, just use .write() and send as parameter string, where file should saved. Example:
gm('image.png')
.resize()
.write('resized.png', function (error) {
if (error) {
console.error(error);
}
});
2) Use streams, example:
gm('image.png')
.resize()
.stream(function (error, stdout, stderr) {
var writeStream = fs.createWriteStream('resized.jpg');
stdout.pipe(writeStream);
});
3) And last one - buffers:
gm('image.png')
.resize()
.toBuffer(function (error, buffer) {
if (error) {
console.error(error);
}
});
I'm trying to change a working code that doesn't have any control flow to display when everything is done how much change were done. WIthout control flow, it's not possible.
To do so, I'm trying async, but I'm a bit confused here.
Old code:
public static populate(){
// Read all files inside the populate folder.
fs.readdir(__config.path.populate, function(err, files){
if(err){
consoleDev(err);
}else{
// For each file, get the data to populate stored in the file.
_.each(files, function(file){
_.each(require(__config.path.base + __config.path.populate + file), function(documents){
// The file name must be the same as the targeted model.
var Model = __Dao.getModel(file.split('.')[0]);// Filter the extension.
if(Model){
// For each data, try to add it if it does not exist in the DB.
Model.findOrCreate(documents, function(err, doc, created){
if(created){
consoleDev('The following document was created in the [' + Model.modelName + '] collection: ' + JSON.stringify(doc));
}
});
}else{
consoleDev('Unable to populate the file: ' + file + '. The model was not found.')
}
});
});
}
});
}
New code:
public static populate(){
// Read all files inside the populate folder.
fs.readdir(__config.path.populate, function(err, files){
if(err){
consoleDev(err);
}else{
_.each(files, function(file){
// Read all files in async mode with control flow.
async.each(require(__config.path.base + __config.path.populate + file), function(documents, callback){
// The file name must be the same as the targeted model.
var Model = __Dao.getModel(file.split('.')[0]);// Filter the extension.
if(Model){
// For each data, try to add it if it does not exist in the DB.
Model.findOrCreate(documents, function(err, doc, created){
if(created){
consoleDev('The following document was created in the [' + Model.modelName + '] collection: ' + JSON.stringify(doc));
Populate._added++;
}else{
Populate._read++;
}
callback();
});
}else{
callback('Unable to populate the file: ' + file + '. The model was not found.')
}
});
}, function(err){
if(err){
console.log(err);
}
consoleDev(Populate._added + ' documents were added. There are now ' + Populate._read + ' + ' + Populate._added + ' = ' + (Populate._read + Populate._added) + ' documents populated.');
});
}
});
}
Nothing is showed on the console at the end and I don't have any error on the console. The script still works and add documents that wasn't present in the DB before. But I still don't have a message saying me how much were added.
I tried to do async.each(files, function(file, callback){ instead but I get the error Error: Callback was already called..
What am I missing here?
Which is the simplest way to compare a hash of a file without storing it in a database?
For example:
var filename = __dirname + '/../public/index.html';
var shasum = crypto.createHash('sha1');
var s = fs.ReadStream(filename);
s.on('data', function(d) {
shasum.update(d);
});
s.on('end', function() {
var d = shasum.digest('hex');
console.log(d + ' ' + filename);
fs.writeFile(__dirname + "/../public/log.txt", d.toString() + '\n', function(err) {
if(err) {
console.log(err);
} else {
console.log("The file was saved!");
}
});
});
The above code returns the hash of the HTML file. If I edit the file how can I know if it has been changed? In other words, how can I know if the hash has been changed?
Any suggestions?
Edited
Now the hash is being saved in the log file. How can I retrieve the hash from the file and match it with the new generated one? A code example would be awesome to give me a better understanding.
There is no difference with this question, but it isn't clear for me yet how to implement it.
If you're looking for changes on a file, then you can use one of Node's filesystem functions, fs.watch. This is how it's used:
fs.watch(filename, function (event, filename) {
//event is either 'rename' or 'change'
//filename is the name of the file which triggered the event
});
The watch function is however not very consistent, so you can use fs.watchFile as an alternative. fs.watchFile uses stat polling, so it's quite a bit slower than fs.watch, which detects file changes instantly.
Watching a file will return an instance of fs.FSWatcher, which has the events change and error. Calling .close will stop watching for changes on the file.
Here's an example relating to your code:
var filename = __dirname + '/../public/index.html';
var shasum = crypto.createHash('sha1');
var oldhash = null;
var s = fs.ReadStream(filename);
s.on('data', function(d) {
shasum.update(d);
});
s.on('end', function() {
var d = shasum.digest('hex');
console.log(d + ' ' + filename);
oldhash = d.toString();
fs.writeFile(__dirname + "/../public/log.txt", d.toString() + '\n', function(err) {
if(err) {
console.log(err);
}
else {
console.log("The file was saved!");
}
});
});
//watch the log for changes
fs.watch(__dirname + "/../public/log.txt", function (event, filename) {
//read the log contents
fs.readFile(__dirname + "/../public/log.txt", function (err, data) {
//match variable data with the old hash
if (data == oldhash) {
//do something
}
});
});
What's the difference between this question and the previous one you asked? If you're not wanting to store it in a database, then store it as a file. If you want to save the hash for multiple files, then maybe put them in a JSON object and write them out as a .json file so they're easy to read/write.
EDIT
Given what you added to your question, it should be pretty simple. You might write a function to do check and re-write:
function updateHash (name, html, callback) {
var sha = crypto.createHash('sha1');
sha.update(html);
var newHash = sha.digest('hex');
var hashFileName = name + '.sha';
fs.readFile(hashFileName, 'utf8', function (err, oldHash) {
var changed = true;
if (err)
console.log(err); // probably indicates the file doesn't exist, but you should consider doing better error handling
if (oldHash === newHash)
changed = false;
fs.writeFile(hashFileName, newHash, { encoding: 'utf8' }, function (err) {
callback(err, changed);
});
});
}
updateHash('index.html', "<html><head><title>...", function (err, isChanged) {
// do something with this information ?
console.log(isChanged);
});