I am reading the data below from a file, and now I want to create a JSON object from it. How can I do that using the Node.js fs module?
app.js
var fs = require('fs');
var path = "./ditLogs/" + file;
fs.readFile(path, function(err, data) {
    console.log('reading file data', data.toString());
});
Here is the data in the file that I need to create JSON for:
file.txt
{"event":"test data"}
{"event":"test data"}
{"event":"test data"}
You can use this sample function to safely parse a single line:
function parseLine(strFromFile) {
    try {
        return JSON.parse(strFromFile);
    } catch (e) {
        return {};
    }
}
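Since each line of file.txt is a separate JSON document, a minimal sketch (assuming the ./ditLogs path from your code and the parseLine helper above) is to read the whole file, split it on newlines, and parse each line into an array:
var fs = require('fs');

fs.readFile('./ditLogs/file.txt', 'utf8', function(err, data) {
    if (err) {
        return console.error(err);
    }
    // Split the line-delimited JSON, drop empty lines, and parse each one.
    var events = data.split('\n')
        .filter(function(line) { return line.trim().length > 0; })
        .map(parseLine);
    console.log(events); // [ { event: 'test data' }, ... ]
});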
FS Module
While developing in Node.js, you sometimes need to access the computer's file system. The fs module is a utility that assists with this process: it provides functions for performing different operations on the file system via a wrapper around the underlying API.
Require it in any JS file that needs to interact with the file system:
var fs = require("fs");
These are some of the methods you can use on this API, all of which are asynchronous:
fs.readFile
// Asynchronous read
fs.readFile('input.txt', function (err, data) {
if (err) {
return console.error(err);
}
console.log("Asynchronous read: " + data.toString());
});
fs.writeFile
fs.writeFile('input.txt', 'Simply Easy Learning!', function(err) {
if (err) {
return console.error(err);
}
console.log("Data written successfully!");
console.log("Let's read newly written data");
fs.readFile('input.txt', function (err, data) {
if (err) {
return console.error(err);
}
console.log("Asynchronous read: " + data.toString());
});
});
fs.open
// Asynchronous - Opening File
console.log("Going to open file!");
fs.open('input.txt', 'r+', function(err, fd) {
if (err) {
return console.error(err);
}
console.log("File opened successfully!");
});
fs.stat (provides information about the file) e.g.
fs.stat('input.txt', function (err, stats) {
if (err) {
return console.error(err);
}
console.log(stats);
console.log("Got file info successfully!");
// Check file type
console.log("isFile ? " + stats.isFile());
console.log("isDirectory ? " + stats.isDirectory());
});
fs.read (lower-level than readFile; it reads into a Buffer through a file descriptor and is usually not the first choice for reading a whole file)
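A minimal sketch of fs.read, assuming input.txt exists: unlike readFile, it needs a file descriptor from fs.open and a Buffer you allocate yourself:
var fs = require('fs');
var buf = Buffer.alloc(1024);

fs.open('input.txt', 'r', function(err, fd) {
    if (err) {
        return console.error(err);
    }
    // Read up to 1024 bytes from the start of the file into buf.
    fs.read(fd, buf, 0, buf.length, 0, function(err, bytesRead) {
        if (err) {
            return console.error(err);
        }
        console.log(buf.slice(0, bytesRead).toString());
        fs.close(fd, function(err) {
            if (err) console.log(err);
        });
    });
});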
fs.close
// Close the opened file; fd is the descriptor returned by a prior fs.open call.
fs.close(fd, function(err){
if (err){
console.log(err);
}
console.log("File closed successfully.");
});
fs.ftruncate (truncate an opened file)
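A minimal sketch, assuming fd is a descriptor opened with write access; truncating to 10 keeps only the first 10 bytes:
// Truncate the opened file to 10 bytes.
fs.ftruncate(fd, 10, function(err) {
    if (err) {
        return console.error(err);
    }
    console.log("File truncated successfully!");
});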
fs.unlink (delete a file)
fs.unlink('input.txt', function(err) {
if (err) {
return console.error(err);
}
console.log("File deleted successfully!");
});
fs.mkdir (make new directory)
fs.mkdir('/tmp/test',function(err){
if (err) {
return console.error(err);
}
console.log("Directory created successfully!");
});
fs.readdir (reads a directory)
fs.readdir("/tmp/",function(err, files){
if (err) {
return console.error(err);
}
files.forEach( function (file){
console.log( file );
});
});
fs.rmdir (remove directory)
fs.rmdir("/tmp/test",function(err){
if (err) {
return console.error(err);
}
console.log("Going to read directory /tmp");
fs.readdir("/tmp/",function(err, files){
if (err) {
return console.error(err);
}
files.forEach( function (file){
console.log( file );
});
});
});
Synchronous functions:
fs.readFileSync
// Synchronous read
var data = fs.readFileSync('input.txt');
console.log("Synchronous read: " + data.toString());
fs.writeFileSync
// Synchronous write; note that writeFileSync returns undefined, so there is nothing to capture
fs.writeFileSync('input.txt', 'asdasdasd');
Alternatively, read the file with the line-by-line package: less headache and more control over the reading process (it can pause and resume reading, close the file descriptor on demand, and skip N lines), all with less code.
1) install:
npm i --save line-by-line
npm i --save lodash
2) implement
var lineByLine = require('line-by-line'),
_ = require('lodash'),
path = require('path');
var lines = [];
var filePath = path.join(__dirname, "ditLogs", file);
var fileReader = new lineByLine(filePath);
fileReader.on('line', function(line) {
line = JSON.parse(line);
if(_.isPlainObject(line) && !_.isEmpty(line)) {
lines.push(line);
}
// optional
doSomethingWithLine(line);
});
function doSomethingWithLine(line) {
    // for example, you can save it to a db, send it somewhere using the request library, or just log it to the console
}
fileReader.on('error', function(error) {
console.error(error);
process.exit(-1);
});
fileReader.on('end', function() {
doSomethingAfterParsingAllLines(lines);
});
function doSomethingAfterParsingAllLines(records) {
// do something with data
}
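Where that extra control helps, here is a small sketch of the pause/resume API the package exposes, e.g. to throttle one async operation per line (saveToDatabase is a hypothetical handler):
fileReader.on('line', function(line) {
    // Stop emitting 'line' events while we do async work.
    fileReader.pause();
    saveToDatabase(line, function() {
        // Continue reading once this line has been handled.
        fileReader.resume();
    });
});

function saveToDatabase(line, callback) {
    // hypothetical async handler; replace with your own logic
    setImmediate(callback);
}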
If the whole file is a single valid JSON document (for example student.json), you can also read and parse it in one go:
'use strict';
const fs = require('fs');
let rawdata = fs.readFileSync('student.json');
let student = JSON.parse(rawdata);
console.log(student);
I'm new to Node. I have a txt file with three different URLs, one on each line. I want to read each line, axios.get that URL, and then write the content of each response to a different txt file named after the website.
For example, urls.txt currently has:
https://google.com
https://facebook.com
https://twitter.com
I want to be able to run node app.js urls.txt and have three files created for me with the names google.txt, facebook.txt, and twitter.txt, and inside each file is the response.data for each call.
Right now I have it working, but only if there is a single URL in the txt file. It also writes the response to a file I already created, temp.txt.
app.js
const fs = require('fs');
const process = require('process');
const axios = require('axios');
async function webCat(url) {
let resp = await axios.get(url);
fs.writeFile('temp.txt', resp.data, 'utf8', function (err) {
if (err) {
console.log(`An error occurred: ${err}`);
process.exit(1);
}
});
}
fs.readFile('urls.txt', 'utf8', function (err, url) {
if (err) {
console.error(err);
process.exit(1);
}
webCat(url);
});
How can I change this so that I can handle different URLs?
You only need to split the URLs in the file.
const fs = require('fs');
const process = require('process');
const axios = require('axios');
async function webCat(url) {
    let resp = await axios.get(url);
    // "https://google.com" splits into ["https:", "", "google", "com"],
    // so index 2 is the domain name.
    const domain = url.split(/[/.]/)[2];
    fs.writeFile(`${domain}.txt`, resp.data, 'utf8', function (err) {
        if (err) {
            console.log(`An error occurred: ${err}`);
            process.exit(1);
        }
    });
}
fs.readFile('urls.txt', 'utf8', function (err, data) {
    if (err) {
        console.error(err);
        process.exit(1);
    }
    // Trim and drop empty lines so a trailing newline doesn't produce a bogus empty URL.
    const urls = data.split('\n').map(u => u.trim()).filter(Boolean);
    urls.forEach(url => webCat(url));
});
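If you also want to know when every download and write has finished (for example, to log a final message), here is a sketch using fs.promises and Promise.all under the same file layout:
const fs = require('fs').promises;
const axios = require('axios');

async function main() {
    const data = await fs.readFile('urls.txt', 'utf8');
    const urls = data.split('\n').map(u => u.trim()).filter(Boolean);
    await Promise.all(urls.map(async (url) => {
        const resp = await axios.get(url);
        const domain = url.split(/[/.]/)[2];
        await fs.writeFile(`${domain}.txt`, resp.data, 'utf8');
    }));
    console.log('All URLs fetched and written.');
}

main().catch((err) => {
    console.error(err);
    process.exit(1);
});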
How can I read the data of a file (say, read.txt) without getting the buffered data?
I mean, I want to get the content of the file directly as a string, not as a Buffer.
And how do I delete a folder in Node.js?
Try this: passing an encoding such as 'utf8' makes readFile hand you a string instead of a Buffer:
const fs = require('fs');
fs.readFile('/read.txt', 'utf8', function (err, data) {
    if (err) {
        return console.log(err);
    }
    console.log(data); // data is a string, not a Buffer
});
To delete a folder (dir is the path to the folder):
try {
    fs.rmdirSync(dir, { recursive: true });
    console.log(`${dir} is deleted!`);
} catch (err) {
    console.error(`Error while deleting ${dir}.`);
}
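Note that recursive fs.rmdirSync is deprecated on newer Node versions (16+); here is a sketch of the replacement, fs.rmSync, under the same assumption about dir:
const fs = require('fs');

try {
    // force: true suppresses the error if the path does not exist
    fs.rmSync(dir, { recursive: true, force: true });
    console.log(`${dir} is deleted!`);
} catch (err) {
    console.error(`Error while deleting ${dir}.`, err);
}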
I have a Node application in which I am using GraphicsMagick to do some image/PDF manipulation.
I have the following code, which calls mosaic() to combine a PDF and a PNG. If I export the result as a PNG, the process succeeds. However, if I try to export the result as a PDF, the resulting PDF file does have a size, but opening it shows nothing; it appears to be blank. No errors are thrown.
var fs = require('fs');
var gm = require('gm');
// res below comes from the enclosing request handler
var newFileName = "result.pdf";
gm()
.in('-page', '+0+0')
.in('C:\\Code\\ProjectName\\src\\api\\test\\TestTemplatePDF.pdf')
.in('-page', '+103+70')
.in('C:\\Code\\ProjectName\\src\\api\\test\\pic1.png')
.mosaic()
.stream('pdf', (err, stdout, stderr) => {
if (err) console.log('stream error', err);
console.log('stream');
var writeStream = fs.createWriteStream('./etc/' + newFileName);
stdout.pipe(writeStream);
stderr.on('end', () => {
fs.readFile('./etc/streamError.txt', (err, data) => {
console.log('reading errorStream');
// if (err) console.error(err);
if (err) {
console.log('We found an error reading streamError.txt', err);
res.send(err);
} else if (data.length !== 0) {
console.log('streamError.txt should contain a detailed error message', data);
res.send(data);
} else {
console.log('streamError.txt contains no errors');
}
});
});
stdout.on('end', () => {
fs.readFile('./etc/' + newFileName, (err, data) => {
if (err) {
console.log("stdout error: " + err);
res.end();
} else {
console.log('Successfully read our new image file');
}
})
})
})
Output/console shows:
stream
reading errorStream
streamError.txt contains no errors
Successfully read our new image file
In the end this problem went away when I converted the PDF to a PNG before editing.
Presumably the conclusion is that, when using mosaic(), the inputs need to be the same type.
The following reads and imports many CSV files from disk into MongoDB, but Node.js won't exit after importing all the files unless it goes through the resizePhoto() function (which contains a process.exit after resizing the images).
How can I have it close properly after importing all the files without interrupting? If I add a process.exit in the 'end' handler, it exits after importing the first file.
var path = require('path');
var csv = require('fast-csv'); // assuming fast-csv, which provides fromPath

var importData = function(fileName) {
// Get file from disk.
var filePath = path.join(folder, fileName);
// Read and import the CSV file.
csv.fromPath(filePath, {
objectMode: true,
headers: keys
})
.on('data', function (data) {
var Obj = new models[fileName](data);
models[fileName].find({}).remove().exec();
Obj.save(function (err, importedObj) {
if (err) {
console.log(fileName, err);
} else if (fileName === 'PHOTOS') {
resizePhoto(importedObj);
}
});
})
.on('end', function() {
console.log(fileName + ': Imported.');
});
};
module.exports = importData;
Use the async module's parallel method (https://github.com/caolan/async#parallel). It can run your import tasks in parallel and call a final handler (which exits) after all tasks have ended.
In your case:
1) Somewhere in your project:
var csvImp = require('./importData.js');
async.parallel([
function(done){csvImp(name1,done);},
function(done){csvImp(name2,done);},
...
function(done){csvImp(nameN,done);}
],
function(err, results){
process.exit();
});
2) in importData.js
var importData = function(fileName,done) {
...
.on('end', function() {
console.log(fileName + ': Imported.');
done();
});
So next you need to prepare the list of tasks. Something like:
var tasks = [];
names.forEach(function(n){
    tasks.push(function(done){csvImp(n,done);});
});
And call async.parallel with tasks.
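Putting it together, a minimal sketch (assuming the CSV files live in a folder variable and importData.js exports the done-aware version above):
var fs = require('fs');
var async = require('async');
var csvImp = require('./importData.js');

fs.readdir(folder, function(err, names) {
    if (err) {
        return console.error(err);
    }
    var tasks = [];
    names.forEach(function(n) {
        tasks.push(function(done) { csvImp(n, done); });
    });
    // Run all imports in parallel, then exit once every one has called done().
    async.parallel(tasks, function(err, results) {
        if (err) console.error(err);
        process.exit();
    });
});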
Good luck)
How can I append data to a file using Node.js?
I already have a file named myfile.json with data in it. I want to check whether the file exists and then append some data to it.
I'm using the following code:
var writeTempFile = function (reportPath, data, callback) {
    fs.writeFile(reportPath, data, function (err) {
        callback(err);
    });
}

writeTempFile(reportDir + '_' + query.jobid + ".json", data, function (err) {
    context.sendResponse(data, 200, {
        'Content-Type': 'text/html'
    });
});
You can use jsonfile
var jf = require('jsonfile');
var yourdata;
var file = '/tmp/data.json';
jf.readFile(file, function(err, obj) {
if(!err) {
var finalData = merge(obj, yourdata);
jf.writeFile(file, finalData, function(err) {
console.log(err);
});
}
});
You need to implement your merging logic in merge(object1, object2)
https://npmjs.org/package/jsonfile
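For a shallow merge, here is a minimal sketch of merge(object1, object2) using Object.assign (swap in a deep merge such as lodash's merge if your JSON is nested):
function merge(object1, object2) {
    // Copies object2's keys over object1's; later values win.
    return Object.assign({}, object1, object2);
}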
Check out the following code.
function addToFile(reportPath, data, callback){
fs.appendFile(reportPath, data, function (err) {
callback(err);
});
}
Node offers the fs module to work with the file system.
To use this module, do:
var fs = require('fs')
To append some data to a file, you can do:
fs.appendFile('message.txt', 'data to append', function (err) {
if (err) throw err;
console.log('The "data to append" was appended to file!');
});
Node offers both synchronous and asynchronous methods to append data to a file; for more information, please refer to the fs documentation.
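For completeness, here is a sketch of the synchronous variant, fs.appendFileSync, which throws on error instead of taking a callback:
const fs = require('fs');

try {
    fs.appendFileSync('message.txt', 'data to append');
    console.log('The "data to append" was appended to file!');
} catch (err) {
    console.error(err);
}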