How to save Node.js console.log output to a .json file - javascript

I want to save the output of a Node.js console.log call as a JSON file.
The first part is working fine, but I am unable to save it to the file. I also tried saving it with:
$ node filename.js > test.json
That works, but it doesn't format the file properly.
var gplay = require('google-play-scraper');
const fs = require('fs');

function def() {
  gplay.app({ appId: 'com.sablostudio.printing3d.town.builder', country: 'us' }).then(console.log);
}
def();

fs.writeFile('./newconstomer.json', JSON.stringify(--------???, null, 2), err => {
  if (err) {
    console.log(err);
  } else {
    console.log("file done");
  }
});

You can do this with writeFile in this way:
First, you need a logger function that takes the log and saves it to the newconstomer.json file:
function logger(log) {
  fs.writeFile('./newconstomer.json', JSON.stringify(log), function (err) {
    if (err) {
      console.log(err);
    }
    console.log("file done");
  });
}
Now, replace the console.log method with the logger function:
function def() {
  gplay.app({ appId: 'com.sablostudio.printing3d.town.builder', country: 'us' }).then(logger); // <--- place the logger function here
}
def();
Explanation:
The logger function takes a log parameter and serializes it with the JSON.stringify() method before writing it to the file.
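Since the question also mentions that the redirected output "can't format files properly": JSON.stringify accepts optional arguments for pretty-printing, so the call above can be changed to write indented JSON, e.g.:
function logger(log) {
  // null skips the replacer; 2 pretty-prints with two-space indentation
  fs.writeFile('./newconstomer.json', JSON.stringify(log, null, 2), function (err) {
    if (err) {
      console.log(err);
    }
    console.log("file done");
  });
}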

Related

fs.writeFile refreshing index.html whenever called

I have an issue where I am writing data from an array to a JSON file every 10 seconds on an Express server, and this is causing the main page to reload whenever the writeFile function is called.
The main page makes a GET request to retrieve Entry objects from the array. However, I don't understand why the page reloads, since the array isn't being changed in any way; it is just being used to write to the JSON file.
In index.js (server code):
const server = require('./app');
const { readFromFile, writeToFile } = require('./helpers/readWrite');

const port = process.env.PORT || 3000;

readFromFile();

// start the server
server.listen(port, () => {
  console.log(`Listening at http://localhost:${port}`);
  // set the server to save to file every 10 seconds
  setInterval(writeToFile, 10000);
});
In readWrite.js:
const fs = require('fs');
const Entry = require('../models/entry'); // file containing the array that the data is written from

function writeToFile() {
  const entriesDataStringified = JSON.stringify(Entry.all); // stringify the entriesData array
  // write to the json file, overwriting any data already in the file
  fs.writeFile('./data/entries.json', entriesDataStringified, (err) => {
    // check for error when writing file
    if (err) {
      console.log(err);
    } else {
      console.log('File successfully written');
    }
  });
}
Retrieving entries on client side:
async function getPosts(e) {
  try {
    response = await fetch(`http://localhost:3000/search/page/${pageNum}`);
    data = await response.json();
    console.log(data);
    data.entries.forEach(post => {
      if (!postArray.includes(post)) {
        newestArray.push(post);
        postArray.push(post);
        emojiArray.push({ id: post.id, emojis: { loveCount: false, laughCount: false, likeCount: false } });
      }
    });
    console.log(emojiArray);
    Post.drawAll();
    pageNum++;
  } catch (err) {
    console.log(err);
  }
}
Thanks.

How to write console.log to a file instead

Now I show the information using:
console.log (kraken.id, markets)
However, I want to write all the information that goes to the console to a file instead. How can that be done by completing the below code?
'use strict';
var ccxt = require('ccxt');

(async () => {
  let kraken = new ccxt.kraken();
  let markets = await kraken.load_markets();
  // console.log(kraken.id, markets)
  // How to write the above console.log to a file?
  const fs = require('fs');
  fs.writeFile("/Users/Andreas/Desktop/NODE/myproject/files/test.txt", "allinfoAsstring", function (err) {
    if (err) {
      return console.log(err);
    }
    console.log("The file was saved!");
  });
})();
You can create an object out of your variables and format it as a JSON string.
/* ... */
const obj = { kraken, markets };
const fs = require('fs');

fs.writeFile("/Users/Andreas/Desktop/NODE/myproject/files/test.txt", JSON.stringify(obj), function (err) {
  if (err) {
    return console.log(err);
  }
  console.log("The file was saved!");
});
Later, you can retrieve the values from the file by running:
fs.readFile('/Users/Andreas/Desktop/NODE/myproject/files/test.txt', 'utf8', function (err, data) {
  const obj = JSON.parse(data);
  console.log("The data from the file is:", obj); // pass obj as a separate argument so it isn't coerced to "[object Object]"
});
Thanks for the diverse solutions. The simplest way for me was:
node app.js > app.log 2>&1
This redirects stdout to a file named app.log and redirects stderr to stdout.
So all my console.log output goes to app.log.
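If you prefer to keep errors in a separate file, redirect stderr on its own instead:
node app.js > app.log 2> err.log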
You can use JSON.stringify(obj); every object can be converted into a string via this method.
I recommend not using console.log in production, as it is synchronous and can block your code.
You can use winston instead,
and easily write all the logs to files (if you want) by adding file transports:
const winston = require('winston');

const logger = winston.createLogger({
  level: 'info',
  format: winston.format.json(),
  defaultMeta: { service: 'user-service' },
  transports: [
    //
    // - Write all logs with level `info` and below to `combined.log`
    // - Write all logs with level `error` to `error.log`
    //
    new winston.transports.File({ filename: 'error.log', level: 'error' }),
    new winston.transports.File({ filename: 'combined.log' })
  ]
});
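With a logger like this, calls such as logger.info() and logger.error() take the place of console.log; for example (the message and metadata here are just illustrative):
logger.info('user saved', { userId: 42 });
logger.error('failed to write file');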
An addition to loicnestler's answer that means you don't need to change your 'console.log' statements:
Update the reference to 'console.log' so it calls 'writeFile' when logging:
const log = console.log;
console.log = (data) => {
  log(data);
  // <loicnestler's writeFile logic>
};
In Node, console.log() calls util.inspect() to print objects.
You should call that directly and write it to a file.
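A minimal sketch of that idea, using util.format (which applies the same formatting rules console.log does, calling util.inspect for objects) and appending each line to a file; the file name here is an arbitrary choice:
const fs = require('fs');
const util = require('util');

function logToFile(...args) {
  // util.format serializes the arguments the same way console.log would
  const line = util.format(...args) + '\n';
  fs.appendFile('console.log.txt', line, (err) => {
    if (err) console.error(err);
  });
}

logToFile(kraken.id, markets); // writes what console.log would have printed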

Nodejs fs module: How to readFile and create json object?

I am reading the data below from a file. Now I want to create a JSON object from it. How can I do that using the Node.js fs module?
app.js
var path = "./ditLogs/" + file;

fs.readFile(path, function (err, data) {
  console.log('reading file data', data.toString());
});
Here is the data in the file that I need to create JSON for:
file.txt
{"event":"test data"}
{"event":"test data"}
{"event":"test data"}
You can use this sample function:
function parseJSON(strFromFile) {
  try {
    return JSON.parse(strFromFile);
  } catch (e) {
    return {};
  }
}
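Note that the file shown above holds one JSON object per line, so calling JSON.parse on the entire file contents would throw. A minimal sketch that parses each line separately (reusing the path from the question):
var fs = require('fs');
var path = "./ditLogs/" + file;

fs.readFile(path, 'utf8', function (err, data) {
  if (err) return console.error(err);
  // split on newlines, drop blank lines, and parse each remaining line as JSON
  var events = data.split('\n')
    .filter(function (line) { return line.trim() !== ''; })
    .map(function (line) { return JSON.parse(line); });
  console.log(events); // [ { event: 'test data' }, { event: 'test data' }, ... ]
});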
FS Module
While developing in Node.js, you sometimes need to access the computer's file system. The fs module is a utility that assists with this process.
fs includes functions that allow different activities to be performed on the file system via a wrapper around the API.
It should be included/required/imported into any JS file that needs to interact with the file system API:
var fs = require("fs");
These are the different methods you can use on this API, all of which are asynchronous:
fs.readFile
// Asynchronous read
fs.readFile('input.txt', function (err, data) {
  if (err) {
    return console.error(err);
  }
  console.log("Asynchronous read: " + data.toString());
});
fs.writeFile
fs.writeFile('input.txt', 'Simply Easy Learning!', function (err) {
  if (err) {
    return console.error(err);
  }
  console.log("Data written successfully!");
  console.log("Let's read newly written data");
  fs.readFile('input.txt', function (err, data) {
    if (err) {
      return console.error(err);
    }
    console.log("Asynchronous read: " + data.toString());
  });
});
fs.open
// Asynchronous - Opening File
console.log("Going to open file!");
fs.open('input.txt', 'r+', function (err, fd) {
  if (err) {
    return console.error(err);
  }
  console.log("File opened successfully!");
});
fs.stat (provides information about the file), e.g.
fs.stat('input.txt', function (err, stats) {
  if (err) {
    return console.error(err);
  }
  console.log(stats);
  console.log("Got file info successfully!");
  // Check file type
  console.log("isFile ? " + stats.isFile());
  console.log("isDirectory ? " + stats.isDirectory());
});
fs.read (similar to readFile and should not be the first choice for reading a file)
fs.close
// Close the opened file (fd is the file descriptor returned by fs.open)
fs.close(fd, function (err) {
  if (err) {
    console.log(err);
  }
  console.log("File closed successfully.");
});
fs.ftruncate (truncate an opened file)
fs.unlink (delete a file)
fs.unlink('input.txt', function (err) {
  if (err) {
    return console.error(err);
  }
  console.log("File deleted successfully!");
});
fs.mkdir (make new directory)
fs.mkdir('/tmp/test', function (err) {
  if (err) {
    return console.error(err);
  }
  console.log("Directory created successfully!");
});
fs.readdir (reads a directory)
fs.readdir("/tmp/", function (err, files) {
  if (err) {
    return console.error(err);
  }
  files.forEach(function (file) {
    console.log(file);
  });
});
fs.rmdir (remove directory)
fs.rmdir("/tmp/test", function (err) {
  if (err) {
    return console.error(err);
  }
  console.log("Going to read directory /tmp");
  fs.readdir("/tmp/", function (err, files) {
    if (err) {
      return console.error(err);
    }
    files.forEach(function (file) {
      console.log(file);
    });
  });
});
Synchronous functions:
readFileSync
// Synchronous read
var data = fs.readFileSync('input.txt');
console.log("Synchronous read: " + data.toString());
writeFileSync
// Synchronous write (returns undefined, so there is nothing useful to assign)
fs.writeFileSync('input.txt', 'asdasdasd');
Simply read the file using the line-by-line package: less headache and more control over the reading process (it can pause and resume reading, close the file descriptor on demand, and skip N lines) with less code.
1) Install:
npm i --save line-by-line
npm i --save lodash
2) Implement:
var lineByLine = require('line-by-line'),
    _ = require('lodash'),
    path = require('path');

var lines = [];
var filePath = path.join(__dirname, "ditLogs", file);
var fileReader = new lineByLine(filePath);

fileReader.on('line', function (line) {
  line = JSON.parse(line);
  if (_.isPlainObject(line) && !_.isEmpty(line)) {
    lines.push(line);
  }
  // optional
  doSomethingWithLine(line);
});

function doSomethingWithLine(line) {
  // for example, you can save it to a db, send it somewhere using the request library, or just show it in the console
}

fileReader.on('error', function (error) {
  console.error(error);
  process.exit(-1);
});

fileReader.on('end', function () {
  doSomethingAfterParsingAllLines(lines);
});

function doSomethingAfterParsingAllLines(records) {
  // do something with the data
}
'use strict';
const fs = require('fs');
let rawdata = fs.readFileSync('student.json');
let student = JSON.parse(rawdata);
console.log(student);

How to exit properly from node.js

The following code reads and imports many CSV files from disk into MongoDB, but Node.js won't exit after importing all the files unless it goes through the resizePhoto() function (which contains a process.exit call after resizing the images).
How can I have it close properly after importing all files without interrupting? If I add a process.exit in .on('end'), it will exit after importing the first file.
var importData = function (fileName) {
  // Get file from disk.
  var filePath = path.join(folder, fileName);
  // Read and import the CSV file.
  csv.fromPath(filePath, {
    objectMode: true,
    headers: keys
  })
  .on('data', function (data) {
    var Obj = new models[fileName](data);
    models[fileName].find({}).remove().exec();
    Obj.save(function (err, importedObj) {
      if (err) {
        console.log(fileName, err);
      } else if (fileName === 'PHOTOS') {
        resizePhoto(importedObj);
      }
    });
  })
  .on('end', function () {
    console.log(fileName + ': Imported.');
  });
};
module.exports = importData;
Use the async module's parallel method (https://github.com/caolan/async#parallel). It can run your tasks (imports) in parallel and call the final handler (exit) after all tasks have ended.
In your case:
1) Somewhere in the project:
var csvImp = require('./importData.js');
async.parallel([
  function (done) { csvImp(name1, done); },
  function (done) { csvImp(name2, done); },
  ...
  function (done) { csvImp(nameN, done); }
],
function (err, results) {
  process.exit();
});
2) In importData.js:
var importData = function (fileName, done) {
  ...
  .on('end', function () {
    console.log(fileName + ': Imported.');
    done();
  });
So next you need to prepare the list of tasks. Something like:
var tasks = [];
names.forEach(function (n) {
  tasks.push(function (done) { csvImp(n, done); });
});
Then call async.parallel with tasks, as in the sketch below.
Good luck)
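Putting those pieces together, a minimal sketch (the entries in the names array are hypothetical placeholders):
var async = require('async');
var csvImp = require('./importData.js');

var names = ['PHOTOS', 'USERS']; // hypothetical: use your real CSV file names
var tasks = [];
names.forEach(function (n) {
  tasks.push(function (done) { csvImp(n, done); });
});

// run all imports in parallel, then exit once every done() has been called
async.parallel(tasks, function (err, results) {
  if (err) console.log(err);
  process.exit();
});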

node.js code to append data to a file

How can I append data to a file using Node.js?
I already have a file named myfile.json with data in it. I want to check whether the file exists and then append some data to that file.
I'm using the following code:
var writeTempFile = function (reportPath, data, callback) {
  fs.writeFile(reportPath, data, function (err) {
    // if (err) say(err);
    callback(err);
  });
};

writeTempFile(reportDir + '_' + query.jobid + ".json", data, function (err) {
  context.sendResponse(data, 200, {
    'Content-Type': 'text/html'
  });
});
You can use jsonfile:
var jf = require('jsonfile');

var yourdata;
var file = '/tmp/data.json';

jf.readFile(file, function (err, obj) {
  if (!err) {
    var finalData = merge(obj, yourdata);
    jf.writeFile(file, finalData, function (err) {
      console.log(err);
    });
  }
});
You need to implement your merging logic in merge(object1, object2)
https://npmjs.org/package/jsonfile
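If a shallow merge is enough for your data, a minimal merge could look like this (assumption: keys from the new data overwrite keys already in the file):
function merge(object1, object2) {
  // shallow merge: properties of object2 win on key collisions
  return Object.assign({}, object1, object2);
}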
Check out the following code:
function addToFile(reportPath, data, callback) {
  fs.appendFile(reportPath, data, function (err) {
    callback(err);
  });
}
Node offers the fs module to work with the file system.
To use this module, do:
var fs = require('fs');
To append some data to a file you can do:
fs.appendFile('message.txt', 'data to append', function (err) {
  if (err) throw err;
  console.log('The "data to append" was appended to file!');
});
Node offers you both synchronous and asynchronous methods to append data to a file. For more information, please refer to the documentation.
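For completeness, a minimal example of the synchronous variant, which blocks until the write finishes and throws on error instead of using a callback:
// synchronous append: no callback, errors are thrown
fs.appendFileSync('message.txt', 'data to append');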
