I am trying to write a JSON file from loop data using Node, e.g.:
let jsonFile = require('jsonfile');
for (i = 0; i < 11; i++) {
jsonFile.writeFile('loop.json', "id :" + i + " square :" + i * i);
}
Output in loop.json is:
id :1 square : 1
but I want the output file to look like this (below), and if I run the code again it should add the new output as elements to the same existing JSON file:
{
    "table": [
        { "Id": 1, "square": 1 },
        { "Id": 2, "square": 4 },
        { "Id": 3, "square": 9 },
        { "Id": 4, "square": 16 },
        { "Id": 5, "square": 25 },
        { "Id": 6, "square": 36 },
        { "Id": 7, "square": 49 },
        { "Id": 8, "square": 64 },
        { "Id": 9, "square": 81 },
        { "Id": 10, "square": 100 }
    ]
}
I want to keep using the same file that I created the first time, but whenever I run the code, the new elements should be added to that same existing file.
const fs = require('fs');

let obj = {
    table: []
};

// Note: fs.exists is deprecated; fs.stat or fs.access are the recommended alternatives.
fs.exists('myjsonfile.json', function(exists) {
    if (exists) {
        console.log("yes, file exists");
        fs.readFile('myjsonfile.json', function readFileCallback(err, data) {
            if (err) {
                console.log(err);
            } else {
                obj = JSON.parse(data);
                for (let i = 0; i < 5; i++) {
                    obj.table.push({
                        id: i,
                        square: i * i
                    });
                }
                let json = JSON.stringify(obj);
                fs.writeFile('myjsonfile.json', json, function(err) {
                    if (err) console.log(err);
                });
            }
        });
    } else {
        console.log("file does not exist");
        for (let i = 0; i < 5; i++) {
            obj.table.push({
                id: i,
                square: i * i
            });
        }
        let json = JSON.stringify(obj);
        fs.writeFile('myjsonfile.json', json, function(err) {
            if (err) console.log(err);
        });
    }
});
If this JSON file won't become too big over time, you should try:
Create a JavaScript object with the table array in it
var obj = {
table: []
};
Add some data to it, for example:
obj.table.push({id: 1, square: 1});
Convert it from an object to a string with JSON.stringify
var json = JSON.stringify(obj);
Use fs to write the file to disk
var fs = require('fs');
fs.writeFile('myjsonfile.json', json, 'utf8', callback);
If you want to append it, read the JSON file and convert it back to an object
fs.readFile('myjsonfile.json', 'utf8', function readFileCallback(err, data) {
    if (err) {
        console.log(err);
    } else {
        obj = JSON.parse(data); // now it is an object
        obj.table.push({id: 2, square: 4}); // add some data
        json = JSON.stringify(obj); // convert it back to a JSON string
        fs.writeFile('myjsonfile.json', json, 'utf8', callback); // write it back
    }
});
This approach works effectively for data up to roughly 100 MB. Over that limit, you should use a database engine.
UPDATE:
Create a function which returns the current date (year + month + day) as a string, and create the file named with that string plus .json. The fs module has a function for checking file existence, fs.stat(path, callback).
With this, you can check whether the file exists. If it exists, use the read function; if not, use the create function. Use the date string as the path, because the file will be named after today's date plus .json. The callback receives an error when the file does not exist, and a stats object otherwise.
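Here is a minimal sketch of that update, assuming the same table structure as above (the date format and the sample element are illustrative):
const fs = require('fs');
// Build a "YYYYMMDD" string for today's date.
function getDateString() {
    const d = new Date();
    const month = String(d.getMonth() + 1).padStart(2, '0');
    const day = String(d.getDate()).padStart(2, '0');
    return `${d.getFullYear()}${month}${day}`;
}
const fileName = getDateString() + '.json';
// fs.stat reports an error when the file does not exist.
fs.stat(fileName, function(err, stats) {
    if (err) {
        // File does not exist yet: create it with an empty table.
        fs.writeFile(fileName, JSON.stringify({ table: [] }), function(err) {
            if (err) console.log(err);
        });
    } else {
        // File exists: read it, append, and write it back.
        fs.readFile(fileName, 'utf8', function(err, data) {
            if (err) return console.log(err);
            const obj = JSON.parse(data);
            obj.table.push({ id: 1, square: 1 }); // sample element
            fs.writeFile(fileName, JSON.stringify(obj), function(err) {
                if (err) console.log(err);
            });
        });
    }
});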
Please try the following program; it should produce the output you are expecting.
var fs = require('fs');
var data = {};
data.table = [];
for (var i = 0; i < 26; i++) {
    var obj = {
        id: i,
        square: i * i
    };
    data.table.push(obj);
}
fs.writeFile("input.json", JSON.stringify(data), function(err) {
    if (err) throw err;
    console.log('complete');
});
Save this program in a JavaScript file, say, square.js.
Then run the program from the command prompt using the command node square.js.
It simply overwrites the existing file with a new set of data every time you execute the command.
Happy coding.
Try:
var fs = require("fs");
var sampleObject = { your data };
fs.writeFile("./object.json", JSON.stringify(sampleObject, null, 4), (err) => {
if (err) { console.error(err); return; };
console.log("File has been created");
});
For a synchronous approach:
const fs = require('fs')
fs.writeFileSync('file.json', JSON.stringify(jsonVariable));
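If you also need the append behavior synchronously, a short sketch (assuming file.json already contains an object with a table array, as in the answers above):
const fs = require('fs');
// Read, modify, and rewrite the file synchronously.
const obj = JSON.parse(fs.readFileSync('file.json', 'utf8'));
obj.table.push({ id: 11, square: 121 }); // sample element
fs.writeFileSync('file.json', JSON.stringify(obj));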
You should read the file every time you want to add new properties to the JSON, and then add the new properties:
var fs = require('fs');
fs.readFile('data.json', function(err, content) {
    if (err) throw err;
    var parseJson = JSON.parse(content);
    for (var i = 0; i < 11; i++) {
        parseJson.table.push({id: i, square: i * i});
    }
    fs.writeFile('data.json', JSON.stringify(parseJson), function(err) {
        if (err) throw err;
    });
});
The example above is also correct, but here is a simpler example:
var fs = require("fs");
var sampleObject = {
name: 'pankaj',
member: 'stack',
type: {
x: 11,
y: 22
}
};
fs.writeFile("./object.json", JSON.stringify(sampleObject, null, 4), (err) => {
if (err) {
console.error(err);
return;
}
console.log("File has been created");
});
For formatting, jsonfile provides a spaces option which you can pass as a parameter:
jsonfile.writeFile(file, obj, {spaces: 2}, function (err) {
    if (err) console.error(err);
})
Or set jsonfile.spaces = 4 globally. See the jsonfile documentation for details.
I would not suggest writing to the file on each iteration of the loop; instead, construct the JSON object in the loop and write it to the file outside the loop.
var jsonfile = require('jsonfile');
var obj={
'table':[]
};
for (var i = 0; i < 11; i++) {
    obj.table.push({"id": i, "square": i * i});
}
jsonfile.writeFile('loop.json', obj, {spaces: 2}, function(err) {
    if (err) console.log(err);
});
I agree with the answers above. Here is a complete read-and-write sample for anyone who needs it:
router.post('/', function(req, res, next) {
    console.log(req.body);
    var id = Math.floor((Math.random() * 100) + 1);
    var title = req.body.title;
    var description = req.body.description;
    var mynotes = {"Id": id, "Title": title, "Description": description};
    fs.readFile('db.json', 'utf8', function(err, data) {
        if (err) return console.log(err);
        var obj = JSON.parse(data);
        obj.push(mynotes);
        var strNotes = JSON.stringify(obj);
        fs.writeFile('db.json', strNotes, function(err) {
            if (err) return console.log(err);
            console.log('Note added');
        });
    });
});
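Note that this sample assumes db.json already exists and contains a JSON array (for example, []), since the parsed content is pushed to directly.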
Promise-based solution [JavaScript (ES6) + Node.js (v10 or above)]
Write to the File:
const fsPromises = require('fs').promises;
fsPromises.writeFile('myFile.json', JSON.stringify({ name: "Sridhar", salary: 1234 }))
.then( () => { console.log('JSON saved'); })
.catch(er => { console.log(er);});
Append to the File:
const fsPromises = require('fs').promises;
fsPromises.readFile('myFile.json', 'utf8')
.then(data => {
let json = JSON.parse(data);
json.myArr.push({name: "Krishnan", salary: 5678});
fsPromises.writeFile('myFile.json', JSON.stringify(json))
.then( () => { console.log('Append Success'); })
.catch(err => { console.log("Append Failed: " + err);});
})
.catch(err => { console.log("Read Error: " +err);});
If your project supports JavaScript ES8, you could use async/await instead of raw promises.
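For example, the append step could be rewritten like this (same myFile.json and myArr assumptions as above):
const fsPromises = require('fs').promises;
async function appendToFile() {
    try {
        const data = await fsPromises.readFile('myFile.json', 'utf8');
        const json = JSON.parse(data);
        json.myArr.push({ name: "Krishnan", salary: 5678 });
        await fsPromises.writeFile('myFile.json', JSON.stringify(json));
        console.log('Append Success');
    } catch (err) {
        console.log("Append Failed: " + err);
    }
}
appendToFile();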
Related
I need to write a NodeJS script for the following task:
I have a temp.json file with content like:
{
"name": "foo",
"id": "1.2.15"
}
When we run the script, I want the temp.json file's content changed. Specifically, I want the number after the second dot in id to be incremented as follows:
{
"name": "foo",
"id": "1.2.16"
}
I don't know JavaScript and would appreciate any help.
Thanks!
"use strict";
const fs = require('fs');
const data = JSON.parse(fs.readFileSync("file.json"));
const nums = data.id.split('.');
++nums[2];
data.id = nums.join('.');
fs.writeFileSync("file.json", JSON.stringify(data, null, 4));
And if you want to do it without breaking the async nature of Node, you can do it with the async functions as well:
const fs = require('fs');
fs.readFile('temp.json', 'utf8', (e, data) => {
    if (e) throw e;
    const obj = JSON.parse(data);
    const idParts = obj.id.split('.').map((el) => parseInt(el, 10));
    idParts[2] = idParts[2] + 1;
    obj.id = idParts.join('.');
    fs.writeFile('out.json', JSON.stringify(obj), (err) => {
        console.log(err || 'complete');
    });
});
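The same task with the fs.promises API (Node v10+), writing back to temp.json, would look roughly like this:
const fsPromises = require('fs').promises;
async function bumpPatchVersion() {
    const obj = JSON.parse(await fsPromises.readFile('temp.json', 'utf8'));
    const parts = obj.id.split('.').map(Number);
    parts[2] += 1; // increment the number after the second dot
    obj.id = parts.join('.');
    await fsPromises.writeFile('temp.json', JSON.stringify(obj, null, 4));
}
bumpPatchVersion().catch(console.error);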
I'm a bit confused about how to proceed. I am using Archiver (a Node.js module) as a means to write data to a zip file. Currently, I have my code working when I write to a file (local storage).
var fs = require('fs');
var archiver = require('archiver');
var output = fs.createWriteStream(__dirname + '/example.zip');
var archive = archiver('zip', {
zlib: { level: 9 }
});
archive.pipe(output);
archive.append(mybuffer, {name: 'msg001.txt'});
I’d like to modify the code so that the archive target file is an AWS S3 bucket. Looking at the code examples, I can specify the bucket name and key (and body) when I create the bucket object as in:
var s3 = new AWS.S3();
var params = {Bucket: 'myBucket', Key: 'myMsgArchive.zip', Body: myStream};
s3.upload( params, function(err,data){
…
});
Or
s3 = new AWS.S3({ params: {Bucket: 'myBucket', Key: 'myMsgArchive.zip'} });
s3.upload({ Body: myStream })
.send(function(err,data) {
…
});
With regard to my S3 example(s), myStream appears to be a readable stream, and I am confused as to how to make this work, since archive.pipe requires a writable stream. Is this something where we need to use a pass-through stream? I've found an example where someone created a pass-through stream, but the example is too terse to gain a proper understanding. The specific example I am referring to is:
Pipe a stream to s3.upload()
Any help someone can give me would greatly be appreciated. Thanks.
This could be useful for anyone else wondering how to use pipe.
Since you correctly referenced the example using the pass-through stream, here's my working code:
1 - The routine itself, zipping files with node-archiver
exports.downloadFromS3AndZipToS3 = () => {
// These are my input files I'm willing to read from S3 to ZIP them
const files = [
`${s3Folder}/myFile.pdf`,
`${s3Folder}/anotherFile.xml`
]
// Just in case you'd like to rename them, as they have a different name in the final ZIP
const fileNames = [
'finalPDFName.pdf',
'finalXMLName.xml'
]
// Use promises to get them all
const promises = []
files.forEach((file) => {
    promises.push(s3client.getObject({
        Bucket: yourBucket,
        Key: file
    }).promise())
})
// Define the ZIP target archive
let archive = archiver('zip', {
zlib: { level: 9 } // Sets the compression level.
})
// Pipe!
archive.pipe(uploadFromStream(s3client, 'someDestinationFolderPathOnS3', 'zipFileName.zip'))
archive.on('warning', function(err) {
if (err.code === 'ENOENT') {
// log warning
} else {
// throw error
throw err;
}
})
// Good practice to catch this error explicitly
archive.on('error', function(err) {
throw err;
})
// The actual archive is populated here
return Promise
.all(promises)
.then((data) => {
data.forEach((thisFile, index) => {
    archive.append(thisFile.Body, { name: fileNames[index] })
})
archive.finalize()
})
}
2 - The helper method
const uploadFromStream = (s3client, someFolder, aFilename) => {
const pass = new stream.PassThrough()
const s3params = {
Bucket: yourBucket,
Key: `${someFolder}/${aFilename}`,
Body: pass,
ContentType: 'application/zip'
}
s3client.upload(s3params, (err, data) => {
if (err)
console.log(err)
if (data)
console.log('Success')
})
return pass
}
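The design point worth noting: archive.pipe() needs a writable destination, while s3.upload() needs a readable Body. stream.PassThrough is a duplex stream that is both, so the archive writes into one end and S3 consumes the other. The helper assumes const stream = require('stream') and yourBucket are in scope.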
The following example takes the accepted answer and makes it work with local files as requested.
const archiver = require("archiver")
const fs = require("fs")
const AWS = require("aws-sdk")
const s3 = new AWS.S3()
const stream = require("stream")
const zipAndUpload = async () => {
const files = [`test1.txt`, `test2.txt`]
const fileNames = [`test1target.txt`, `test2target.txt`]
const archive = archiver("zip", {
zlib: { level: 9 } // Sets the compression level.
})
files.map((thisFile, index) => {
archive.append(fs.createReadStream(thisFile), { name: fileNames[index] })
})
const uploadStream = new stream.PassThrough()
archive.pipe(uploadStream)
archive.finalize()
archive.on("warning", function (err) {
if (err.code === "ENOENT") {
console.log(err)
} else {
throw err
}
})
archive.on("error", function (err) {
throw err
})
archive.on("end", function () {
console.log("archive end")
})
await uploadFromStream(uploadStream)
console.log("all done")
}
const uploadFromStream = async pass => {
const s3params = {
Bucket: "bucket-name",
Key: `streamtest.zip`,
Body: pass,
ContentType: "application/zip"
}
return s3.upload(s3params).promise()
}
zipAndUpload()
I am trying to find a way to get the currently logged-in user and then append them to a JSON file. Below is my code: it first reads the dir, then gets the most recent file, returns it, and then appends the currently logged-in user.
I can append a string to the file, but when I try to access req.user it states:
Cannot read property 'user' of undefined
What would I need to include in this file so that it knows what the user is?
let fs = require("fs"),
express = require("express"),
_ = require("underscore"),
User = require("./models/user"),
path = require("path");
let getFileAddUser = () => {
let filePath = '../automation_projects/wss-automation-u/results/temp/';
fs.readdir(filePath, (err, files) => {
if (err) { throw err; }
let file = getMostRecentFile(files, filePath);
console.log(file);
fs.readFile(filePath + file, 'utf8', (err, data) => {
    if (err) {
        console.error(err);
        return;
    } else {
        let json = JSON.parse(data); // parse only after the error check
//Un-comment to write to most recent file.
//==================================================
//This should find the currently logged in user and append them to the most recent file found.
json.currentuser = req.user;
fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
if(error){
console.error(error);
return;
} else {
console.log(json);
}
});
//==================================================
console.log(data);
}
});
});
};
//Get the most recent file from the results folder.
function getMostRecentFile(files, path) {
let out = [];
files.forEach(function(file) {
let stats = fs.statSync(path + "/" +file);
if(stats.isFile()) {
out.push({"file":file, "mtime": stats.mtime.getTime()});
}
});
out.sort(function(a,b) {
return b.mtime - a.mtime;
})
return (out.length>0) ? out[0].file : "";
}
module.exports = getFileAddUser;
Thanks to a knowledgeable co-worker and some further research, we were able to get this working. I'd like to share the code we came up with to append the currently logged-in user to our results file. You will also notice we got some help from the Ramda.js library.
let fs = require("fs"),
express = require("express"),
_ = require("underscore"),
User = require("./models/user"),
r = require("ramda"),
path = require("path");
//This will be our function to get the most recent file from our dir and
//return it to us. We then use this function below.
function getMostRecentFile(files, path) {
let out = [];
let f = r.tail(files);
console.log(files);
f.forEach(function(file) {
let stats = fs.statSync(path + "/" +file);
if(stats.isFile()) {
out.push({"file":file, "mtime": stats.mtime.getTime()});
}
});
out.sort(function(a,b) {
return b.mtime - a.mtime;
})
return (out.length>0) ? out[0].file : "";
}
//Passing in 'u' as an argument, which can then be used in a route to pass in
//anything that we want it to be. In our case it was the currently logged-in
//user.
let getUser = (u) => {
let user = u;
let filePath = '../automation_projects/wss-automation-u/results/temp/';
//Comment above and uncomment below for testing locally.
// let filePath = "./temp/";
let file = "";
//Below we read our dir then get the most recent file using the
//getMostRecentfile function above.
read_directory(filePath).then( files => {
file = getMostRecentFile(files, filePath)
console.log(file);
return(read_file(filePath + file))
}).then( x => {
// Here we parse our data, with x representing the data that we
// returned above.
let json = JSON.parse(x);
return new Promise(function(resolve, reject) {
json.currentuser = u;
//And finally we write to the end of the latest file.
fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
if(error) reject(error);
else resolve(json);
// console.log(json);
});
});
});
}
let read_directory = (path) => {
return new Promise((resolve, reject) => {
fs.readdir(path, (err, items) => {
if (err){
return reject(err)
}
return resolve([path, ...items])
})
})
}
let read_file = (path) => {
return new Promise((resolve, reject) => {
fs.readFile(path, "utf8", (err, items) => {
if (err){
return reject(err)
}
return resolve(items)
})
})
}
module.exports = getUser;
Then below is an example route showing how to use the getUser module. You will want to require it like you do every other Node.js dependency. Hope this helps someone in the future.
let getUser = require("getuser");
//Make a route to use the getUser module and pass in our argument value.
app.get("/", (req, res) => {
//With in the get user function pass in whatever you want to equal 'u' from the getuser module.
getUser(req.user.username);
res.render("index", { username: req.user });
});
I have the following strange situation. I've started writing map/reduce jobs for MongoDB. I now have code (see the map/reduce/finalize functions below) that works, except for the "finalize" step. The same code, run through the mongo shell, produces the expected results (a file_count field is added to the result, based on the array length). Both the JS and shell versions populate the files_per_disc collection equally (the files arrays, per disc, contain the same results). I have already tried setting jsMode to true/false.
Any help/suggestions?
Thanks!!
Sander
var mongojs = require('mongojs');
var db = mongojs('mongodb://super.local/sha', ['files', 'files_per_disc']);
db.on('error', function(err) {
console.log('database error', err)
})
db.on('connect', function() {
console.log('database connected')
})
var files_per_disc = {};
files_per_disc.execute = function() {
console.log('execute');
var mapper = function() {
var key = this.disc; // Disc
var mapObject = {
files: [this.filename]
};
if (this.filesize > 5000000000) {
emit(key, mapObject);
}
};
var reducer = function(key, values) {
var reducedObject = {
files: []
};
values.forEach(function(value) {
value.files.forEach(function(item) {
if (reducedObject.files.indexOf(item) == -1) {
reducedObject.files.push(item);
}
});
});
return reducedObject;
};
var finalizer = function(key, reducedObject) {
if (reducedObject.files) {
reducedObject.file_count = reducedObject.files.length;
}
return reducedObject;
};
db.files.mapReduce(
mapper,
reducer, {
jsMode: false,
out: 'files_per_disc',
finalize: finalizer
},
function(err, result) {
if (err) {
return console.log(err)
};
return console.log(result);
}
);
};
module.exports = files_per_disc;
I am struggling to find a way to write data to a CSV in Node.js.
There are several CSV plugins available; however, they only 'write' to stdout.
Ideally I want to write on a row-by-row basis using a loop.
You can use fs (https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback):
var dataToWrite;
var fs = require('fs');
fs.writeFile('form-tracking/formList.csv', dataToWrite, 'utf8', function (err) {
if (err) {
console.log('Some error occurred - file either not saved or corrupted file saved.');
} else{
console.log('It\'s saved!');
}
});
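If you want the row-by-row loop described in the question, fs.appendFile (or its synchronous variant) also works, at the cost of one file operation per row. A small sketch with illustrative data and the same file path as above:
var fs = require('fs');
var rows = [['1', 'foo'], ['2', 'bar']]; // illustrative rows
rows.forEach(function (row) {
    fs.appendFileSync('form-tracking/formList.csv', row.join(',') + '\n', 'utf8');
});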
The docs for node-csv-parser (npm install csv) specifically state that it can be used with streams (see fromStream, toStream). So it's not hard-coded to use stdout.
Several other CSV parsers also come up when you npm search csv -- you might want to look at them too.
Here is a simple example using csv-stringify to write a dataset that fits in memory to a csv file using fs.writeFile.
import stringify from 'csv-stringify'; // note: csv-stringify v6+ uses a named export: import { stringify } from 'csv-stringify'
import fs from 'fs';
let data = [];
let columns = {
id: 'id',
name: 'Name'
};
for (var i = 0; i < 10; i++) {
data.push([i, 'Name ' + i]);
}
stringify(data, { header: true, columns: columns }, (err, output) => {
if (err) throw err;
fs.writeFile('my.csv', output, (err) => {
if (err) throw err;
console.log('my.csv saved.');
});
});
If you want to use a loop as you say, you can do something like this with Node fs:
let fs = require("fs")
let writeStream = fs.createWriteStream('/path/filename.csv')
someArrayOfObjects.forEach((someObject, index) => {
let newLine = []
newLine.push(someObject.stringPropertyOne)
newLine.push(someObject.stringPropertyTwo)
// ... push any further properties here
writeStream.write(newLine.join(',')+ '\n', () => {
// a line was written to stream
})
})
writeStream.end()
writeStream.on('finish', () => {
console.log('finish write stream, moving along')
}).on('error', (err) => {
console.log(err)
})
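One caveat with hand-rolled CSV like this: values containing commas, quotes, or line breaks will corrupt the output unless they are quoted and escaped. A minimal helper to run each value through before pushing it (a sketch of the common RFC 4180 quoting rules, not a full implementation):
function escapeCsvValue(value) {
    const str = String(value)
    // Quote the value if it contains a separator, quote, or newline,
    // doubling any embedded quotes.
    if (/[",\n]/.test(str)) {
        return '"' + str.replace(/"/g, '""') + '"'
    }
    return str
}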
If you don't want to use any library besides fs, you can do it manually:
let fileString = ""
let separator = ","
let fileType = "csv"
let file = `fileExample.${fileType}`
Object.keys(jsonObject[0]).forEach(value=>fileString += `${value}${separator}`)
fileString = fileString.slice(0, -1)
fileString += "\n"
jsonObject.forEach(transaction=>{
Object.values(transaction).forEach(value=>fileString += `${value}${separator}`)
fileString = fileString.slice(0, -1)
fileString += "\n"
})
fs.writeFileSync(file, fileString, 'utf8')
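For example, with this illustrative dataset defined beforehand:
const jsonObject = [
    { id: 1, square: 1 },
    { id: 2, square: 4 }
]
the snippet produces fileExample.csv containing:
id,square
1,1
2,4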
For those who prefer fast-csv:
const { writeToPath } = require('@fast-csv/format');
const path = `${__dirname}/people.csv`;
const data = [{ name: 'Stevie', id: 10 }, { name: 'Ray', id: 20 }];
const options = { headers: true, quoteColumns: true };
writeToPath(path, data, options)
.on('error', err => console.error(err))
.on('finish', () => console.log('Done writing.'));
If you don't want to use any library besides fs, you can do it manually. Moreover, you can filter the data however you want before writing it to the file:
router.get('/apiname', (req, res) => {
const data = arrayOfObject; // you will get from somewhere
/*
// Modify old data (New Key Names)
let modifiedData = data.map(({ oldKey1: newKey1, oldKey2: newKey2, ...rest }) => ({ newKey1, newKey2, ...rest }));
*/
const path = './test'
writeToFile(path, data, (result) => {
// get the result from callback and process
console.log(result) // success or error
});
});
const writeToFile = (path, data, callback) => {
    // JSON.stringify(data, null, 2) formats the data with line breaks and indentation
    fs.writeFile(path, JSON.stringify(data, null, 2), (err) => {
        if (!err) {
            callback('success');
        } else {
            callback('error'); // some error (catch this error)
        }
    });
}
This is the code that worked for me in NestJS:
import { Parser } from "json2csv";
const csv = require('csvtojson');
const csvFilePath = process.cwd() + '/' + file.path;
let csv data = await csv().fromFile(csvFilePath); /// read data from csv into an array of json
/// * from here how to write data into csv *
data.push({
label: value,
.......
})
}
const fields = [
'field1','field2', ...
]
const parser = new Parser({ fields, header:false }); /// if dont want header else remove header: false
const csv = parser.parse(data);
appendFileSync('./filename.csv',`${csv}\n`); // remove /n if you dont want new line at the end