NodeJS script to modify a JSON file

I need to write a NodeJS script for the following task:
I have a temp.json file with content like:
{
"name": "foo",
"id": "1.2.15"
}
When we run the script, I want the temp.json file's content changed. Specifically, I want the number after the 2nd decimal point in id to be incremented, as follows:
{
"name": "foo",
"id": "1.2.16"
}
I don't know JavaScript and would appreciate any help.
Thanks!

"use strict";
const fs = require('fs');
const data = JSON.parse(fs.readFileSync("file.json"));
const nums = data.id.split('.');
++nums[2];
data.id = nums.join('.');
fs.writeFileSync("file.json", JSON.stringify(data, null, 4));

And if you want to do it without breaking the async nature of Node, you can do it with the asynchronous fs functions as well:
const fs = require('fs');

fs.readFile('temp.json', 'utf8', (err, data) => {
    if (err) throw err;
    const obj = JSON.parse(data);
    // parse each part of the id as a number and bump the last one
    const idParts = obj.id.split('.').map((el) => parseInt(el, 10));
    idParts[2] = idParts[2] + 1;
    obj.id = idParts.join('.');
    fs.writeFile('temp.json', JSON.stringify(obj), (err) => {
        console.log(err || 'complete');
    });
});
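On Node 10 and later, the same thing can also be written with the promise-based fs API; a minimal sketch (the bumpPatch name is just for illustration):

const fs = require('fs').promises;

async function bumpPatch(file) {
    const data = JSON.parse(await fs.readFile(file, 'utf8'));
    const nums = data.id.split('.');
    nums[2] = parseInt(nums[2], 10) + 1; // increment the part after the 2nd dot
    data.id = nums.join('.');
    await fs.writeFile(file, JSON.stringify(data, null, 4));
}

bumpPatch('temp.json').catch(console.error);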

Related

Read value at index in a huge json array

I need to get the value at a given index in a json array containing millions of entries.
The json file looks a bit like this:
[
{"street":"123 Here street", "city":"Here city"},
{"street":"456 There street", "city":"There city"}
]
I can get it working using stream-json with the following code:
const { chain } = require('stream-chain');
const { parser } = require('stream-json');
const { streamArray } = require('stream-json/streamers/StreamArray');
const fs = require('fs');

const filepath = 'data.json'; // placeholder: path to the large JSON file
const chosenIndex = 8;

const pipeline = chain([
    fs.createReadStream(filepath),
    parser(),
    streamArray()
]);

pipeline.on('data', data => {
    if (data.key === chosenIndex) {
        console.log(data.value);
    }
});
pipeline.on('end', () => { console.log('All Done'); });
I'm just afraid this is not the fastest way to do this.
Thanks in advance!
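One change worth trying, as a sketch: stop the pipeline as soon as the chosen index arrives, so the rest of the file is never read or parsed. This assumes chain() returns a standard Node stream with destroy(), which stream-chain's Chain (a Duplex subclass) does:

pipeline.on('data', data => {
    if (data.key === chosenIndex) {
        console.log(data.value);
        pipeline.destroy(); // stop reading and parsing the rest of the file
    }
});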

A JSON file created with NodeJS holds the wrong data

I wrote the following code to create a formatted JSON file that holds the names of all files (testfile1, testfile2, testfile3) that exist in a directory called "uploads".
const fs = require("fs");
let toJson = {files: []};
let file = new Object();
function createJSON(){
fs.readdir("./uploads", (err, files) => {
//add each files name to obj
files.forEach((item, index) => {
file.name = item;
console.log(index, item);
toJson.files.push(file);
});
var saveJson = JSON.stringify(toJson, null, 4);
fs.writeFile('./files.json', saveJson, 'utf8', (err) => {
if(err) throw err;
console.log(err);
});
});
}
The output from console.log(index, item) is as expected:
0 testfile1
1 testfile2
2 testfile3
However, the JSON file that is being created holds the following names:
{
"files": [
{
"name": "testfile3"
},
{
"name": "testfile3"
},
{
"name": "testfile3"
}
]
}
instead of the intended
{
"files": [
{
"name": "testfile1"
},
{
"name": "testfile2"
},
{
"name": "testfile3"
}
]
}
Your problem lies in memory referencing. In the following code, you set the name on the file object and push it to the array. However, you are not pushing a copy of the object; you are pushing a reference to the same file object every time. This means that on subsequent iterations, you are also changing the attributes of the elements already in the array.
files.forEach((item, index) => {
file.name = item;
console.log(index, item);
toJson.files.push(file);
});
See an example below of these memory issues.
const array = [];
const item = {};
item.name = 5;
array.push(item);
item.name = 4;
console.log(array); // [ { name: 4 } ] (the array holds a reference, so it reflects the later change)
const copy = Object.assign({}, item);
copy.name = 3;
array.push(copy);
item.name = 6;
console.log(array); // [ { name: 6 }, { name: 3 } ] (the copy is unaffected by later changes to item)
To fix this, you need to create a copy of the object at each step. There should be enough information in this post to assist you with this: What is the most efficient way to deep clone an object in JavaScript?
An example of fixed code would be the following:
files.forEach((item, index) => {
const f = Object.assign({}, file);
f.name = item;
console.log(index, item);
toJson.files.push(f);
});
You are not changing the file object inside your forEach loop; you are only changing its name property. That's why you end up with the same file name repeated.
Just move the line file = new Object() or file = {} to the beginning of the forEach loop, as in the sketch below.
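A minimal sketch of that fix applied to the original loop (everything else stays the same):

files.forEach((item, index) => {
    file = {}; // fresh object on each iteration, so each push keeps its own name
    file.name = item;
    console.log(index, item);
    toJson.files.push(file);
});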
You could do this synchronously. You are also reusing the same object on every iteration. You can greatly simplify your code:
const fs = require("fs");
function createJSON(){
const files = fs.readdirSync("./uploads").map((file, index) => {
console.log(index, file)
return {
name: file
}
});
const output = JSON.stringify({
files
}, undefined, 4)
fs.writeFileSync('./files.json', output)
}

How to get object keys and values from other JavaScript files

I am running $ npm run index.js.
In index.js I am looping over the list of files below, and from each file I need to read the testData object. Could you please help me get this data?
var listOfFiles = ['test/fileOne.js',
    'test/example.js'];
Each file contains something like:
/test/fileOne.js
var testData = {
tags: 'tag1 tag2 tag3',
setup: 'one_tier'
}
/test/example.js
var testData = {
tags: 'tag3',
setup: 'two_tier'
}
My code, index.js:
let fs = require("fs")
const glob = require("glob");
var getDirectories = function (src, callback) {
    glob(src + '/**/*.js', callback);
};

getDirectories('tests', function (err, res) {
    if (err) {
        console.log('Error', err);
    } else {
        var listOfFiles = res;
        for (let val of listOfFiles) {
            ///// HERE we have to get the tags and setup from each js file ////
        }
    }
});
You can read the content of each file as a string using fs.readFileSync and pull the values out with regular expressions:
for (const val of listOfFiles) {
    ///// HERE we have to get the tags and setup from each js file ////
    const content = fs.readFileSync(val, 'utf8');
    console.log(content);
    const tags = content.match(/tags: '(.*?)'/)[1];
    console.log(tags);
    const setup = content.match(/setup: '(.*?)'/)[1];
    console.log(setup);
}
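If you can edit the test files themselves, an alternative sketch (assuming each file is allowed to export its testData) is to require the files directly instead of parsing them as text:

// in test/fileOne.js: export the object instead of only declaring it
module.exports = {
    tags: 'tag1 tag2 tag3',
    setup: 'one_tier'
};

// in index.js
const path = require('path');
for (const val of listOfFiles) {
    const testData = require(path.resolve(val)); // load the file as a module
    console.log(testData.tags, testData.setup);
}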

Write / add data in JSON file using Node.js

I am trying to write a JSON file using Node from loop data, e.g.:
let jsonFile = require('jsonfile');
for (i = 0; i < 11; i++) {
jsonFile.writeFile('loop.json', "id :" + i + " square :" + i * i);
}
The output in loop.json is:
id :1 square : 1
but I want the output file to look like this (below), and if I run the code again it should add the new output as elements to the same existing JSON file:
{
"table":[
{
"Id ":1,
"square ":1
},
{
"Id ":2,
"square ":3
},
{
"Id ":3,
"square ":9
},
{
"Id ":4,
"square ":16
},
{
"Id ":5,
"square ":25
},
{
"Id ":6,
"square ":36
},
{
"Id ":7,
"square ":49
},
{
"Id ":8,
"square ":64
},
{
"Id ":9,
"square ":81
},
{
"Id ":10,
"square ":100
}
]
}
I want to use the same file that I created the first time, but whenever I run the code, the new elements should be added to that same file.
const fs = require('fs');

let obj = {
    table: []
};

fs.exists('myjsonfile.json', function (exists) {
    if (exists) {
        console.log("yes file exists");
        fs.readFile('myjsonfile.json', function readFileCallback(err, data) {
            if (err) {
                console.log(err);
            } else {
                obj = JSON.parse(data);
                for (let i = 0; i < 5; i++) {
                    obj.table.push({
                        id: i,
                        square: i * i
                    });
                }
                let json = JSON.stringify(obj);
                fs.writeFile('myjsonfile.json', json, (err) => {
                    if (err) console.log(err);
                });
            }
        });
    } else {
        console.log("file does not exist");
        for (let i = 0; i < 5; i++) {
            obj.table.push({
                id: i,
                square: i * i
            });
        }
        let json = JSON.stringify(obj);
        fs.writeFile('myjsonfile.json', json, (err) => {
            if (err) console.log(err);
        });
    }
});
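Note that fs.exists is deprecated. A sketch of the same logic with fs.existsSync and the promise-based API (assuming Node 10 or later):

const fs = require('fs');

async function appendSquares(file) {
    // start from the existing file if present, otherwise from an empty table
    let obj = { table: [] };
    if (fs.existsSync(file)) {
        obj = JSON.parse(await fs.promises.readFile(file, 'utf8'));
    }
    for (let i = 0; i < 5; i++) {
        obj.table.push({ id: i, square: i * i });
    }
    await fs.promises.writeFile(file, JSON.stringify(obj));
}

appendSquares('myjsonfile.json').catch(console.error);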
If this JSON file won't become too big over time, you should try:
Create a JavaScript object with the table array in it
var obj = {
table: []
};
Add some data to it, for example:
obj.table.push({id: 1, square:2});
Convert it from an object to a string with JSON.stringify
var json = JSON.stringify(obj);
Use fs to write the file to disk
var fs = require('fs');
fs.writeFile('myjsonfile.json', json, 'utf8', callback);
If you want to append data, read the JSON file and convert it back to an object:
fs.readFile('myjsonfile.json', 'utf8', function readFileCallback(err, data) {
    if (err) {
        console.log(err);
    } else {
        obj = JSON.parse(data); // now it's an object
        obj.table.push({ id: 2, square: 4 }); // add some data
        json = JSON.stringify(obj); // convert it back to JSON
        fs.writeFile('myjsonfile.json', json, 'utf8', callback); // write it back
    }
});
This approach works reasonably well for files up to roughly 100 MB. Beyond that limit, you should use a database engine.
UPDATE:
Create a function that returns the current date (year + month + day) as a string, and name the file with that string plus .json. The fs module has a function for checking file existence, fs.stat(path, callback).
With this you can check whether the file exists. If it exists, use the read function; if not, use the create function. Use the date string as the path, because the file will be named after today's date plus .json. The callback receives a stats object, and the error argument is set if the file does not exist.
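A minimal sketch of that idea (the exact date format is an assumption):

const fs = require('fs');

// 'YYYY-MM-DD' for today, used as the file name
const file = new Date().toISOString().slice(0, 10) + '.json';

fs.stat(file, (err) => {
    if (err) {
        // err is set when the file does not exist yet: create it
        fs.writeFile(file, JSON.stringify({ table: [] }), (e) => {
            if (e) throw e;
        });
    } else {
        // the file exists: read it, push the new rows, and write it back, as shown above
    }
});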
Try the following program; it should produce the kind of output you are expecting.
var fs = require('fs');
var data = {};
data.table = [];

// build the table in memory first
for (let i = 0; i < 26; i++) {
    var obj = {
        id: i,
        square: i * i
    };
    data.table.push(obj);
}

// then write it out in one go
fs.writeFile("input.json", JSON.stringify(data), function (err) {
    if (err) throw err;
    console.log('complete');
});
Save this program in a JavaScript file, say square.js, then run it from the command prompt using node square.js.
It simply overwrites the existing file with a new set of data every time you execute the command.
Happy coding!
Try:
var fs = require("fs");
var sampleObject = { your data };
fs.writeFile("./object.json", JSON.stringify(sampleObject, null, 4), (err) => {
if (err) { console.error(err); return; };
console.log("File has been created");
});
For synchronous approach
const fs = require('fs')
fs.writeFileSync('file.json', JSON.stringify(jsonVariable));
You should read the file every time you want to add new properties to the JSON, and then add the new properties:
var fs = require('fs');

fs.readFile('data.json', function (err, content) {
    if (err) throw err;
    var parseJson = JSON.parse(content);
    for (let i = 0; i < 11; i++) {
        parseJson.table.push({ id: i, square: i * i });
    }
    fs.writeFile('data.json', JSON.stringify(parseJson), function (err) {
        if (err) throw err;
    });
});
The example above is also correct, but here is a simpler one:
var fs = require("fs");
var sampleObject = {
name: 'pankaj',
member: 'stack',
type: {
x: 11,
y: 22
}
};
fs.writeFile("./object.json", JSON.stringify(sampleObject, null, 4), (err) => {
if (err) {
console.error(err);
return;
};
console.log("File has been created");
});
For formatting, jsonfile provides a spaces option that you can pass as a parameter:
jsonfile.writeFile(file, obj, { spaces: 2 }, function (err) {
    if (err) console.error(err);
});
Or use jsonfile.spaces = 4. See the jsonfile documentation for details.
I would not suggest writing to the file on each loop iteration; instead, construct the JSON object in the loop and write it to the file once, outside the loop:
var jsonfile = require('jsonfile');
var obj = {
    'table': []
};

for (let i = 0; i < 11; i++) {
    obj.table.push({ "id": i, "square": i * i });
}

jsonfile.writeFile('loop.json', obj, { spaces: 2 }, function (err) {
    if (err) console.log(err);
});
I agree with the above answers; here is a complete read-and-write sample for anyone who needs it.
router.post('/', function (req, res, next) {
    console.log(req.body);
    var id = Math.floor((Math.random() * 100) + 1);
    var title = req.body.title;
    var description = req.body.description;
    var mynotes = { "Id": id, "Title": title, "Description": description };
    fs.readFile('db.json', 'utf8', function (err, data) {
        if (err) return console.log(err);
        var obj = JSON.parse(data); // db.json is expected to hold a JSON array
        obj.push(mynotes);
        var strNotes = JSON.stringify(obj);
        fs.writeFile('db.json', strNotes, function (err) {
            if (err) return console.log(err);
            console.log('Note added');
        });
    });
});
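Note this sample assumes db.json already exists and contains a JSON array; seeding it once avoids a parse error on the first run. A small sketch:

const fs = require('fs');
if (!fs.existsSync('db.json')) fs.writeFileSync('db.json', '[]'); // one-time seed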
Promise-based solution [JavaScript (ES6) + Node.js (v10 or above)]
Write to the File:
const fsPromises = require('fs').promises;

fsPromises.writeFile('myFile.json', JSON.stringify({ myArr: [{ name: "Sridhar", salary: 1234 }] }))
    .then(() => { console.log('JSON saved'); })
    .catch(err => { console.log(err); });
Append to the File:
const fsPromises = require('fs').promises;

fsPromises.readFile('myFile.json', 'utf8')
    .then(data => {
        let json = JSON.parse(data);
        json.myArr.push({ name: "Krishnan", salary: 5678 });
        fsPromises.writeFile('myFile.json', JSON.stringify(json))
            .then(() => { console.log('Append Success'); })
            .catch(err => { console.log("Append Failed: " + err); });
    })
    .catch(err => { console.log("Read Error: " + err); });
If your project supports JavaScript ES8 (ES2017), you can use async/await instead of raw promises, as in the sketch below.
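A sketch of the append step rewritten with async/await (same file layout as above):

const fsPromises = require('fs').promises;

async function appendToFile() {
    try {
        const data = await fsPromises.readFile('myFile.json', 'utf8');
        const json = JSON.parse(data);
        json.myArr.push({ name: "Krishnan", salary: 5678 });
        await fsPromises.writeFile('myFile.json', JSON.stringify(json));
        console.log('Append Success');
    } catch (err) {
        console.log("Append Failed: " + err);
    }
}

appendToFile();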

Write to a CSV in Node.js

I am struggling to find a way to write data to a CSV in Node.js.
There are several CSV plugins available however they only 'write' to stdout.
Ideally I want to write on a row-by-row basis using a loop.
You can use fs (https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback):
var dataToWrite;
var fs = require('fs');
fs.writeFile('form-tracking/formList.csv', dataToWrite, 'utf8', function (err) {
    if (err) {
        console.log('Some error occurred - file either not saved or corrupted file saved.');
    } else {
        console.log('It\'s saved!');
    }
});
The docs for node-csv-parser (npm install csv) specifically state that it can be used with streams (see fromStream, toStream). So it's not hard-coded to use stdout.
Several other CSV parsers also come up when you npm search csv -- you might want to look at them too.
Here is a simple example using csv-stringify to write a dataset that fits in memory to a CSV file with fs.writeFile.
import stringify from 'csv-stringify';
import fs from 'fs';
let data = [];
let columns = {
id: 'id',
name: 'Name'
};
for (var i = 0; i < 10; i++) {
data.push([i, 'Name ' + i]);
}
stringify(data, { header: true, columns: columns }, (err, output) => {
if (err) throw err;
fs.writeFile('my.csv', output, (err) => {
if (err) throw err;
console.log('my.csv saved.');
});
});
If you want to use a loop as you say, you can do something like this with Node fs:
let fs = require("fs")
let writeStream = fs.createWriteStream('/path/filename.csv')
someArrayOfObjects.forEach((someObject, index) => {
let newLine = []
newLine.push(someObject.stringPropertyOne)
newLine.push(someObject.stringPropertyTwo)
....
writeStream.write(newLine.join(',')+ '\n', () => {
// a line was written to stream
})
})
writeStream.end()
writeStream.on('finish', () => {
console.log('finish write stream, moving along')
}).on('error', (err) => {
console.log(err)
})
In case you don't want to use any library besides fs, you can do it manually.
const fs = require('fs');

// jsonObject is assumed to be an array of flat objects with identical keys
let fileString = "";
let separator = ",";
let fileType = "csv";
let file = `fileExample.${fileType}`;

// header row from the keys of the first object
Object.keys(jsonObject[0]).forEach(value => fileString += `${value}${separator}`);
fileString = fileString.slice(0, -1); // drop the trailing separator
fileString += "\n";

// one row per object
jsonObject.forEach(transaction => {
    Object.values(transaction).forEach(value => fileString += `${value}${separator}`);
    fileString = fileString.slice(0, -1);
    fileString += "\n";
});

fs.writeFileSync(file, fileString, 'utf8');
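One caveat with the manual approach: values containing the separator, quotes, or newlines will break the output. A minimal hypothetical helper in the spirit of RFC 4180 that you could run each value through:

// wrap a value in quotes and double any embedded quotes
const escapeCsv = (value) => `"${String(value).replace(/"/g, '""')}"`;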
For those who prefer fast-csv:
const { writeToPath } = require('@fast-csv/format');
const path = `${__dirname}/people.csv`;
const data = [{ name: 'Stevie', id: 10 }, { name: 'Ray', id: 20 }];
const options = { headers: true, quoteColumns: true };
writeToPath(path, data, options)
.on('error', err => console.error(err))
.on('finish', () => console.log('Done writing.'));
In case you don't want to use any library besides fs, you can do it manually. Moreover, you can filter the data however you want before writing it to the file.
const fs = require('fs');

router.get('/apiname', (req, res) => {
    const data = arrayOfObject; // you will get this from somewhere
    /*
    // Modify old data (new key names)
    let modifiedData = data.map(({ oldKey1: newKey1, oldKey2: newKey2, ...rest }) => ({ newKey1, newKey2, ...rest }));
    */
    const path = './test';
    writeToFile(path, data, (result) => {
        // get the result from the callback and process it
        console.log(result); // success or error
    });
});

function writeToFile(path, data, callback) {
    // JSON.stringify(data, null, 2) writes the data with line breaks and indentation
    fs.writeFile(path, JSON.stringify(data, null, 2), (err) => {
        if (!err) {
            callback('success');
        } else {
            callback('error'); // some error (catch this error)
        }
    });
}
This is the code that worked for me in NestJS:
import { Parser } from "json2csv";
import { appendFileSync } from "fs";
const csvtojson = require('csvtojson');

const csvFilePath = process.cwd() + '/' + file.path;
// read the data from the CSV into an array of JSON objects
let data = await csvtojson().fromFile(csvFilePath);

// from here: how to write the data back out as CSV
data.push({
    label: value,
    // ... further fields
});

const fields = [
    'field1', 'field2', // ...
];
const parser = new Parser({ fields, header: false }); // drop header: false if you want a header row
const csvOutput = parser.parse(data);
appendFileSync('./filename.csv', `${csvOutput}\n`); // remove the \n if you don't want a trailing newline
