Append text to existing json file node.js - javascript

I'm trying to add new text to an existing JSON file. I tried writeFileSync and appendFileSync, but the added text isn't formatted as JSON even when I use JSON.stringify.
const fs = require('fs');
// Reads and parses test.json; the parsed object exists only inside this
// callback and is discarded after logging.
fs.readFile("test.json", (err, data) => {
if( err) throw err;
// Shadows the callback's Buffer parameter with the parsed object.
var data = JSON.parse(data);
console.log(data);
});
// The field the author wants merged into the file.
var student = {
age: "23"
};
// BUG (the reported problem): appending raw text to a JSON document
// yields invalid JSON — the file must be re-written as a whole instead.
fs.appendFileSync("test.json", "age: 23");
// var writeData = fs.writeFileSync("test.json", JSON.stringify(student));
My json file
{ name: "kevin" }
Append turns out like this, {name: "kevin"}age: "23"
and writeFileSync turns out like {name: "kevin"}{age: "23"}
What I want is to continuously add text to my json file like so
{
name: "kevin",
age: "23"
}

First, don't use readFileSync and writeFileSync: they block execution and go against Node.js conventions. Here is the corrected code:
const fs = require('fs');

// Read -> modify -> write: the only safe way to "append" a field to a
// JSON file is to parse it, change the object, and rewrite the whole
// document.
fs.readFile("test.json", (err, buffer) => { // READ
  if (err) {
    return console.error(err);
  }
  const data = JSON.parse(buffer.toString());
  data.age = "23"; // MODIFY
  // NOTE: fs.writeFile's callback receives only (err) — there is no
  // result value, so the original `console.log(result)` always printed
  // undefined, and writeFile itself returns undefined (no point storing it).
  fs.writeFile("test.json", JSON.stringify(data), (err) => { // WRITE
    if (err) {
      return console.error(err);
    }
    console.log("Success");
  });
});
What this code does:
Reads the data from the file.
Modifies the data to get the new data the file should have.
Writes the data (does NOT append) back to the file.

Here's what you can do: read the data from the file, edit that data, then write it back again.
const fs = require("fs")

// Read–modify–write: load the JSON, add the field, write the whole
// document back. Passing (null, 2) to JSON.stringify pretty-prints the
// file across multiple lines, which is the output format the question
// asks for.
fs.readFile("test.json", (err, buffer) => {
  if (err) return console.error('File read error: ', err)
  const data = JSON.parse(buffer.toString())
  data.age = 23
  fs.writeFile("test.json", JSON.stringify(data, null, 2), err => {
    if (err) return console.error('File write error:', err)
  })
})

Related

parsem array of string data into json

I have a folder of .TXT files containing plain text. How can I insert that plain text into a MongoDB database using the mongoose and fs modules in Node.js?
const fs = require('fs');
// Raw lines of the scanner file (filled in asynchronously below).
var parsedata;
// Each line split on ";" — an array of string arrays.
var parsedTwice = [];
let fileName;
// Lists the directory contents. NOTE(review): fileName is assigned here
// but never used afterwards; this readdir and the readFile below run
// independently and in no guaranteed order.
fs.readdir("D:/scnner_data/public", (err, data) => {
if(err) throw err
console.log(data);
fileName = data;
console.log(fileName);
});
// Reads one hard-coded scanner file, splits it into lines, then splits
// each line into fields on ";". The results exist only after this
// callback runs — code executing before then sees the variables above
// still empty.
fs.readFile('D:/scnner_data/public/DC221020042103SC.TXT',"utf8", (err, data) =>{
if (err) {
console.log(err);
} else {
parsedata = data.split("\n");
for(let item of parsedata) {
parsedTwice.push(item.split(";"));
}
//console.log('successful' , parsedTwice);
// console.log('this line is being readed' + parsedata);
}
}
)

[ERR_INVALID_ARG_TYPE]

I keep getting
TypeError [ERR_INVALID_ARG_TYPE]: The "data" argument must be of type string or an instance of Buffer, TypedArray, or DataView. Received an instance of Array
I'm trying to take lines from a file and separate them at a : and put them into another file as two separate lines then run it through an app and separate the lines by whether they work or not.
const fs = require('fs');
// Output file for the split credentials.
const conf = '.\\login.conf'
// Folder whose files each contain "user:pass"-style text.
const directory = '.\\input\\'
fs.readdir(directory, (err, files) => {
files.forEach(files => {
var files_directory = directory + files
fs.readFile(files_directory, 'utf8', (err, data) => {
if (err) {console.log('error', err)}
var data_array = data.split(' ')
// NOTE(review): array + ' ' coerces the array to a comma-joined string;
// the Array.from(...) result on the next line is computed and discarded.
var spaced_data_array = data_array + ' '
Array.from(spaced_data_array)
var d = spaced_data_array.split(' ')
console.log(d)
var data = d + ''
// BUG (the reported error): login is an implicit-global ARRAY, and an
// array is not a valid "data" argument for fs.writeFile.
login = data.split(':')
fs.writeFile(conf, login, (err, result) => {
if (err) {console.log('error', err)}
})
// NOTE(review): exec is fire-and-forget here — nothing waits for the
// write above or for the process to finish.
require('child_process').exec('.\\ovpnconnector.exe start nordvpn.ovpn')
// NOTE(review): `work = 0` is an ASSIGNMENT (always falsy), not a
// comparison, and `holder` is never defined anywhere in this snippet.
if (work = 0) {
console.log('Not Working')
fs.writeFile('.\\output\\not_working.txt', holder, (err, result) => {
if (err) {console.log('error', err)}
})
}
// Same issues: assignment instead of ===, undefined holder.
if (work = 1) {
console.log('Working')
fs.writeFile('.\\output\\working.txt', holder, (err, result) => {
if (err) {console.log('error', err)}
})
}
})
})
})
This block causes the error:
login = data.split(':')
fs.writeFile(conf, login, (err, result) => {
login is an array and it cannot be used as the second parameter of the fs.writeFile method. You can see it in the error message:
The "data" argument must be of type string or an instance of Buffer, TypedArray, or DataView. Received an instance of Array
Action
What you're trying to do:
take lines from a file and separate them at a : and put them into
another file as two separate lines
You can merge the array to string by the newline character.
login = data.split(':').join('\r\n')
fs.writeFile(conf, login, (err, result) => {

Node.js - Cannot append global variable when using fs

I'm trying to read multiple XML files and parse data from them. I managed to do that, but now a new problem has appeared.
The allData variable is never changed, no matter what I do. What am I supposed to do here?
I don't know what to do or what to try; this is my first time working with files, and I'm honestly surprised I've managed to come this far.
var parseString = require('xml2js').parseString;
var fs = require('fs')
// Accumulator for the parsed XML entries (filled asynchronously).
var allData = {
store: []
}
// NOTE(review): onFileContent/onError are never used, and the first
// parameter shadows Node's module-level __dirname.
function readFiles(__dirname, onFileContent, onError) {
fs.readdir(__dirname + '\\parse\\', function (err, filenames) {
if (err) {
// NOTE(review): readdir errors are silently swallowed here.
return;
}
filenames.forEach(function (filename) {
console.log(filename)
fs.readFile(__dirname + '\\parse\\' + filename, 'utf-8', function (err, content) {
if (err) {
console.log(err)
return;
}
parseString(content, function (err, result) {
let tempObj = {}
let data = result.storeD[0]
if (data.name) {
tempObj['name'] = data.name[0];
}
if (data.price) {
tempObj['price'] = data.price[0];
}
//more of the same type of code
console.log(tempObj)
//output: { name: 'Data1', price: '1000' }
allData.store.push(tempObj)
})
})
})
});
// BUG (the reported problem): readdir/readFile are async, so this line
// runs before any push above completes — it always logs the empty store.
console.log("All data: ",allData)
//Outputs once at the begining
//output: All data: { store: [] }
}
readFiles(__dirname)
SOLVED
Adjusted the code to use .readFileSync() (removing the callback function), and now it works.
var parseString = require('xml2js').parseString;
var fs = require('fs')

// Collected results: one { name, price } record per XML file.
var allData = {
store: []
}

// Walks __dirname\parse\, reads each XML file *synchronously*, extracts
// the fields, and pushes them onto allData.store. Because the reads
// block, the store is fully populated by the time it is logged at the
// end of the readdir callback.
function readFiles(__dirname, onFileContent, onError) {
fs.readdir(__dirname + '\\parse\\', function (err, filenames) {
if (err) {
return;
}
for (var filename of filenames) {
console.log(filename)
var xml = fs.readFileSync(__dirname + '\\parse\\' + filename, 'utf-8')
parseString(xml, function (err, result) {
var record = {}
var fields = result.storeD[0]
if (fields.name) {
record.name = fields.name[0];
}
if (fields.price) {
record.price = fields.price[0];
}
//more of the same type of code
console.log(record)
//output: { name: 'Data1', price: '1000' }
allData.store.push(record)
})
}
console.log("All data: ",allData)
});
}
readFiles(__dirname)
The .readdir() and .readFile() methods are async, so in fact the console.log() is executed before all of the readFile operations.
In order to access the allData variable after these operations are complete, you have to either make them sync using .readFileSync() instead or you need to promisify the .readFile() method and wait for all of the promises to resolve.

looping through an json array list

From the following array list, I need to get all the prices one by one.
This logs the full JSON text (console.log('File data:', jsonString);), but the for loop never seems to run — it is never entered. I need to loop through a JSON file that lives in a different folder: the file is list.json inside the menu folder (projectName\menu\list.json). The file looks like this:
The data:
[
{
"code": "ZC",
"price": "1"
},
{
"code": "ZS",
"price": "3"
},
{
"code": "VC",
"price": "4"
},
...]
JS:
// NOTE(review): fs.readFile returns undefined — jsonList never holds data.
const jsonList = fs.readFile("../menu/list.json", "utf8", (err, jsonString) => {
if (err) {
console.log("File read failed:", err);
return;
}
console.log("File data:", jsonString);
// NOTE(review): jsonString is raw text, so .url is undefined.
console.log("File data:", jsonString.url);
// BUG: for...in over a STRING iterates character indexes; each one-char
// string has no .price, and the text was never JSON.parse'd.
for (var key in jsonString) {
if (jsonString.hasOwnProperty(key)) {
console.log("===>", jsonString[key].price);
}
// NOTE(review): returns from the callback on the first iteration, and
// the returned value goes nowhere.
return jsonString;
}
});
There are two ways to fix the issue you are facing, one is to have your code run inside the callback:
// Parse the file text once inside the callback, then iterate the parsed
// array. FIX: the original looped over JSON.parse(jsonString) but read
// from the outer `jsonList` variable, which only holds fs.readFile's
// return value (undefined) — so the loop body could never work.
fs.readFile("../menu/list.json", "utf8", (err, jsonString) => {
  if (err) {
    console.log("File read failed:", err);
    return;
  }
  console.log("File data:", jsonString);
  const jsonList = JSON.parse(jsonString); // array of { code, price }
  for (const item of jsonList) {
    console.log("===>", item.price);
  }
});
or by using sync function to read file:
// Synchronous variant: readFileSync returns the file contents directly,
// so the parsed list is available immediately.
const jsonString = fs.readFileSync("../menu/list.json", "utf8");
console.log("File data:", jsonString);
const jsonList = JSON.parse(jsonString);
// The file holds an ARRAY, so iterate its values directly — for...in
// over an array yields index strings and is an anti-pattern. (The
// copied "// This is never called" comments were wrong here: in this
// version the loop body does run.)
for (const item of jsonList) {
  console.log("===>", item.price);
}
I think you need to loop in the callback as it is async and so jsonList is not the object you expect when you access it. See Get data from fs.readFile

Parsing JSON result from node oracledb

I am using oracledb with node and fetching data from it asynchronously. For the sake of ease, I have implemented it using separate files like below -
config.js -
module.exports = {
user : "user",
password : "password",
connectString : "*connstring*" ,
deliveredQuery: " SELECT COUNT (DISTINCT order_num) AS Cnt from orders where department = 'HR'
};
query2.js :
// Factory module: calling the exported function installs queryDB.
module.exports = function(callback) {//pass callback function and return with this
var oracledb = require('oracledb');
var dbConfig = require('./config.js');
// NOTE(review): assigning to `this` inside a plain (non-strict) function
// call attaches queryDB to the GLOBAL object — an implicit global. The
// factory's own `callback` parameter is shadowed below and never used.
this.queryDB = function(query,callback) {
oracledb.getConnection({
user: dbConfig.user,
password: dbConfig.password,
connectString: dbConfig.connectString,
// NOTE(review): deliveredQuery is not a standard getConnection option;
// presumably ignored by oracledb — verify against the driver docs.
deliveredQuery: dbConfig.deliveredQuery
}, function(err, connection) {
if (err) {
console.error(err.message);
return callback(err);
}
connection.execute(query, function(err, result) {
if (err) {
// NOTE(review): on execute failure the connection is released but
// `callback` is never invoked — callers hang silently.
console.error(err.message);
doRelease(connection);
return;
}
//console.log(result.metaData);
//console.log(JSON.parse(result.rows[0][0]));
doRelease(connection);
// First row, first column is parsed as JSON and handed to the caller.
return callback(null, JSON.parse(result.rows[0][0]))
});
});
// Releases the connection, reporting release errors via the callback.
function doRelease(connection) {
connection.release(function(err) {
if (err) {
console.error(err.message);
return callback(err);
}
});
}
};
};
serv_ontime.js :
var dbConfig = require('./config.js');
// Invoking the query2 factory installs queryDB as an implicit global.
var res = require('./query2.js')();
// NOTE(review): config.js defines deliveredQuery, not onTime_query, so
// this is undefined.
var onTime_query = dbConfig.onTime_query;
// BUG (the reported problem): queryDB(...) returns undefined, so this
// module exports undefined — the query result is only ever delivered to
// the inline callback, which discards it.
module.exports = queryDB(onTime_query, function(err, callback){ });
index.js :
var res = require('./serv_ontime.js');
console.log("The result is= "+ res);
Now, when I run node index.js from my cmd, I get the output [object Object]. I suppose this is because the call happens asynchronously. But as you can see in query2.js, I am returning the value after parsing (using JSON.parse), yet the value I get in index.js is still not the parsed one. How can I parse the value in index.js? I have already tried JSON.parse, but it doesn't work.
You are getting [object Object] as output because you are using + with a string ('The result is= '), so the JS engine converts the object to a string. To view it as an object, log it separately or pass it as a separate comma-separated argument:
console.log('The result is= ', res); (or)
console.log(res); // or console.dir(res)
Or you can get String version of it, by doing JSON.stringify(res)
console.log('The result is= ', JSON.stringify(res));
In serv_ontime.js you are exporting the result of queryDB, which is indeed undefined. Try this:
serv_ontime.js
var dbConfig = require('./config.js');
var res = require('./query2.js')();
var onTime_query = dbConfig.onTime_query;
module.exports = function (callback) {
queryDB(onTime_query, callback)
};
index.js
// Pass a callback; the query result only exists once the DB call finishes.
const serv_ontime = require('./serv_ontime.js');
serv_ontime((error, res) => {
console.log("The error is= ", error);
console.log("The result is= ", res);
});

Categories