How to get object keys and values from other JavaScript files - javascript

I am running the script with $ node index.js.
In index.js I loop over and get the list of files below; from each file we need to read the "testData" object. Could you please help me get that data?
var listOfFiles = ['test/fileOne.js',
                   'test/example.js'];
Each file contains:
// test/fileOne.js
var testData = {
  tags: 'tag1 tag2 tag3',
  setup: 'one_tier'
};

// test/example.js
var testData = {
  tags: 'tag3',
  setup: 'two_tier'
};
My Code: index.js
let fs = require("fs");
const glob = require("glob");

var getDirectories = function (src, callback) {
  glob(src + '/**/*.js', callback);
};

getDirectories('tests', function (err, res) {
  if (err) {
    console.log('Error', err);
  } else {
    var listOfFiles = res;
    for (let val of listOfFiles) {
      ///// HERE we have to get the tags and setup from each js file /////
    }
  }
});

You can read the content of each file as a string using fs.readFileSync and pull the values out with a regular expression:
for (const val of listOfFiles) {
  ///// HERE we get the tags and setup from each js file /////
  const content = fs.readFileSync(val, 'utf8');
  console.log(content);
  const tags = content.match(/tags: '(.*?)'/)[1];
  console.log(tags);
  const setup = content.match(/setup: '(.*?)'/)[1];
  console.log(setup);
}
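Note that String.prototype.match returns null when a file contains no testData literal, so the [1] above would throw for such files. A more defensive sketch (extractField is a hypothetical helper, not from any library):
// Tolerate files that don't define the expected fields.
function extractField(content, field) {
  const m = content.match(new RegExp(field + ": '(.*?)'"));
  return m ? m[1] : null; // null when the field is absent
}

for (const val of listOfFiles) {
  const content = fs.readFileSync(val, 'utf8');
  const tags = extractField(content, 'tags');
  const setup = extractField(content, 'setup');
  if (tags !== null && setup !== null) {
    console.log(val, '->', tags, setup);
  }
}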

Related

Nodejs path.resolve is not defined

// codenotworking
const path = require("path");
const fs = require("fs");

log = console.log;

const names = [];

function collectFileNamesRecursively(path) {
  fs.readdir(path, (err, files) => {
    err ? log(err) : log(files);

    // replacing paths
    for (const index in files) {
      const file = files[index];
      files[index] = path.resolve(path, file);
    }

    for (let file of files) {
      fs.stat(file, (err, stat) => {
        err ? log(err) : null;
        if (stat.isDirectory()) {
          collectFileNamesRecursively(file);
        }
        names.push(file);
      });
    }
  });
}

collectFileNamesRecursively(path.join(__dirname, "../public"));
I am using Node.js v10.8.0 and the directory structure is:
- project/
  - debug/
    - codenotworking.js
  - public/
    - js/
      - file2.js
    - file.html
Whenever I run this code I get the following error:
TypeError: path.resolve is not a function
    at fs.readdir (C:\backup\project\debug\codenotworking.js:17:24)
    at FSReqWrap.oncomplete (fs.js:139:20)
What am I doing wrong here?
You're shadowing your path import by naming the parameter of collectFileNamesRecursively path as well, so inside the function path is a string, not the module. Change the parameter name to something else.
Apart from that, using recursion with callbacks this way won't work reliably - I would recommend using async/await. Something like:
const path = require('path');
const fs = require('fs');

async function collectFileNamesRecursively(currBasePath, foundFileNames) {
  const dirContents = await fs.promises.readdir(currBasePath);

  for (const file of dirContents) {
    const currFilePath = path.resolve(currBasePath, file);
    const stat = await fs.promises.stat(currFilePath);

    if (stat.isDirectory()) {
      await collectFileNamesRecursively(currFilePath, foundFileNames);
    } else {
      foundFileNames.push(file);
    }
  }
}
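A minimal usage sketch for the function above, assuming the same ../public directory as the question:
(async () => {
  const foundFileNames = [];
  await collectFileNamesRecursively(path.join(__dirname, '../public'), foundFileNames);
  console.log(foundFileNames); // all file names, printed once the whole walk finishes
})();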

NodeJS script to modify a JSON file

I need to write a NodeJS script for the following task:
I have a temp.json file with content like:
{
"name": "foo",
"id": "1.2.15"
}
When we run the script, I want the temp.json file's content changed. Specifically, I want the number after the second dot in id to be incremented, as follows:
{
"name": "foo",
"id": "1.2.16"
}
I don't know JavaScript and would appreciate any help.
Thanks!
"use strict";
const fs = require('fs');
const data = JSON.parse(fs.readFileSync("file.json"));
const nums = data.id.split('.');
++nums[2];
data.id = nums.join('.');
fs.writeFileSync("file.json", JSON.stringify(data, null, 4));
And if you want to do it without breaking the async nature of Node, you can use the asynchronous functions as well:
const fs = require('fs');

fs.readFile('temp.json', 'utf8', (e, data) => {
  const obj = JSON.parse(data);
  const idParts = obj.id.split('.').map((el) => parseInt(el, 10));
  idParts[2] = idParts[2] + 1;
  obj.id = idParts.join('.');
  fs.writeFile('out.json', JSON.stringify(obj), (err) => {
    console.log(err || 'complete');
  });
});
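For newer Node versions, a minimal sketch of the same increment with the fs.promises API (same temp.json layout assumed):
const fs = require('fs').promises;

async function bumpId(file) {
  const obj = JSON.parse(await fs.readFile(file, 'utf8'));
  const parts = obj.id.split('.');
  parts[2] = parseInt(parts[2], 10) + 1; // increment only the third segment
  obj.id = parts.join('.');
  await fs.writeFile(file, JSON.stringify(obj, null, 4));
}

bumpId('temp.json').catch(console.error);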

Passing arguments to a callback function within a separate module

I want to create a separate module with a function that takes a callback, which I call from my main file.
My program should take 3 args: first a directory, then a file extension, and finally a callback function that filters the directory listing by file extension.
Here's my module.js file
var fs = require('fs')
module.exports = function (directory, fileExtension, callbackfun) {
fs.readdir(directory, callbackfun);
}
Then in the main file I import the module and use the function:
modulejs(process.argv[2], process.argv[3], callbackfun(callbackfn));

var callbackfn = function (err, data, fileExtension) {
  console.log(fileExtension);
  let filtred = data.filter(file => { if (file.indexOf("." + fileExtension) > -1) { console.log(file); } });
};

function callbackfun(callbackfunc1) {
  callbackfunc1(err, data, fileExtension);
}
The error that I actually get is
ReferenceError: err is not defined
UPDATE: I have edited my code and now get a different error.
UPDATE2: I found a solution on the internet but don't understand how it works. If anybody could explain it to me step by step, that would be nice.
module.js
const fs = require('fs')
const path = require('path')

module.exports = function (dir, filterStr, callback) {
  fs.readdir(dir, function (err, list) {
    if (err) {
      return callback(err)
    }

    list = list.filter(function (file) {
      return path.extname(file) === '.' + filterStr
    })

    callback(null, list)
  })
}
main.js
const filterFn = require('./module.js')
const dir = process.argv[2]
const filterStr = process.argv[3]

filterFn(dir, filterStr, function (err, list) {
  if (err) {
    return console.error('There was an error:', err)
  }
  list.forEach(function (file) {
    console.log(file)
  })
})
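Step by step: the module calls fs.readdir on dir; if that fails, it forwards the error to your callback with return callback(err); otherwise it keeps only the entries whose path.extname equals '.' + filterStr and hands the filtered list back as callback(null, list). This error-first argument order is the standard Node callback convention. Assuming a folder containing .js files, you would run it as:
node main.js ./somedir js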

Write / add data in JSON file using Node.js

I am trying to write a JSON file using Node from loop data, e.g.:
let jsonFile = require('jsonfile');
for (i = 0; i < 11; i++) {
jsonFile.writeFile('loop.json', "id :" + i + " square :" + i * i);
}
Output in loop.json is:
id :1 square : 1
but I want the output file to look like this (below), and if I run the code again it should add the new output as elements to the same existing JSON file:
{
  "table": [
    {
      "Id": 1,
      "square": 1
    },
    {
      "Id": 2,
      "square": 4
    },
    {
      "Id": 3,
      "square": 9
    },
    {
      "Id": 4,
      "square": 16
    },
    {
      "Id": 5,
      "square": 25
    },
    {
      "Id": 6,
      "square": 36
    },
    {
      "Id": 7,
      "square": 49
    },
    {
      "Id": 8,
      "square": 64
    },
    {
      "Id": 9,
      "square": 81
    },
    {
      "Id": 10,
      "square": 100
    }
  ]
}
I want to use the same file that I created the first time, but whenever I run the code, new elements should be added to that same file.
const fs = require('fs');

let obj = {
  table: []
};

// note: fs.exists is deprecated; fs.stat or fs.access is preferred in newer code
fs.exists('myjsonfile.json', function (exists) {
  if (exists) {
    console.log("yes file exists");
    fs.readFile('myjsonfile.json', function readFileCallback(err, data) {
      if (err) {
        console.log(err);
      } else {
        obj = JSON.parse(data);
        for (let i = 0; i < 5; i++) {
          obj.table.push({
            id: i,
            square: i * i
          });
        }
        let json = JSON.stringify(obj);
        // fs.writeFile requires a callback in current Node versions
        fs.writeFile('myjsonfile.json', json, function (err) {
          if (err) console.log(err);
        });
      }
    });
  } else {
    console.log("file does not exist");
    for (let i = 0; i < 5; i++) {
      obj.table.push({
        id: i,
        square: i * i
      });
    }
    let json = JSON.stringify(obj);
    fs.writeFile('myjsonfile.json', json, function (err) {
      if (err) console.log(err);
    });
  }
});
If this JSON file won't become too big over time, you should try the following:
Create a JavaScript object with the table array in it
var obj = {
  table: []
};
Add some data to it, for example:
obj.table.push({ id: 1, square: 2 });
Convert it from an object to a string with JSON.stringify
var json = JSON.stringify(obj);
Use fs to write the file to disk
var fs = require('fs');
fs.writeFile('myjsonfile.json', json, 'utf8', callback); // callback is your own (err) => { ... } completion handler
If you want to append to it, read the JSON file back and convert it to an object again
fs.readFile('myjsonfile.json', 'utf8', function readFileCallback(err, data) {
  if (err) {
    console.log(err);
  } else {
    obj = JSON.parse(data); // now it's an object
    obj.table.push({ id: 2, square: 3 }); // add some data
    json = JSON.stringify(obj); // convert it back to a JSON string
    fs.writeFile('myjsonfile.json', json, 'utf8', callback); // write it back
  }
});
This will work effectively for data up to about 100 MB. Over this limit, you should use a database engine.
UPDATE:
Create a function which returns the current date (year + month + day) as a string, and name the file after this string plus .json. The fs module has a function for checking file existence: fs.stat(path, callback).
With this, you can check whether the file exists. If it does, use the read function; if not, use the create function. Use the date string as the path, because the file is named after today's date plus .json. The callback receives a stats object, along with an error (ENOENT) when the file does not exist.
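A minimal sketch of that idea (the todayString helper name is hypothetical, not from any library):
const fs = require('fs');

// Hypothetical helper: today's date as a YYYYMMDD string.
function todayString() {
  const d = new Date();
  return `${d.getFullYear()}${String(d.getMonth() + 1).padStart(2, '0')}${String(d.getDate()).padStart(2, '0')}`;
}

const file = todayString() + '.json';

fs.stat(file, (err, stats) => {
  if (err && err.code === 'ENOENT') {
    // File does not exist yet: create it with an empty table.
    fs.writeFile(file, JSON.stringify({ table: [] }), (err) => {
      if (err) console.log(err);
    });
  } else if (err) {
    console.log(err);
  } else {
    // File exists: read it, append, and write it back as shown above.
    console.log(file, 'already exists,', stats.size, 'bytes');
  }
});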
Please try the following program; it should give you the output you are expecting.
var fs = require('fs');

var data = {};
data.table = [];

for (var i = 0; i < 26; i++) {
  var obj = {
    id: i,
    square: i * i
  };
  data.table.push(obj);
}

fs.writeFile("input.json", JSON.stringify(data), function (err) {
  if (err) throw err;
  console.log('complete');
});
Save this program in a JavaScript file, say, square.js.
Then run the program from the command prompt using the command node square.js.
What it does is simply overwrite the existing file with a new set of data every time you execute the command.
Happy Coding.
Try:
var fs = require("fs");

var sampleObject = { /* your data */ };

fs.writeFile("./object.json", JSON.stringify(sampleObject, null, 4), (err) => {
  if (err) {
    console.error(err);
    return;
  }
  console.log("File has been created");
});
For a synchronous approach:
const fs = require('fs')
fs.writeFileSync('file.json', JSON.stringify(jsonVariable)); // jsonVariable holds your data
You should read the file every time you want to add new properties to the JSON, and then add them:
var fs = require('fs');

fs.readFile('data.json', function (err, content) {
  if (err) throw err;
  var parseJson = JSON.parse(content);
  for (var i = 0; i < 11; i++) {
    parseJson.table.push({ id: i, square: i * i });
  }
  fs.writeFile('data.json', JSON.stringify(parseJson), function (err) {
    if (err) throw err;
  });
});
The above example is also correct, but here is a simpler one:
var fs = require("fs");

var sampleObject = {
  name: 'pankaj',
  member: 'stack',
  type: {
    x: 11,
    y: 22
  }
};

fs.writeFile("./object.json", JSON.stringify(sampleObject, null, 4), (err) => {
  if (err) {
    console.error(err);
    return;
  }
  console.log("File has been created");
});
For formatting, jsonfile provides a spaces option which you can pass as a parameter:
jsonfile.writeFile(file, obj, { spaces: 2 }, function (err) {
  console.error(err);
});
Or set jsonfile.spaces = 4. See the jsonfile documentation for details.
I would not suggest writing to the file on each iteration of the loop; instead, construct the JSON object in the loop and write to the file outside the loop:
var jsonfile = require('jsonfile');

var obj = {
  'table': []
};

for (var i = 0; i < 11; i++) {
  obj.table.push({ "id": i, "square": i * i });
}

jsonfile.writeFile('loop.json', obj, { spaces: 2 }, function (err) {
  console.log(err);
});
I agree with the above answers; here is a complete read-and-write sample for anyone who needs it:
router.post('/', function (req, res, next) {
  console.log(req.body);
  var id = Math.floor((Math.random() * 100) + 1);
  var title = req.body.title;
  var description = req.body.description;
  var mynotes = { "Id": id, "Title": title, "Description": description };

  fs.readFile('db.json', 'utf8', function (err, data) {
    if (err) return console.log(err);
    var obj = JSON.parse(data); // db.json is assumed to hold a JSON array
    obj.push(mynotes);
    var strNotes = JSON.stringify(obj);
    fs.writeFile('db.json', strNotes, function (err) {
      if (err) return console.log(err);
      console.log('Note added');
    });
  });
});
Promise based solution [Javascript (ES6) + Node.js (V10 or above)]
Write to the file:
const fsPromises = require('fs').promises;

fsPromises.writeFile('myFile.json', JSON.stringify({ name: "Sridhar", salary: 1234 }))
  .then(() => { console.log('JSON saved'); })
  .catch(er => { console.log(er); });
Append to the file:
const fsPromises = require('fs').promises;

fsPromises.readFile('myFile.json', 'utf8')
  .then(data => {
    let json = JSON.parse(data); // assumes the file already holds a myArr array
    json.myArr.push({ name: "Krishnan", salary: 5678 });
    fsPromises.writeFile('myFile.json', JSON.stringify(json))
      .then(() => { console.log('Append Success'); })
      .catch(err => { console.log("Append Failed: " + err); });
  })
  .catch(err => { console.log("Read Error: " + err); });
If your project supports JavaScript ES8, you could use async/await instead of the promise chains.
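A minimal async/await sketch of the same append, again assuming myFile.json already holds a myArr array:
const fsPromises = require('fs').promises;

async function appendEntry(entry) {
  const json = JSON.parse(await fsPromises.readFile('myFile.json', 'utf8'));
  json.myArr.push(entry); // myArr is assumed to exist in the file
  await fsPromises.writeFile('myFile.json', JSON.stringify(json));
}

appendEntry({ name: "Krishnan", salary: 5678 })
  .then(() => console.log('Append Success'))
  .catch(err => console.log("Append Failed: " + err));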

Write to a CSV in Node.js

I am struggling to find a way to write data to a CSV in Node.js.
There are several CSV plugins available, however they only 'write' to stdout.
Ideally I want to write on a row-by-row basis using a loop.
You can use fs (https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback):
var dataToWrite;
var fs = require('fs');

fs.writeFile('form-tracking/formList.csv', dataToWrite, 'utf8', function (err) {
  if (err) {
    console.log('Some error occurred - file either not saved or corrupted file saved.');
  } else {
    console.log('It\'s saved!');
  }
});
The docs for node-csv-parser (npm install csv) specifically state that it can be used with streams (see fromStream, toStream). So it's not hard-coded to use stdout.
Several other CSV parsers also come up when you npm search csv -- you might want to look at them too.
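For the stream route, a small sketch with csv-stringify (assuming a recent version with the named stringify export); the stringifier is a Transform stream, so it can be piped to any writable target, not just stdout:
const fs = require('fs');
const { stringify } = require('csv-stringify');

const stringifier = stringify({ header: true, columns: ['id', 'name'] });
stringifier.pipe(fs.createWriteStream('rows.csv')); // file target instead of stdout

stringifier.write([1, 'Alice']);
stringifier.write([2, 'Bob']);
stringifier.end();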
Here is a simple example using csv-stringify to write a dataset that fits in memory to a csv file using fs.writeFile.
import stringify from 'csv-stringify';
import fs from 'fs';

let data = [];
let columns = {
  id: 'id',
  name: 'Name'
};

for (var i = 0; i < 10; i++) {
  data.push([i, 'Name ' + i]);
}

stringify(data, { header: true, columns: columns }, (err, output) => {
  if (err) throw err;
  fs.writeFile('my.csv', output, (err) => {
    if (err) throw err;
    console.log('my.csv saved.');
  });
});
If you want to use a loop as you say, you can do something like this with Node fs:
let fs = require("fs");
let writeStream = fs.createWriteStream('/path/filename.csv');

someArrayOfObjects.forEach((someObject, index) => {
  let newLine = [];
  newLine.push(someObject.stringPropertyOne);
  newLine.push(someObject.stringPropertyTwo);
  ....
  writeStream.write(newLine.join(',') + '\n', () => {
    // a line was written to the stream
  });
});

writeStream.end();

writeStream.on('finish', () => {
  console.log('finish write stream, moving along');
}).on('error', (err) => {
  console.log(err);
});
In case you don't want to use any library besides fs, you can do it manually:
let fileString = "";
let separator = ",";
let fileType = "csv";
let file = `fileExample.${fileType}`;

// Header row from the keys of the first object
Object.keys(jsonObject[0]).forEach(value => fileString += `${value}${separator}`);
fileString = fileString.slice(0, -1); // drop the trailing separator
fileString += "\n";

// One row per object
jsonObject.forEach(transaction => {
  Object.values(transaction).forEach(value => fileString += `${value}${separator}`);
  fileString = fileString.slice(0, -1);
  fileString += "\n";
});

fs.writeFileSync(file, fileString, 'utf8');
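One caveat with the manual approach: values containing the separator, quotes, or newlines will break the layout. A sketch of a quoting helper (escapeCsv is a hypothetical name, not from any library):
// Wrap a value in quotes when it contains a separator, quote, or newline,
// doubling any embedded quotes per the usual CSV convention.
function escapeCsv(value, separator = ',') {
  const s = String(value);
  if (s.includes(separator) || s.includes('"') || s.includes('\n')) {
    return '"' + s.replace(/"/g, '""') + '"';
  }
  return s;
}

// e.g. escapeCsv('Doe, John') === '"Doe, John"'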
For those who prefer fast-csv:
const { writeToPath } = require('@fast-csv/format');

const path = `${__dirname}/people.csv`;
const data = [{ name: 'Stevie', id: 10 }, { name: 'Ray', id: 20 }];
const options = { headers: true, quoteColumns: true };

writeToPath(path, data, options)
  .on('error', err => console.error(err))
  .on('finish', () => console.log('Done writing.'));
In case you don't want to use any library besides fs, you can do it manually; moreover, you can filter the data however you want before writing it to the file:
router.get('/apiname', (req, res) => {
  const data = arrayOfObject; // you will get this from somewhere
  /*
  // Modify old data (new key names)
  let modifiedData = data.map(({ oldKey1: newKey1, oldKey2: newKey2, ...rest }) => ({ newKey1, newKey2, ...rest }));
  */
  const path = './test';
  writeToFile(path, data, (result) => {
    // get the result from the callback and process it
    console.log(result); // success or error
  });
});

const writeToFile = (path, data, callback) => {
  fs.writeFile(path, JSON.stringify(data, null, 2), (err) => { // JSON.stringify(data, null, 2) writes the data one line per field
    if (!err) {
      callback('success');
    } else {
      callback('error'); // some error (catch this error)
    }
  });
};
This is the code that worked for me in NestJS:
import { Parser } from "json2csv";
import { appendFileSync } from "fs";
const csvtojson = require('csvtojson');

const csvFilePath = process.cwd() + '/' + file.path;
let data = await csvtojson().fromFile(csvFilePath); // read the CSV into an array of JSON objects

// from here, how to write the data back out as CSV:
data.push({
  label: value,
  .......
});

const fields = [
  'field1', 'field2', ...
];
const parser = new Parser({ fields, header: false }); // drop header: false if you do want a header row
const csv = parser.parse(data);
appendFileSync('./filename.csv', `${csv}\n`); // remove \n if you don't want a newline at the end
