Node.js Async File System not working correctly

I'm just trying to do this, simply:
Convert an object to JSON.
Write that JSON data to a new file.
Read from that newly created file.
Write the read data to another new file.
PROBLEM:
I'm getting some kind of error with the data argument of the readFile callback, but I don't know how to resolve it (which is exactly why I'm asking this question).
NODE.JS CODE:
const fs = require('fs')
const object1 = {
  name: "Zaeem Javed",
  age: 21,
  profession: "Web Application Developer"
}
const jsonData = JSON.stringify(object1)
fs.writeFile("jsonDataFile.json", jsonData, (err) => {
  console.log(err)
})
const readJSON = fs.readFile("jsonDataFile.json", "utf-8", (err, data) => {
  console.log(data)
})
fs.writeFile("readJSON.txt", readJSON, (err) => {
  console.log(err)
})

This code solved my problem:
const fs = require('fs')
const object1 = {
  name: "Zaeem Javed",
  age: 21,
  profession: "Web Application Developer"
}
const jsonData = JSON.stringify(object1)
fs.writeFile("jsonDataFile.json", jsonData, (err) => {
  if (err) return console.log(err)
  console.log("Write Successful")
  // Read only after the write has completed
  fs.readFile("jsonDataFile.json", "utf-8", (err, data) => {
    if (err) return console.log(err)
    const readJSON = data
    fs.writeFile("readJSON.json", readJSON, (err) => {
      console.log(err)
    })
  })
})
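The key point is that writeFile and readFile are asynchronous, so each step has to start only after the previous one has finished (hence the nesting). For comparison, here is a minimal sketch of the same write → read → write flow using the promise-based fs/promises API (available in modern Node.js versions), which avoids nesting callbacks:
const fsp = require('fs/promises')

const object1 = {
  name: "Zaeem Javed",
  age: 21,
  profession: "Web Application Developer"
}

async function run() {
  const jsonData = JSON.stringify(object1)
  await fsp.writeFile("jsonDataFile.json", jsonData)                 // 1. write the JSON
  const readJSON = await fsp.readFile("jsonDataFile.json", "utf-8")  // 2. read it back
  await fsp.writeFile("readJSON.json", readJSON)                     // 3. write the copy
  console.log("Done")
}

run().catch(console.error)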


Is there any chance to output map method into variable as an array of objects?

Hello All!
I want to store users in a folder, one file per user, where the file name is equal to the user_id.
data
 | - users
 |    | - afdcab7e-b595-4a15-be0f-5f0337bd1317.json
 |    | - fdfacb7i-bk00-4a15-be0f-5f0337b1d991.json
Each user has their own data, for example:
{
  "_id": "afdcab7e-b595-4a15-be0f-5f0337bd1317",
  "email": "test1@test.pl",
  "password": "$2a$12$nIoudV7eXmJbU7e/P6YCbOccUkTbp8tcQKhyCEfmNOLihrW6QqPTC"
}
{
  "_id": "fdfacb7i-bk00-4a15-be0f-5f0337b1d991",
  "email": "test2@test.pl",
  "password": "$2a$12$nIoudV7eXmJbU7e/P6YCbOccUkTbp8tcQKhyCEfmNOLihrW6QqPTC"
}
Then I want to read the contents of all files and put the objects into one temporary array.
exports.indexSignin = (req, res) => {
  fs.readdir('./data/users', (err, files) => {
    if (err) console.log(err);
    const obj = [];
    files.map((file) => {
      fs.readFile(`./data/users/${file}`, 'utf-8', (err, data) => {
        if (err) console.log(err);
        obj.push(JSON.parse(data))
        console.log(obj)
      });
    });
    console.log(obj) // Here obj is empty, but I want an array
  });
  res.render('./index/index');
};
As output I want an array saved into a variable, like the one listed below:
[
  {
    "_id": "afdcab7e-b595-4a15-be0f-5f0337bd1317",
    "email": "test1@test.pl",
    "password": "$2a$12$nIoudV7eXmJbU7e/P6YCbOccUkTbp8tcQKhyCEfmNOLihrW6QqPTC"
  },
  {
    "_id": "fdfacb7i-bk00-4a15-be0f-5f0337b1d991",
    "email": "test2@test.pl",
    "password": "$2a$12$nIoudV7eXmJbU7e/P6YCbOccUkTbp8tcQKhyCEfmNOLihrW6QqPTC"
  }
]
Do you have any ideas on how to use the mapped data externally, or how to refactor this in a better way?
Thank you, guys!
I solved my problem with your help.
Here is the working example I needed:
const { readdir, readFile } = require("fs/promises"); // needed for readdir/readFile below

exports.indexSignin = (req, res) => {
  const readFiles = async () => {
    try {
      const path = "./data/users"
      const files = await readdir(path);
      const fileAwaits = files.map(file => readFile(`${path}/${file}`, "utf8"))
      const contents = await Promise.all(fileAwaits)
      return contents.map(co => JSON.parse(co))
    } catch (err) {
      throw err;
    }
  }

  readFiles()
    .then(test => console.log(test))
    .catch(err => console.log('Directory not found.'))
    .finally(() => console.log('Rest of the code...'));
  res.render('./index/index');

  // or, alternatively, an IIFE that does the same (use one approach or the other)
  (async () => {
    try {
      const test = await readFiles();
      console.log(test);
    } catch (err) {
      console.log('Directory not found.');
    }
    console.log('Rest of the code...')
    res.render('./index/index');
  })()
};
This is the same kind of code on my side, and it works. I hope it helps you.
const { readdir, readFile } = require("fs/promises");

const readFiles = async () => {
  try {
    const path = "./test"
    const files = await readdir(path);
    console.log(files)
    const fileAwaits = files.map(file => readFile(`${path}/${file}`, "utf8"))
    const contents = await Promise.all(fileAwaits)
    console.log(contents.map(co => JSON.parse(co)))
  } catch (err) {
    console.error(err)
  }
}
readFiles()
So if you want to use this inside your API handlers, change it as below:
exports.indexSignin = async (req, res) => {
  try {
    const path = "./test" // replace the path with your own
    const files = await readdir(path);
    console.log(files)
    const fileAwaits = files.map(file => readFile(`${path}/${file}`, "utf8"))
    const contents = await Promise.all(fileAwaits)
    const arrayContent = contents.map(co => JSON.parse(co))
    console.log(arrayContent);
  } catch (err) {
    console.error(err)
  }
  res.render('./index/index');
};

Error: Module name "fs" has not been loaded yet for context. Trying to write in a JSON file

Hi everyone :) I've been working on a Node.js project where I read data from a JSON file and use it with an API to fetch the weather, among other things. I'm trying to write the new data into another JSON file using require.js, but it does not seem to work. Here is my code; I hope you can help me :)
const fs = require('fs');

var myObject = {
  Latitud: data.coord.lat,
  Longitud: data.coord.lon,
  Temperatura: data.main.temp,
  Ciudad: data.name,
  Humedad: data.main.humidity,
  Descripción: data.weather[0].description,
  Viento: data.wind.speed,
};

// convert JSON object to a string
const _myObject = JSON.stringify(myObject);

// write file to disk
fs.writeFile('../js/datareceived.json', _myObject, 'utf8', (err) => {
  if (err) {
    console.log(`Error writing file: ${err}`);
  } else {
    console.log(`File is written successfully!`);
  }
});
and here is the full script
async function calcWeather(){
  const info = await fetch('../js/data.json') // fetch editable c:
    .then(function(response) {
      return response.json();
    });
  for (var i in info) {
    const lat = info[i][0].latjson;
    const long = info[i][0].lonjson;
    const fs = require('fs');
    const base = `https://api.openweathermap.org/data/2.5/weather?lat=${lat}&lon=${long}&appid=${api_key}&units=metric&lang=sp`;
    fetch(base)
      .then((responses) => {
        return responses.json();
      })
      .then((data) => {
        var myObject = {
          Latitud: data.coord.lat,
          Longitud: data.coord.lon,
          Temperatura: data.main.temp,
          Ciudad: data.name,
          Humedad: data.main.humidity,
          Descripción: data.weather[0].description,
          Viento: data.wind.speed,
        };
        // convert JSON object to a string
        const _myObject = JSON.stringify(myObject);
        // write file to disk
        fs.writeFile('../js/datareceived.json', _myObject, 'utf8', (err) => {
          if (err) {
            console.log(`Error writing file: ${err}`);
          } else {
            console.log(`File is written successfully!`);
          }
        });
      });
  }
}
and here is how I load it in my HTML:
<script src = "./node_modules/requirejs/require.js"></script>

how to pipe an archive (zip) to an S3 bucket

I'm a bit confused about how to proceed. I am using archiver (a Node.js module) as a means to write data to a zip file. Currently, I have my code working when I write to a file (local storage).
var fs = require('fs');
var archiver = require('archiver');

var output = fs.createWriteStream(__dirname + '/example.zip');
var archive = archiver('zip', {
  zlib: { level: 9 }
});

archive.pipe(output);
archive.append(mybuffer, { name: 'msg001.txt' });
I'd like to modify the code so that the archive target file is an AWS S3 bucket. Looking at the code examples, I can specify the bucket name and key (and body) when I create the bucket object, as in:
var s3 = new AWS.S3();
var params = { Bucket: 'myBucket', Key: 'myMsgArchive.zip', Body: myStream };
s3.upload(params, function(err, data) {
  // …
});
Or
s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'myMsgArchive.zip' } });
s3.upload({ Body: myStream })
  .send(function(err, data) {
    // …
  });
With regard to my S3 example(s), myStream appears to be a readable stream, and I am confused about how to make this work, since archive.pipe requires a writable stream. Is this something where we need to use a pass-through stream? I've found an example where someone created a pass-through stream, but the example is too terse to gain a proper understanding. The specific example I am referring to is:
Pipe a stream to s3.upload()
Any help someone can give me would be greatly appreciated. Thanks.
This could be useful for anyone else wondering how to use pipe.
Since you correctly referenced the example using the pass-through stream, here's my working code:
1 - The routine itself, zipping files with node-archiver
exports.downloadFromS3AndZipToS3 = () => {
  // These are my input files I'm willing to read from S3 to ZIP them
  const files = [
    `${s3Folder}/myFile.pdf`,
    `${s3Folder}/anotherFile.xml`
  ]

  // Just in case you like to rename them as they have a different name in the final ZIP
  const fileNames = [
    'finalPDFName.pdf',
    'finalXMLName.xml'
  ]

  // Use promises to get them all
  const promises = []
  files.map((file) => {
    promises.push(s3client.getObject({
      Bucket: yourBucket,
      Key: file
    }).promise())
  })

  // Define the ZIP target archive
  let archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  })

  // Pipe!
  archive.pipe(uploadFromStream(s3client, 'someDestinationFolderPathOnS3', 'zipFileName.zip'))

  archive.on('warning', function(err) {
    if (err.code === 'ENOENT') {
      // log warning
    } else {
      // throw error
      throw err;
    }
  })

  // Good practice to catch this error explicitly
  archive.on('error', function(err) {
    throw err;
  })

  // The actual archive is populated here
  return Promise
    .all(promises)
    .then((data) => {
      data.map((thisFile, index) => {
        archive.append(thisFile.Body, { name: fileNames[index] })
      })
      archive.finalize()
    })
}
2 - The helper method
const uploadFromStream = (s3client, someFolder, aFilename) => {
  const pass = new stream.PassThrough()

  const s3params = {
    Bucket: yourBucket,
    Key: `${someFolder}/${aFilename}`,
    Body: pass,
    ContentType: 'application/zip'
  }

  s3client.upload(s3params, (err, data) => {
    if (err)
      console.log(err)
    if (data)
      console.log('Success')
  })

  return pass
}
The following example takes the accepted answer and makes it work with local files as requested.
const archiver = require("archiver")
const fs = require("fs")
const AWS = require("aws-sdk")
const s3 = new AWS.S3()
const stream = require("stream")

const zipAndUpload = async () => {
  const files = [`test1.txt`, `test2.txt`]
  const fileNames = [`test1target.txt`, `test2target.txt`]

  const archive = archiver("zip", {
    zlib: { level: 9 } // Sets the compression level.
  })

  files.map((thisFile, index) => {
    archive.append(fs.createReadStream(thisFile), { name: fileNames[index] })
  })

  const uploadStream = new stream.PassThrough()
  archive.pipe(uploadStream)
  archive.finalize()

  archive.on("warning", function (err) {
    if (err.code === "ENOENT") {
      console.log(err)
    } else {
      throw err
    }
  })
  archive.on("error", function (err) {
    throw err
  })
  archive.on("end", function () {
    console.log("archive end")
  })

  await uploadFromStream(uploadStream)
  console.log("all done")
}

const uploadFromStream = async pass => {
  const s3params = {
    Bucket: "bucket-name",
    Key: `streamtest.zip`,
    Body: pass,
    ContentType: "application/zip"
  }
  return s3.upload(s3params).promise()
}
zipAndUpload()
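A side note, not part of the original answers: if you are on AWS SDK for JavaScript v3, where s3.upload() no longer exists, a roughly equivalent sketch uses the Upload helper from @aws-sdk/lib-storage with the same PassThrough idea. The bucket name and key below are placeholders.
// Sketch only: same PassThrough approach, but with AWS SDK v3's multipart Upload helper.
const { S3Client } = require("@aws-sdk/client-s3")
const { Upload } = require("@aws-sdk/lib-storage")
const archiver = require("archiver")
const stream = require("stream")
const fs = require("fs")

const zipAndUploadV3 = async () => {
  const archive = archiver("zip", { zlib: { level: 9 } })
  const pass = new stream.PassThrough()
  archive.pipe(pass)
  archive.append(fs.createReadStream("test1.txt"), { name: "test1target.txt" })
  archive.finalize()

  const upload = new Upload({
    client: new S3Client({}),           // region picked up from the environment
    params: {
      Bucket: "bucket-name",            // placeholder
      Key: "streamtest.zip",            // placeholder
      Body: pass,
      ContentType: "application/zip"
    }
  })
  await upload.done()
  console.log("all done")
}

zipAndUploadV3().catch(console.error)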

Getting "ReferenceError" error while performing multiple chaining of promise in nodejs application for neo4j import using cypher query

I am writing a Node.js application using the neo4j JS driver (and the async utility module) to import data from a project's source JS files into a Neo4j database.
There are two folders, "State" and "City", and a single "country.js" file in my test project folder "testapp".
The "country.js" file contains the names and ids of the countries, and the country id is used as a parameter in the JS files present in the "State" and "City" folders.
-- snippet from the country.js file
const countries = [
  { id: "1", label: "USA" },
  { id: "2", label: "Scotland" }
];
module.exports = countries;
The "State" folder has js files which has naming convention like this: (Country-1-statelist.js, Country-2-statelist.js and so on).
--Sample from js file present in "State" folder.
const data = {"params":{"country_id":"1"},"items":
[{"id":"1","label":"Alaska"},{"id":"2","label":"Alabama"}]
module.exports = data;
Similarly, "City" folder has js files which has naming convention like this: (Country-1-state-1-list.js, Country-1-state-2-list.js and so on).
-- sample from a particular js file present in "City" folder
const data =
{"params":
{"country_id":"1","state_id":"1"},
"items":[
{"id":"1","label":"New York"},
{"id":"2","label":"Chicago"}
]
}
module.exports = data;
So, I am trying to add nodes with the labels Country, State and City such that they are imported with their relationships intact. But I am getting an error in Node.js while trying to chain multiple promises to import the data into my test DB.
So far I was able to import the data present in country.js and the other JS files from the "State" folder. But while trying to do the same with the JS files of the "City" folder, I am getting a "ReferenceError: item is not defined" error.
I have created an index.js file in Node.js to do this, which has the following code:
const neo4j = require('neo4j-driver').v1;
const driver = neo4j.driver("bolt://localhost", neo4j.auth.basic("neo4j", "testapp"));
const session = driver.session();
const async = require('async');

const appPathway = '/Users/reactor/Documents/testapp';
const country = require(appPathway + '/country');

async.eachSeries(country, (item, callback) => {
  const resPromise = session.run(
    `CREATE (c:country {label: "${item.label}", id: "${item.id}"}) RETURN c.id`
  );
  resPromise
    .then((data) => {
      return insertState(item.id);
    })
    .then((done) => {
      callback(null);
    })
    .catch((err) => {
      callback(err);
    });
},
(err) => {
  console.log(err);
  session.close();
  driver.close();
});
function insertState(countryID) {
  let state = require(appPathway + '/State/Country-' + countryID + '-statelist.js');
  return new Promise((resolve, reject) => {
    async.eachSeries(state.items, (item1, item2, callback) => {
      const resPromise = session.run(
        `
        CREATE (s:state {label: "${item.label}", id: "${item.id}"})
        WITH s
        MATCH (c:Country)
        WHERE c.id = '${countryID}'
        CREATE (s)-[r:LOCATED_IN]->(c)
        RETURN c,s
        `
      );
      resPromise
        .then((state) => {
          console.log(state);
          callback(null);
        })
      resPromise
        .then((data) => {
          return insertCity(item1.id, item2.id);
        })
        .then((done) => {
          callback(null);
        })
        .catch((err) => {
          callback(err);
        });
    }, (err) => {
      if (err) {
        reject(err);
      }
      resolve(true);
    });
  });
}
// The problem arose when I inserted the following block of code!
function insertCity(countryID, stateID) {
  let city = require(appPathway + '/city/Country-' + countryID + '-state-' + stateID + '-list.js');
  return new Promise((resolve, reject) => {
    async.eachSeries(city.items, () => {
      const resPromise = session.run(
        `
        CREATE (w:city {label: "${item.label}", id: "${item.id}"})
        WITH w
        MATCH (s:state)
        WHERE s.id = '${stateID}'
        /* How am I supposed to use the countryID parameter here in this Cypher query? It doesn't have a direct relation to the "country" label in this query. */
        CREATE (w)-[r:PRESENT_IN]->(s)
        RETURN w
        `
      );
      resPromise
        .then((city) => {
          console.log(city);
          callback(null);
        })
        .then((done) => {
          callback(null);
        })
        .catch((err) => {
          callback(err);
        });
    }, (err) => {
      if (err) {
        reject(err);
      }
      resolve(true);
    });
  });
}
I am new to ES6, Cypher, and promises, which may have caused this problem. Although I have looked into most of the posts, blogs, and videos related to chaining multiple promises, I am still unable to figure out how to do it when multiple parameters are present in a single JS file, and how to use those parameters to pull the data with a Cypher query.
Any help or suggestion would be really helpful for me now! Thanks in advance.
In insertState() and insertCity(), you are trying to dereference item.label and item.id, but there is no item in scope.
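To make that concrete, here is a rough sketch (not the poster's code) of how the two helpers could be rewritten. The async.eachSeries iterator receives exactly two arguments, the current element and a callback, so the element has to be given a name (item here) and the countryID/stateID values have to be passed down explicitly. This only addresses the scoping error, not the rest of the data model.
// Sketch only: naming the eachSeries iterator argument and threading the ids through.
function insertState(countryID) {
  const state = require(appPathway + '/State/Country-' + countryID + '-statelist.js');
  return new Promise((resolve, reject) => {
    async.eachSeries(state.items, (item, callback) => {   // "item" is the current state object
      session.run(
        `CREATE (s:state {label: "${item.label}", id: "${item.id}"})
         WITH s
         MATCH (c:country) WHERE c.id = '${countryID}'
         CREATE (s)-[r:LOCATED_IN]->(c)
         RETURN c, s`
      )
      .then(() => insertCity(countryID, item.id))          // pass both ids down
      .then(() => callback(null))
      .catch((err) => callback(err));
    }, (err) => (err ? reject(err) : resolve(true)));
  });
}

function insertCity(countryID, stateID) {
  const city = require(appPathway + '/city/Country-' + countryID + '-state-' + stateID + '-list.js');
  return new Promise((resolve, reject) => {
    async.eachSeries(city.items, (item, callback) => {     // again, name the current element
      session.run(
        `CREATE (w:city {label: "${item.label}", id: "${item.id}"})
         WITH w
         MATCH (s:state) WHERE s.id = '${stateID}'
         CREATE (w)-[r:PRESENT_IN]->(s)
         RETURN w`
      )
      .then(() => callback(null))
      .catch((err) => callback(err));
    }, (err) => (err ? reject(err) : resolve(true)));
  });
}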

Write to a CSV in Node.js

I am struggling to find a way to write data to a CSV in Node.js.
There are several CSV plugins available, however they only 'write' to stdout.
Ideally I want to write on a row-by-row basis using a loop.
You can use fs (https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback):
var dataToWrite;
var fs = require('fs');

fs.writeFile('form-tracking/formList.csv', dataToWrite, 'utf8', function (err) {
  if (err) {
    console.log('Some error occurred - file either not saved or corrupted file saved.');
  } else {
    console.log('It\'s saved!');
  }
});
The docs for node-csv-parser (npm install csv) specifically state that it can be used with streams (see fromStream, toStream). So it's not hard-coded to use stdout.
Several other CSV parsers also come up when you npm search csv -- you might want to look at them too.
Here is a simple example using csv-stringify to write a dataset that fits in memory to a CSV file using fs.writeFile.
import stringify from 'csv-stringify';
import fs from 'fs';

let data = [];
let columns = {
  id: 'id',
  name: 'Name'
};

for (var i = 0; i < 10; i++) {
  data.push([i, 'Name ' + i]);
}

stringify(data, { header: true, columns: columns }, (err, output) => {
  if (err) throw err;
  fs.writeFile('my.csv', output, (err) => {
    if (err) throw err;
    console.log('my.csv saved.');
  });
});
If you want to use a loop as you say, you can do something like this with Node fs:
let fs = require("fs")
let writeStream = fs.createWriteStream('/path/filename.csv')

someArrayOfObjects.forEach((someObject, index) => {
  let newLine = []
  newLine.push(someObject.stringPropertyOne)
  newLine.push(someObject.stringPropertyTwo)
  // ... push any other properties you need
  writeStream.write(newLine.join(',') + '\n', () => {
    // a line was written to stream
  })
})

writeStream.end()
writeStream.on('finish', () => {
  console.log('finish write stream, moving along')
}).on('error', (err) => {
  console.log(err)
})
In case you don't want to use any library besides fs, you can do it manually.
const fs = require('fs')
// jsonObject is assumed to be an array of flat objects sharing the same keys
let fileString = ""
let separator = ","
let fileType = "csv"
let file = `fileExample.${fileType}`

// header row from the keys of the first object
Object.keys(jsonObject[0]).forEach(value => fileString += `${value}${separator}`)
fileString = fileString.slice(0, -1)
fileString += "\n"

// one row per object
jsonObject.forEach(transaction => {
  Object.values(transaction).forEach(value => fileString += `${value}${separator}`)
  fileString = fileString.slice(0, -1)
  fileString += "\n"
})

fs.writeFileSync(file, fileString, 'utf8')
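One caveat with this manual approach (my note, not part of the original answer): values that contain the separator, quotes, or newlines will break the row layout unless they are quoted. A minimal escaping helper could look like this:
// Sketch: quote a value only when it contains characters that would break the CSV row.
const escapeCsvValue = (value) => {
  const str = String(value)
  if (/[",\n]/.test(str)) {
    return `"${str.replace(/"/g, '""')}"`   // double up embedded quotes
  }
  return str
}

// usage: build each row from escaped values instead of raw ones
const row = Object.values(transaction).map(escapeCsvValue).join(",")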
For those who prefer fast-csv:
const { writeToPath } = require('@fast-csv/format');

const path = `${__dirname}/people.csv`;
const data = [{ name: 'Stevie', id: 10 }, { name: 'Ray', id: 20 }];
const options = { headers: true, quoteColumns: true };

writeToPath(path, data, options)
  .on('error', err => console.error(err))
  .on('finish', () => console.log('Done writing.'));
In case you don't want to use any library besides fs, you can do it manually. Moreover, you can filter the data however you want before writing it to the file.
router.get('/apiname', (req, res) => {
  const data = arrayOfObject; // you will get this from somewhere
  /*
  // Modify old data (New Key Names)
  let modifiedData = data.map(({ oldKey1: newKey1, oldKey2: newKey2, ...rest }) => ({ newKey1, newKey2, ...rest }));
  */
  const path = './test'
  writeToFile(path, data, (result) => {
    // get the result from the callback and process it
    console.log(result) // success or error
  });
});

const writeToFile = (path, data, callback) => {
  fs.writeFile(path, JSON.stringify(data, null, 2), (err) => { // JSON.stringify(data, null, 2) helps you write the data line by line
    if (!err) {
      callback('success');
      // successful
    } else {
      callback('error');
      // some error (catch this error)
    }
  });
}
This is the code that worked for me in NestJS:
import { Parser } from "json2csv";
const csv = require('csvtojson');
const { appendFileSync } = require('fs');

const csvFilePath = process.cwd() + '/' + file.path;
let data = await csv().fromFile(csvFilePath); // read data from the CSV into an array of JSON objects

// from here: how to write data back into a CSV
data.push({
  label: value,
  // ...
})

const fields = [
  'field1', 'field2', // ...
]
const parser = new Parser({ fields, header: false }); // remove header: false if you do want the header row
const csvOutput = parser.parse(data);
appendFileSync('./filename.csv', `${csvOutput}\n`); // remove \n if you don't want a newline at the end
