I am trying to find a way to get the currently logged-in user and then append them to a JSON file. Below is my code: it first reads the dir, gets the most recent file, returns it, and then appends the currently logged-in user.
I can append a string to the file, but when trying to access req.user it states
Cannot read property 'user' of undefined
What would I need to include in this file so that it knows who the user is?
let fs = require("fs"),
express = require("express"),
_ = require("underscore"),
User = require("./models/user"),
path = require("path");
let getFileAddUser = () => {
let filePath = '../automation_projects/wss-automation-u/results/temp/';
fs.readdir(filePath, (err, files) => {
if (err) { throw err; }
let file = getMostRecentFile(files, filePath);
console.log(file);
fs.readFile(filePath + file, 'utf8', (err, data) => {
let json = JSON.parse(data);
if(err){
console.error(err);
return;
} else {
//Un-comment to write to most recent file.
//==================================================
//This should find the currently logged in user and append them to the most recent file found.
json.currentuser = req.user;
fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
if(error){
console.error(error);
return;
} else {
console.log(json);
}
});
//==================================================
console.log(data);
}
});
});
};
//Get the most recent file from the results folder.
function getMostRecentFile(files, path) {
let out = [];
files.forEach(function(file) {
let stats = fs.statSync(path + "/" +file);
if(stats.isFile()) {
out.push({"file":file, "mtime": stats.mtime.getTime()});
}
});
out.sort(function(a,b) {
return b.mtime - a.mtime;
})
return (out.length>0) ? out[0].file : "";
}
module.exports = getFileAddUser;
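(A note on the error itself: in Express, req only exists inside a route handler or middleware, so a module-level function like getFileAddUser never receives it unless it is passed in. A minimal sketch of that idea, with a hypothetical route:)
//Hypothetical route: req is only defined here, so pass the user into the module.
app.get("/results", (req, res) => {
    getFileAddUser(req.user); //assumes getFileAddUser is changed to take a parameter
    res.sendStatus(200);
});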
Thanks to a knowledgeable co-worker and some further research, we were able to get this working. I'd like to share the code we came up with to append the currently logged-in user to our results file. You will also notice we got some help from the Ramda.js library.
let fs = require("fs"),
express = require("express"),
_ = require("underscore"),
User = require("./models/user"),
r = require("ramda"),
path = require("path");
//This will be our function to get the most recent file from our dir and
//return it to us. We than user this function below.
function getMostRecentFile(files, path) {
    let out = [];
    //read_directory resolves [path, ...items], so tail() drops the leading path entry.
    let f = r.tail(files);
    console.log(files);
    f.forEach(function(file) {
        let stats = fs.statSync(path + "/" + file);
        if (stats.isFile()) {
            out.push({ "file": file, "mtime": stats.mtime.getTime() });
        }
    });
    out.sort(function(a, b) {
        return b.mtime - a.mtime;
    });
    return (out.length > 0) ? out[0].file : "";
}
//Passing in 'u' as an argument which can then be used in a route to pass in
//anything that we want it to be. In our case it was the currently logged
//in user.
let getUser = (u) => {
    let filePath = '../automation_projects/wss-automation-u/results/temp/';
    //Comment above and uncomment below for testing locally.
    // let filePath = "./temp/";
    let file = "";
    //Below we read our dir, then get the most recent file using the
    //getMostRecentFile function above. Return the promise chain so
    //callers can await the result.
    return read_directory(filePath).then(files => {
        file = getMostRecentFile(files, filePath);
        console.log(file);
        return read_file(filePath + file);
    }).then(x => {
        //Here we parse our data, with x representing the data that we
        //returned above.
        let json = JSON.parse(x);
        return new Promise(function(resolve, reject) {
            json.currentuser = u;
            //And finally we write to the end of the latest file.
            fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
                if (error) reject(error);
                else resolve(json);
                // console.log(json);
            });
        });
    });
};
let read_directory = (path) => {
    return new Promise((resolve, reject) => {
        fs.readdir(path, (err, items) => {
            if (err) {
                return reject(err);
            }
            return resolve([path, ...items]);
        });
    });
};

let read_file = (path) => {
    return new Promise((resolve, reject) => {
        fs.readFile(path, "utf8", (err, items) => {
            if (err) {
                return reject(err);
            }
            return resolve(items);
        });
    });
};
module.exports = getUser;
Then below is an example route showing how to use the getUser module. You will want to require it like you do everything else with Node.js and dependencies. Hope this helps someone in the future.
let getUser = require("./getuser");
//Make a route to use the getUser module and pass in our argument value.
app.get("/", (req, res) => {
    //Within the getUser function, pass in whatever you want to equal 'u' from the getuser module.
    getUser(req.user.username);
    res.render("index", { username: req.user });
});
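One caveat worth adding (an assumption on my part, not something from the original setup): req.user is only populated after authentication middleware such as Passport has run, so a guard keeps unauthenticated requests from crashing the route:
//Hypothetical guard: bail out when no user is attached to the request.
app.get("/", (req, res) => {
    if (!req.user) {
        return res.redirect("/login"); //assumes a login route exists
    }
    getUser(req.user.username);
    res.render("index", { username: req.user });
});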
I am making an API with Node and Mongo that receives large volumes of data. I was getting an error because the size of the records stored in Mongo exceeded 16 MB, so I opted for the alternative Mongo offers in its GridFS documentation for inserting the records, which worked without problems. But I am having trouble with inserting and filtering, since I don't know how to do it; I read the documentation and there are several ways, but I can't figure out how to filter (find a record by its field) or how to update.
The function to create a record works, but it performs some extra steps, such as storing the JSON it receives in a file and then reading that file to create the record. I would have liked to find a more practical solution, such as inserting the JSON it receives directly, without having to create a file with its content and then get the information back from that file. I attach the code to see if you can tell me how to solve this problem:
const { MongoClient, ObjectId, GridFSBucket } = require('mongodb');
const { config } = require('../../config');
//const USER = encodeURIComponent(config.noRelDbUser);
//const PASSWORD = encodeURIComponent(config.noRelDbPassword);
const DB_NAME = config.noRelDbName;
const fs = require('fs');
const removeFile = require('../../modules/results/utils/RemoveFile');
// const MONGO_URI = `mongodb://${USER}:${PASSWORD}@${config.dbHost}:${config.dbPort}/admin?retryWrites=true&w=majority`
const MONGO_URI = `mongodb://${config.noRelDbHost}:${config.noRelDbPort}`;
class MongoLib {
    constructor() {
        this.client = new MongoClient(MONGO_URI, { useNewUrlParser: true, useUnifiedTopology: true });
        this.dbName = DB_NAME;
    }

    connect() {
        if (!MongoLib.connection) {
            MongoLib.connection = new Promise((resolve, reject) => {
                this.client.connect((err) => {
                    if (err) {
                        reject(err);
                    }
                    resolve(this.client.db(this.dbName));
                });
            });
        }
        return MongoLib.connection;
    }

    create(collection, data) {
        return this.connect()
            .then((db) => {
                return db.collection(collection).insertOne(data);
            })
            .then((result) => result.insertedId);
    }
    async createWithForBigData(collection, data, vr_id, remove = false) {
        let vrule_id = vr_id;
        return this.connect().then((db) => {
            //Wrap in a promise so the id from the 'finish' handler actually
            //reaches the caller (a bare return inside the handler is lost).
            return new Promise((resolve, reject) => {
                try {
                    const bucket = new GridFSBucket(db, {
                        bucketName: collection,
                        chunkSizeBytes: 260000,
                    });
                    let uploadStream = fs.createReadStream(data).pipe(bucket.openUploadStream(`resultsdetail${vrule_id}`));
                    let id = uploadStream.id;
                    uploadStream.on('error', (err) => {
                        console.log({ message: "Error uploading file" });
                        reject(err);
                    });
                    uploadStream.on('finish', () => {
                        console.log({ message: "File uploaded successfully, stored under Mongo ObjectID: " + id });
                        if (remove === true) {
                            console.log('removing file from the storebigdata directory');
                            removeFile(data);
                        }
                        resolve(id);
                    });
                } catch (err) {
                    console.log('an error occurred while storing big data', err);
                    reject(err);
                }
            });
        });
    }
    findBigData() {
        //
    }

    UpdateBigData() {
        //
    }
}

module.exports = MongoLib;
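A hedged sketch of the missing pieces (my own assumptions, not from the original code): GridFS upload streams accept any readable stream, so the received JSON can be piped straight from memory instead of a temporary file (this assumes Node 12+ for Readable.from), and filtering works as a normal Mongo query against bucket.find():
const { Readable } = require('stream');

//Sketch: insert the received JSON without writing a temp file first.
async function createBigDataFromJson(db, collection, data, vr_id) {
    const bucket = new GridFSBucket(db, { bucketName: collection });
    return new Promise((resolve, reject) => {
        const uploadStream = Readable
            .from(JSON.stringify(data))
            .pipe(bucket.openUploadStream(`resultsdetail${vr_id}`));
        uploadStream.on('error', reject);
        uploadStream.on('finish', () => resolve(uploadStream.id));
    });
}

//Sketch: bucket.find() takes a regular filter over the files collection,
//so a stored record can be located by filename or any other file field.
async function findBigData(db, collection, vr_id) {
    const bucket = new GridFSBucket(db, { bucketName: collection });
    return bucket.find({ filename: `resultsdetail${vr_id}` }).toArray();
}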
In the following Node function, it is returning an empty array, and I'm not sure why it's doing that. Could this be an async/await issue? Would appreciate any help. Thank you.
const fs = require('fs')
const cheerio = require('cheerio')
const folderPath = '/public/home.html'
function getCircuitAndFuse(folderPath){
//List containing circuit name with its fuse
let temporaryList = [];
let finalCircuitAndFuseList = []
fs.readFile(__dirname + folderPath, (error, data)=>{
if(error){
console.log(`Unable to read file: ${error}`)
}else{
var $ = cheerio.load(data)
$('img').each(function(index, element){
let getClassAtr = element.attribs.class
temporaryList.push(getClassAtr.slice(0, getClassAtr.lastIndexOf(" ")))
})
finalCircuitAndFuseList = [...new Set(temporaryList)]
}
})
return finalCircuitAndFuseList;
}
let getInfo = getCircuitAndFuse(folderPath)
// Returning empty array
console.log(getInfo)
Server code:
const server = http.createServer(function(req, res){
res.writeHead(200, {'Content-Type': 'text/plain'})
res.end()
}).listen(port, ()=>{
console.log(`Server listening on port ${port}. Press Ctrl-C to terminate...`)
})
getCircuitAndFuse must return a Promise, like this:
function getCircuitAndFuse(folderPath) {
  return new Promise((resolve, reject) => {
    //List containing circuit name with its fuse
    let temporaryList = [];
    fs.readFile(__dirname + folderPath, (error, data) => {
      if (error) {
        console.log(`Unable to read file: ${error}`);
        //Also reject so the caller is notified instead of waiting forever.
        reject(error);
      } else {
        var $ = cheerio.load(data);
        $('img').each(function (index, element) {
          let getClassAtr = element.attribs.class;
          temporaryList.push(
            getClassAtr.slice(0, getClassAtr.lastIndexOf(' '))
          );
        });
        resolve([...new Set(temporaryList)]);
      }
    });
  });
}
getCircuitAndFuse(folderPath)
  .then((getInfo) => {
    // do something with `getInfo`
  })
  .catch((error) => {
    console.log(`Unable to read file: ${error}`);
  });
Another alternative to Faruk's answer would be to just use fs.readFileSync instead of wrapping your function in a promise and requiring some of that extra ceremony. Using fs.readFileSync will ensure that your function doesn't return prematurely, although it blocks the event loop while the file is read, which is fine for a one-off load but worth avoiding inside a busy request handler.
Here is your code rewritten with that in mind:
function getCircuitAndFuse(folderPath) {
try {
let temporaryList = [];
const data = fs.readFileSync(__dirname + folderPath);
const $ = cheerio.load(data);
$("img").each(function (index, element) {
let getClassAtr = element.attribs.class;
temporaryList.push(getClassAtr.slice(0, getClassAtr.lastIndexOf(" ")));
});
return [...new Set(temporaryList)];
} catch (error) {
console.log(error);
}
}
I am building a Node application and trying to neatly organize my code. I wrote a serial module that imports the serial libs and handles the connection. My intention was to write a basic module and then reuse it over and over in different projects as needed. The only part that changes per project is how the incoming serial data is handled. For this reason I would like to pull out the following handler and redefine it per the project's needs. How can I use module exports to redefine only this section of the file?
I have tried adding myParser to exports, but that gives me null, and I would be out of scope.
Handler to redefine/change/overload for each new project
myParser.on('data', (data) => {
console.log(data)
//DO SOMETHING WITH DATA
});
Example usage: main.js
const serial = require('./serial');
const dataParser = require('./dataParser');
//call connect with command-line args
serial.connect(process.argv[2], Number(process.argv[3]))
serial.myParser.on('data',(data) => {
//Do something unique with data
if (dataParser.parse(data) == 0)
serial.send('Error');
});
Full JS module below, serial.js:
const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');
const _d = String.fromCharCode(13); //char EOL
let myPort = null;
let myParser = null;

function connect(port, baud) {
    let portName = port || `COM1`;
    let baudRate = baud || 115200;
    myPort = new SerialPort(portName, { baudRate: baudRate });
    myParser = myPort.pipe(new ReadLine({ delimiter: '\n' }));
    //Handlers
    myPort.on('open', () => {
        console.log(`port ${portName} open`);
    });
    myParser.on('data', (data) => {
        console.log(data);
    });
    myPort.on('close', () => {
        console.log(`port ${portName} closed`);
    });
    myPort.on('error', (err) => {
        console.error('port error: ' + err);
    });
}

function getPorts() {
    let portlist = [];
    //Note: SerialPort.list is asynchronous, so portlist is returned
    //before the callback runs and will always come back empty.
    SerialPort.list((err, ports) => {
        ports.forEach(port => {
            portlist.push(port.comName);
        });
    });
    return portlist;
}

function send(data) {
    myPort.write(JSON.stringify(data) + _d, function (err) {
        if (err) {
            return console.log('Error on write: ', err.message);
        }
        console.log(`${data} sent`);
    });
}

function close() {
    myPort.close();
}

module.exports = {
    connect, getPorts, send, close
}
The problem is that a module is used where a class or a factory would be appropriate. myParser cannot exist until connect has been called, so it doesn't make sense to expose it as a module property: it would be unavailable by default, and multiple connect calls would overwrite it.
It can be a factory:
//The factory still needs its own requires:
const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');
const _d = String.fromCharCode(13); //char EOL

module.exports = function connect(port, baud) {
    let portName = port || `COM1`;
    let baudRate = baud || 115200;
    let myPort = new SerialPort(portName, { baudRate: baudRate });
    let myParser = myPort.pipe(new ReadLine({ delimiter: '\n' }));
    //Handlers
    myPort.on('open', () => {
        console.log(`port ${portName} open`);
    });
    myParser.on('data', (data) => {
        console.log(data);
    });
    myPort.on('close', () => {
        console.log(`port ${portName} closed`);
    });
    myPort.on('error', (err) => {
        console.error('port error: ' + err);
    });

    function getPorts() {
        //SerialPort.list is asynchronous, so return a promise of the port
        //names rather than an array that is still empty at return time.
        return new Promise((resolve, reject) => {
            SerialPort.list((err, ports) => {
                if (err) return reject(err);
                resolve(ports.map(port => port.comName));
            });
        });
    }

    function send(data) {
        myPort.write(JSON.stringify(data) + _d, function (err) {
            if (err) {
                return console.log('Error on write: ', err.message);
            }
            console.log(`${data} sent`);
        });
    }

    function close() {
        myPort.close();
    }

    return {
        myParser, getPorts, send, close
    };
}
So it could be used like:
const serial = require('./serial');
const connection = serial(...);
connection.myParser.on('data',(data) => {
//Do something unique with data
if (dataParser.parse(data) == 0)
connection.send('Error');
});
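Another option, if a full factory is more than the project needs (a sketch under the assumption that only the data handler varies between projects): let the caller inject the handler and fall back to logging.
//serial.js (sketch): hypothetical onData parameter supplied by the caller
function connect(port, baud, onData) {
    let portName = port || `COM1`;
    let baudRate = baud || 115200;
    myPort = new SerialPort(portName, { baudRate: baudRate });
    myParser = myPort.pipe(new ReadLine({ delimiter: '\n' }));
    //Use the injected handler when provided, otherwise just log.
    myParser.on('data', onData || ((data) => console.log(data)));
}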
I would create a new separate module with a function that contains a callback that I call in the main file.
My program should take 3 args: first a file directory, then a file extension, and finally a callback function that filters the directory contents by file extension.
Here's my module.js file
var fs = require('fs')
module.exports = function (directory, fileExtension, callbackfun) {
fs.readdir(directory, callbackfun);
}
Then in the main file I import the module and use the function.
modulejs(process.argv[2], process.argv[3], callbackfun(callbackfn));
var callbackfn = function (err, data, fileExtension) {
console.log(fileExtension);
let filtred = data.filter(file => { if (file.indexOf("." + fileExtension) > -1) { console.log(file); } });
};
function callbackfun(callbackfunc1) {
callbackfunc1(err, data, fileExtension);
}
The error that I actually get is
ReferenceError: err is not defined
UPDATE: I have edited my code; I get a different error now.
UPDATE 2: I found the solution on the internet but don't understand how it works. If anybody could explain it to me step by step, that would be nice.
module.js
const fs = require('fs')
const path = require('path')
module.exports = function (dir, filterStr, callback) {
    //Step 1: read every entry in the directory; fs.readdir hands the
    //result to this callback as (err, list).
    fs.readdir(dir, function (err, list) {
        //Step 2: if reading failed, forward the error through the caller's
        //callback (Node's "error-first" convention) and stop here.
        if (err) {
            return callback(err)
        }
        //Step 3: keep only the files whose extension matches '.' + filterStr;
        //path.extname('notes.txt') returns '.txt'.
        list = list.filter(function (file) {
            return path.extname(file) === '.' + filterStr
        })
        //Step 4: on success, call back with null for the error and the
        //filtered list as the result.
        callback(null, list)
    })
}
main.js
const filterFn = require('./module.js')
const dir = process.argv[2]
const filterStr = process.argv[3]
filterFn(dir, filterStr, function (err, list) {
    //Check the error first; list is undefined whenever err is set.
    if (err) {
        return console.error('There was an error:', err)
    }
    list.forEach(function (file) {
        console.log(file)
    })
})
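For example (a hypothetical invocation): node main.js ./some/dir txt would print every .txt file found in ./some/dir.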
I am struggling to find a way to write data to a CSV file in Node.js.
There are several CSV plugins available; however, they only 'write' to stdout.
Ideally I want to write on a row-by-row basis using a loop.
You can use fs (https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback):
var dataToWrite;
var fs = require('fs');
fs.writeFile('form-tracking/formList.csv', dataToWrite, 'utf8', function (err) {
if (err) {
console.log('Some error occurred - file either not saved or corrupted file saved.');
} else{
console.log('It\'s saved!');
}
});
The docs for node-csv-parser (npm install csv) specifically state that it can be used with streams (see fromStream, toStream). So it's not hard-coded to use stdout.
Several other CSV parsers also come up when you npm search csv -- you might want to look at them too.
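As a sketch of that stream-based approach (assuming a recent csv-stringify, v6+, which exposes a named stringify export that returns a Transform stream when called without a callback):
const { stringify } = require('csv-stringify');
const fs = require('fs');

//stringify() without a callback returns a Transform stream: write rows
//in a loop and pipe the CSV text into a file instead of stdout.
const stringifier = stringify({ header: true, columns: ['id', 'name'] });
stringifier.pipe(fs.createWriteStream('out.csv'));
for (let i = 0; i < 10; i++) {
    stringifier.write([i, 'Name ' + i]);
}
stringifier.end();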
Here is a simple example using csv-stringify to write a dataset that fits in memory to a CSV file using fs.writeFile. (The default import below matches older csv-stringify releases; newer versions export a named stringify instead.)
import stringify from 'csv-stringify';
import fs from 'fs';
let data = [];
let columns = {
id: 'id',
name: 'Name'
};
for (var i = 0; i < 10; i++) {
data.push([i, 'Name ' + i]);
}
stringify(data, { header: true, columns: columns }, (err, output) => {
if (err) throw err;
fs.writeFile('my.csv', output, (err) => {
if (err) throw err;
console.log('my.csv saved.');
});
});
If you want to use a loop as you say, you can do something like this with Node fs:
let fs = require("fs")
let writeStream = fs.createWriteStream('/path/filename.csv')
someArrayOfObjects.forEach((someObject, index) => {
    let newLine = []
    newLine.push(someObject.stringPropertyOne)
    newLine.push(someObject.stringPropertyTwo)
    // ... push any further properties here
    writeStream.write(newLine.join(',') + '\n', () => {
        // a line was written to stream
    })
})
writeStream.end()
writeStream.on('finish', () => {
console.log('finish write stream, moving along')
}).on('error', (err) => {
console.log(err)
})
In case you don't want to use any library besides fs, you can do it manually.
const fs = require("fs")
// jsonObject is assumed to be an array of flat objects, one per CSV row.
let fileString = ""
let separator = ","
let fileType = "csv"
let file = `fileExample.${fileType}`
// Header row: the keys of the first object.
Object.keys(jsonObject[0]).forEach(value => fileString += `${value}${separator}`)
fileString = fileString.slice(0, -1) // drop the trailing separator
fileString += "\n"
// One line per object, values in key order.
jsonObject.forEach(transaction => {
    Object.values(transaction).forEach(value => fileString += `${value}${separator}`)
    fileString = fileString.slice(0, -1)
    fileString += "\n"
})
fs.writeFileSync(file, fileString, 'utf8')
For those who prefer fast-csv:
const { writeToPath } = require('@fast-csv/format');
const path = `${__dirname}/people.csv`;
const data = [{ name: 'Stevie', id: 10 }, { name: 'Ray', id: 20 }];
const options = { headers: true, quoteColumns: true };
writeToPath(path, data, options)
.on('error', err => console.error(err))
.on('finish', () => console.log('Done writing.'));
In case you don't want to use any library besides fs, you can do it manually; moreover, you can filter the data however you want before writing it to the file.
router.get('/apiname', (req, res) => {
    const data = arrayOfObject; // you will get this from somewhere
    /*
    // Modify old data (new key names)
    let modifiedData = data.map(({ oldKey1: newKey1, oldKey2: newKey2, ...rest }) => ({ newKey1, newKey2, ...rest }));
    */
    const path = './test'
    writeToFile(path, data, (result) => {
        // get the result from the callback and process it
        console.log(result) // success or error
    });
});

const writeToFile = (path, data, callback) => {
    // JSON.stringify(data, null, 2) helps you write the data line by line
    fs.writeFile(path, JSON.stringify(data, null, 2), (err) => {
        if (!err) {
            callback('success');
        } else {
            callback('error'); // some error (catch this error)
        }
    });
}
This is the code that worked for me in NestJS:
import { Parser } from "json2csv";
import { appendFileSync } from "fs";
const csv = require('csvtojson');

const csvFilePath = process.cwd() + '/' + file.path; // file here is an uploaded file object
let csvData = await csv().fromFile(csvFilePath); /// read data from csv into an array of json
/// * from here, how to write data into csv *
csvData.push({
    label: value, // placeholder fields from the original post
    // ...
});
const fields = [
    'field1', 'field2', // ...
];
const parser = new Parser({ fields, header: false }); /// keep header: false if you don't want a header row, else remove it
const csvString = parser.parse(csvData);
appendFileSync('./filename.csv', `${csvString}\n`); // remove \n if you don't want a newline at the end