This question already has answers here:
Serializing an object to JSON
(4 answers)
Closed 2 years ago.
I have a function that scans a directory and builds key/value pairs of all folders and their respective files.
// Builds a folder -> files map of everything under defaultPath.
// NOTE(review): `listing` is declared as an Array but only string keys are
// ever assigned to it — JSON.stringify() serializes an array by numeric
// index only, so these named keys are dropped (see the answer below:
// declare it as an object {} instead).
const listNotes = () => {
const listing = [];
try{
// One directory entry per notes folder.
const folders = fs.readdirSync(defaultPath);
folders.forEach((folder) => {
// The files contained in this folder.
let v = fs.readdirSync(`${defaultPath}/${folder}`);
listing[folder] = v;
});
} catch (err) {
// Errors are logged and whatever was collected so far is returned.
console.log(err);
}
return listing;
};
and this is an output of such scan
[ DailyTask: [ 'Chores.json' , 'Sunday.json'] ]
that is a folder called DailyTask containing 2 files.
How do I convert this structure to JSON?
The problem with your listing variable is that it's an Array. JSON.stringify() doesn't include all the keys of an array, just the indexed items, such as listing[0], listing[1]. In your case, you aren't really using listing as an array; you are using it as an object. You can just make it an object, and then when you call JSON.stringify() it will include the keys.
Solution
// Builds a folder -> files map of everything under defaultPath.
// Uses a plain object (not an array) so folder names become enumerable
// string keys that JSON.stringify() will include in its output.
const listNotes = () => {
  const listing = {};
  try {
    // Each entry directly under defaultPath is treated as a notes folder.
    const folders = fs.readdirSync(defaultPath);
    folders.forEach((folder) => {
      // Map the folder name to the list of files it contains.
      listing[folder] = fs.readdirSync(`${defaultPath}/${folder}`);
    });
  } catch (err) {
    console.log(err);
  }
  // If you want the JSON text, stringify BEFORE returning — code placed
  // after a `return` statement is unreachable and never runs.
  console.log(JSON.stringify(listing));
  return listing;
};
Related
This question already has answers here:
Why is my variable unaltered after I modify it inside of a function? - Asynchronous code reference
(7 answers)
Reading and returning multiple files in Node.js using fs.readFile
(5 answers)
Get data from fs.readFile [duplicate]
(17 answers)
Closed 3 months ago.
I'm looping through scripts in a folder. For each script I check for regex matches. If a match appears I want to create an object out of it and push the object to an array. Then I want to console.log the array, but it's empty.
const { getServerFolders, getServerFolderElements } = require('./mainModules/getServerFolders.js')
var fs = require('fs');
const serverFoldersPath = "../electCon/src/scripts/"
// Scans every script file in a server folder for `#variable#` tokens and
// collects one descriptor object per match.
// NOTE(review): fs.readFile is asynchronous — every callback below runs
// AFTER the console.log at the bottom of this function, which is why the
// logged array is empty (see the "Asynchronous code reference" duplicate
// linked above). Use fs.promises.readFile with await, or aggregate inside
// the callbacks, to fix it.
function getFolderElements(folder) {
var variableObjArray = []
// Folder names look like "<name>_<suffix>"; only the name part is used.
let folderName = folder.split('_')[0] //
// returns the files of the folder as an array
let serverFolderElements = getServerFolderElements(folderName);
serverFolderElements.forEach(element => {
//read the script
fs.readFile(serverFoldersPath + folderName + "/" + element , "utf-8", (err, data) => {
// throw if error
if(err) {return console.log(err)}
// find the variable with the # prefix
var scriptVariableNames = data.match(/#[^#]+#/g)
// if there is one or more variable in the script
if(scriptVariableNames) {
// for each variable in the script
scriptVariableNames.forEach(variable => {
// "#name#".split("#") -> ["", "name", ""]; index 1 is the bare name.
let variableProp = variable.split("#")[1]
// NOTE(review): `variableObject` is assigned without a declaration —
// an implicit global (a ReferenceError in strict mode).
variableObject = {
sourceScript: element,
variableName : variable,
variableProp : variableProp,
variableDescription : "",
}
variableObjArray.push(variableObject)
})
}
})
})
// Runs before any readFile callback fires, so the array is still empty here.
console.log(variableObjArray)
}
When I log the array one block earlier it's full (but it will log many times because of the loops), but as soon as I jump out a block it's empty again.
I have already tried to...
... log the variableObjArray earlier, but this way it would be in the for each loop.
... declare the object beforehand, change the properties and then push it
Here is the full code:
index.js
const { getServerFolders, getServerFolderElements } = require('./mainModules/getServerFolders.js')
var fs = require('fs');
const serverFoldersPath = "../electCon/src/scripts/"
// Same scanner as above, as used in index.js: collects one descriptor per
// `#variable#` token found in the folder's scripts.
// NOTE(review): fs.readFile is asynchronous, so the console.log at the end
// runs before any callback has pushed into variableObjArray — that is why
// the final log prints an empty array.
function variableFinder(folder) {
var variableObjArray = []
// Folder names look like "<name>_<suffix>"; only the name part is used.
let folderName = folder.split('_')[0] //
// returns the files of the folder as an array
let serverFolderElements = getServerFolderElements(folderName);
serverFolderElements.forEach(element => {
//read the script
fs.readFile(serverFoldersPath + folderName + "/" + element , "utf-8", (err, data) => {
// throw if error
if(err) {return console.log(err)}
// find the variable with the # prefix
var scriptVariableNames = data.match(/#[^#]+#/g)
// if there is one or more variable in the script
if(scriptVariableNames) {
// for each variable in the script
scriptVariableNames.forEach(variable => {
let variableProp = variable.split("#")[1]
// NOTE(review): implicit global — should be declared with const/let.
variableObject = {
sourceScript: element,
variableName : variable,
variableProp : variableProp,
variableDescription : "",
}
variableObjArray.push(variableObject)
})
}
})
})
// Still empty here: executes before the asynchronous reads complete.
console.log(variableObjArray)
}
getServerFolders:
const { readdirSync } = require('fs');
const serverFoldersPath = "../electCon/src/scripts/"
/**
 * Lists the entry names found directly under serverFoldersPath.
 * @returns {string[]} directory entry names, as reported by readdirSync
 */
function getServerFolders() {
  // Synchronous read keeps the call-site simple; the raw listing is
  // returned without any filtering.
  return readdirSync(serverFoldersPath);
}
/**
 * Lists the files inside one server folder.
 * @param {string} server - folder name appended to serverFoldersPath
 * @returns {string[]} entry names inside that folder
 */
function getServerFolderElements(server) {
  // serverFoldersPath already ends with a separator, so plain
  // concatenation yields the folder's path.
  const folderPath = serverFoldersPath + server;
  return readdirSync(folderPath);
}
module.exports = { getServerFolders, getServerFolderElements }
The scripts I want to loop through:
hello mom #heeelp# #variable10# i hope stackoverflow can help me
not a variable #variablee# and some filler
Test-Connection #variable1#
I have already tried to...
... log the variableObjArray earlier, but this way it would be in the for each loop.
... declare the object beforehand, change the properties and then push it
Note: its actually programmed in Electron and the variableFinder function is actually a ipcMain listener. I rewrote the function this way so no one will be confused. Thanks a lot :)
consider this scenario:
I have 2 CSV files; each one is sorted and contains the id field.
I need to join the rows using the id field. Because the files are already sorted by the id I wanted to perform merge join (https://en.wikipedia.org/wiki/Sort-merge_join).
For that I need to have a way to load some portion of both files, process it and iteratively load more again from one or both files.
(The files are big and would not fit into memory so only streaming approach will work).
The problem is the Node API, what to use? readline will not work because of https://github.com/nodejs/node/issues/33463. Any other ideas?
I had to do something quite similar recently and decided to use the node-line-reader module that has a simpler interface than the built-in readline. I then created a little recursive function that determines which file to read from next by comparing the id of each csv-entry of each provided file. After that the corresponding line gets written out to the target file, and the method is called again until all lines of all files are processed. Here's the whole class I ended up with:
const fs = require('fs');
const LineReader = require('node-line-reader').LineReader;
// Merges multiple id-sorted CSV files into one target file with a
// streaming merge join: only one pending line per input file is held in
// memory at any time.
class OrderedCsvFileMerger {
// files: array of input file paths; targetFile: path of the merged output.
constructor(files, targetFile) {
// lineBuffer[i] holds the next unwritten line read from readers[i].
this.lineBuffer = [];
this.initReaders(files);
this.initWriter(targetFile);
}
// One LineReader (node-line-reader package) per input file.
initReaders(files) {
this.readers = files.map(file => new LineReader(file));
}
initWriter(targetFile) {
this.writer = fs.createWriteStream(targetFile);
}
// Primes the buffer with the first line of every file, then merges.
async mergeFiles() {
// initially read first line from all files
for (const reader of this.readers) {
this.lineBuffer.push(await this.nextLine(reader));
}
return this.merge();
}
// Promisified wrapper around LineReader's callback API.
// NOTE(review): on error this calls reject() and then still reaches
// resolve(); the second settle is a no-op, but an early return / else
// would make the intent clearer.
async nextLine(reader) {
return new Promise((resolve, reject) => {
reader.nextLine(function (err, line) {
if (err) reject(err);
resolve(line);
});
})
}
// Writes the buffered line with the smallest leading integer id to the
// output, refills that buffer slot, and recurses until all inputs are
// exhausted. The await between recursive calls prevents stack growth.
async merge() {
if (this.allLinesProcessed()) {
return;
}
// Select the buffer entry with the smallest numeric id prefix.
// NOTE(review): parseInt of an exhausted slot (undefined/null) is NaN and
// never wins the comparison; a line that does not start with a number
// would leave currentBufferIndex at -1 — confirm the input format.
let currentBufferIndex = -1;
let minRowId = Number.MAX_VALUE;
for (let i = 0; i < this.lineBuffer.length; i++) {
const currentRowId = parseInt(this.lineBuffer[i]); // implement parsing logic if your lines do not start
// with an integer id
if (currentRowId < minRowId) {
minRowId = currentRowId;
currentBufferIndex = i;
}
}
const line = this.lineBuffer[currentBufferIndex];
this.writer.write(line + "\n");
// Refill the slot just consumed; a falsy value marks file exhaustion.
this.lineBuffer[currentBufferIndex] = await this.nextLine(this.readers[currentBufferIndex]);
return this.merge();
}
// True once every buffer slot is falsy (all files fully consumed).
allLinesProcessed() {
return this.lineBuffer.every(l => !l);
}
}
// Entry point: merge the two sorted CSV inputs into the target file.
(async () => {
  const input = ['./path/to/csv1.csv', './path/to/csv2.csv'];
  const target = './path/to/target.csv';
  // Bug fix: the constructor was called with undeclared identifiers
  // (`files`, `output`), which throws a ReferenceError; pass the
  // variables declared above instead.
  const merger = new OrderedCsvFileMerger(input, target);
  await merger.mergeFiles();
  console.log("Files were merged successfully!")
})().catch(err => {
  console.log(err);
});
I am trying to implement the following SQL logic in the datastore,
SELECT * from table where id in [1,2,3,4,5]
Implementing this in datastore, I want to retrieve all the corresponding entities with these IDs as an array.
// Fetch each employee by id, preserving idArray order. The former
// `try { ... } catch (e) { throw e; }` wrapper was a no-op (it caught only
// to rethrow) and has been removed — rejections propagate to the caller
// unchanged.
// NOTE: this issues one datastore round-trip per id (an N+1 pattern);
// prefer a single batched lookup when the API supports it.
let employees = []
for (let id of idArray) {
  const employee = await employeeRepo.getOneById(workspace, id)
  employees.push(employee)
}
This is the naive logic of the function, and I am trying to reduce it to a single query.
Are you using the Node.js library referenced here: https://cloud.google.com/datastore/docs/reference/libraries
There is a function get where you can pass in an array of keys and it will return the array of entities.
https://googleapis.dev/nodejs/datastore/latest/Datastore.html#get
It's possible to do by using get as mentioned in the documentation
Here's an example of how to do it based on your code:
// Build one complete key per entity id for kind "Employee".
// Bug fix: the original mixed `this.datastore` and `datastore` — two
// different bindings — so one of the two references was undefined; a
// single `datastore` reference is now used throughout.
const employee1 = datastore.key(['Employee', 1]);
const employee2 = datastore.key(['Employee', 2]);
const employee3 = datastore.key(['Employee', 3]);
const keys = [employee1, employee2, employee3];
// datastore.get() accepts an array of keys and resolves with [entities],
// replacing the per-id loop with a single round-trip. The former
// `catch (e) { throw e; }` was a no-op and has been removed; rejections
// still propagate unchanged.
const [employees] = await datastore.get(keys);
My Firebase data base contains JSON objects, each with the same parameters as seen below. Firebase data
I want to get an array that has each objects country. So if I have 4,000 objects I want to get an array of 4,000 strings all containing a country.
Right now I can get the console to log all the 4,000 objects into an array using the code below.
// Lifecycle hook: start the Firebase fetch before the first render.
// NOTE(review): componentWillMount is deprecated in recent React versions;
// componentDidMount is the usual place for data fetching — confirm against
// the project's React version.
componentWillMount() {
this.fetchData();
}
// Fetches the whole database ref once and logs the accumulated array.
// Improvements: the promise chain is returned (so callers can await
// completion) and a .catch is attached (a failed read is no longer an
// unhandled rejection).
fetchData = async () => {
  var data1 = [];
  var fireBaseResponse = firebase.database().ref();
  return fireBaseResponse.once('value').then(snapshot => {
    snapshot.forEach(item => {
      var temp = item.val();
      data1.push(temp);
      // Returning false keeps Firebase's forEach iterating over all children.
      return false;
    });
    console.log(data1);
    return data1;
  }).catch(err => console.log(err));
}
But when I try doing
var fireBaseResponse = firebase.database().ref().child('country');
I get an array of nothing.
Any help would be great.
As mentioned in the comments, you can create a new temp object containing just country before pushing it into your array.
snapshot.forEach(item => {
// Keep only the country field of each record; everything else is dropped.
const record = item.val();
data1.push({ country: record.country });
// Returning false keeps Firebase's forEach iterating over all children.
return false;
});
I have been working on a Node.JS/MongoDB backend for a product catalogue system and I have run into a weird bug.
The Process
First I load the product information from the product collection. This product contains information such as its name, description and also a list of file upload IDs that point to images of the product in an array of strings.
I then, for each upload ID, load the information for each photo. I then replace the array of upload ID strings with an array of file information objects.
However, when I output the resulting product object, the list of images is a list of strings that represent JavaScript as if they were written in code.
The Code
// products.controller.js
// Replaces each upload id in productRecord.images with the full file
// document loaded from the File collection.
// NOTE(review): productRecord is a Mongoose document whose schema declares
// `images` as [String]; on assignment Mongoose casts each object back to a
// string (see the answer below) — hence the object-in / string-out logs.
async function populateImageInformation(productRecord) {
let imageInfo = [];
for(let uploadId of productRecord.images) {
let imageData = await File.getByUploadId(uploadId);
imageInfo.push(imageData);
console.log(typeof imageData); // => object
}
console.log(typeof imageInfo); // => object
console.log(typeof imageInfo[0]); // => object
productRecord.images = imageInfo;
console.log(typeof productRecord.images); // => object
console.log(typeof productRecord.images[0]); // => string
return productRecord;
}
// Express-style handler: loads every product, populates its image info,
// and sends the result as the JSON response.
async function getAll(req, res, next) {
try {
let productRecords = await Product.find({});
// for...in over an array iterates its index keys; each record is replaced
// in place by its populated version.
for(let productRecordI in productRecords) {
productRecords[productRecordI] = await populateImageInformation(productRecords[productRecordI]);
}
res.json(productRecords);
} catch (e) {
// Delegate errors to the Express error-handling middleware.
next(e);
}
}
Mongoose schema file:
// file.model.js
// Looks up a single file document by its uploadId. The shorthand property
// `{ uploadId }` is equivalent to `{ uploadId: uploadId }`.
getByUploadId: function(uploadId) {
return this.findOne({
uploadId
});
}
I do not understand why, when I set the productRecord.images property to imageInfo, the values in the array suddenly become string representations of the values.
The string does not contain JSON, but instead contains a human-readable string representation of how an object is hard-coded into JavaScript. These strings cannot be parsed as JSON at all. JSON expects keys to be wrapped in double quotes whereas the JavaScript code produced doesn't have this.
Is there any reason for this to happen? I have never seen this occur.
The problem is due to the Mongoose schema: it protects the model with the types you declared in the schema. The object returned by find is a Mongoose model, and it type-checks any value you update on it.
Let's come to your scenario . I have created an example . See below
const mongoose = require('mongoose');
const db = require('./db');
const Schema = mongoose.Schema;
// Product schema: `images` is declared as an array of String — this is what
// makes Mongoose cast any objects assigned to it back to strings.
const productSchema = new Schema({
name: {type: String},
images: [String] // Note schema have array of string
});
// Model compiled from the schema; find() returns instances of it.
const productModel = mongoose.model('product', productSchema);
// Demonstration: TRIAL A mutates Mongoose documents directly (the schema
// casts the assigned objects back to strings); TRIAL B first copies the
// records to plain objects, which have no schema guard, so the objects
// survive.
db.connect().then(() => {
  //find record from productSchema
  return productModel.find({})
    .then((productRecords) => { //productRecords - don't forget is a mongoose model object
      // productRecords from db [{"name":"gilette","images":["idx","idy"],"_id":"5ac324c4fad317265b9df226","__v":0}]
      //TRIAL A
      for (let productRecordI in productRecords) {
        productRecords[productRecordI] = populateImageInformation(productRecords[productRecordI]);
      }
      console.log(typeof productRecords[0].images[0]); //=> string;
      //SOLUTION for the problem is copy mongoose data without reference
      //TRIAL B
      productRecords = JSON.parse(JSON.stringify(productRecords));
      for (let productRecordI in productRecords) {
        productRecords[productRecordI] = populateImageInformation(productRecords[productRecordI]);
      }
      console.log(typeof productRecords[0].images[0]); //=> Object;
    });

  //just a mock function change images to object
  // (hoisted, so it is visible to the .then callback above)
  function populateImageInformation(productRecord) {
    // Bug fix: `imageInfo` was assigned without a declaration, creating an
    // implicit global (a ReferenceError in strict mode / ES modules).
    let imageInfo = [];
    for (let uploadId of productRecord.images) {
      let imageData = {name: uploadId};
      imageInfo.push(imageData);
      console.log(typeof imageData); // => object
    }
    productRecord.images = imageInfo;
    return productRecord;
  }
}).catch((err) => {
  console.log('err', err);
});