From the following array I need to get each price, one by one.
console.log('File data:', jsonString); prints the full JSON, but the for loop never seems to run; it never enters it. I need to loop through a JSON file that lives in a different folder: the file is list.json under the menu folder (projectName\menu\list.json), and it looks like this:
The data:
[
{
"code": "ZC",
"price": "1"
},
{
"code": "ZS",
"price": "3"
},
{
"code": "VC",
"price": "4"
},
...]
JS:
const jsonList = fs.readFile("../menu/list.json", "utf8", (err, jsonString) => {
if (err) {
console.log("File read failed:", err);
return;
}
console.log("File data:", jsonString);
console.log("File data:", jsonString.url);
for (var key in jsonString) {
if (jsonString.hasOwnProperty(key)) {
console.log("===>", jsonString[key].price);
}
return jsonString;
}
});
There are two ways to fix the issue you are facing. One is to run your code inside the callback:
fs.readFile("../menu/list.json", "utf8", (err, jsonString) => {
    if (err) {
        console.log("File read failed:", err);
        return;
    }
    console.log("File data:", jsonString);
    const jsonList = JSON.parse(jsonString); // parse the string before iterating
    for (var key in jsonList) {
        if (jsonList.hasOwnProperty(key)) {
            console.log("===>", jsonList[key].price);
        }
    }
});
or by using the synchronous function to read the file:
const jsonString = fs.readFileSync("../menu/list.json", "utf8");
console.log("File data:", jsonString);
const jsonList = JSON.parse(jsonString);
for (var key in jsonList) {
    if (jsonList.hasOwnProperty(key)) {
        console.log("===>", jsonList[key].price);
    }
}
I think you need to loop inside the callback: fs.readFile is async, so jsonList is not the object you expect at the time you access it. See Get data from fs.readFile.
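If you prefer promises over callbacks, here is a minimal sketch of a third option using fs.promises (assuming Node.js 10+, where the promise-based API is available):
const fs = require("fs").promises;

async function printPrices() {
    // readFile returns a promise, so we can await the file contents
    const jsonString = await fs.readFile("../menu/list.json", "utf8");
    const jsonList = JSON.parse(jsonString);
    for (const item of jsonList) {
        console.log("===>", item.price);
    }
}

printPrices().catch((err) => console.log("File read failed:", err));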
Related
With the source JSON file below, I wish to read it in and apply the delta from a changes.json file. There must be a more elegant way using direct mutation than reading into an array and writing back out. Curious how to do the direct mutation on the object and merge the changes into the source? The changes.json file has "action" and "payload" keys to direct the operations on the source.
source.json
{
"users" : [
{
"id" : "1",
"name" : "Dave Mustaine"
}
]
}
changes.json
{
  "users": [{
    "action": "add",
    "payload": [{
      "id": "2",
      "name": "James Hetfield"
    }]
  }]
}
becomes this:
{
  "users" : [
    {
      "id" : "1",
      "name" : "Dave Mustaine"
    },
    {
      "id" : "2",
      "name": "James Hetfield"
    }
  ]
}
In my noob implementation I have all the logic written to parse out the action and payload, but I am currently writing into an array. I think that is unnecessary, and there is a more elegant way to just directly mutate the object, then stringify it and write out the updated JSON.
Here is what I have so far:
// Stream in source file
fs.readFile('./' + inputFile, (err, data) => {
if (err) throw err;
let input = JSON.parse(data);
//console.log(input);
});
// Stream in changes file
fs.readFile('./' + changesFile, 'utf-8', (err, jsonString) => {
if (err) {
console.log(err);
} else {
try {
const data = JSON.parse(jsonString);
const array = [];
Object.entries(data).map(([key, [{ action, payload }]]) => {
switch (key) {
case 'users': {
if (action === 'add') {
console.log("it's an add");
array.push([`${key}`, `${payload}`]);
}
break;
}
case 'playlists': {
if (action === 'add') {
console.log("it's an add");
array.push([`${key}`, `${payload}`]);
}
break;
}
case 'songs': {
if (action === 'add') {
console.log("it's an add");
array.push([`${key}`, `${payload}`]);
}
break;
}
}
});
console.log(array);
} catch (err) {
console.log('Error parsing JSON', err);
}
}
});
// after we have merged changes and source we need to write out
fs.appendFile('./' + outputFile, JSON.stringify(array, null, 2), err => {
if (err) {
console.log(err);
} else {
console.log('File successfully written');
}
});
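For reference, here is a minimal sketch of the direct-mutation idea: read both files, append each "add" payload straight into the matching array on the source object, then stringify and write it out. It only handles the "add" action, uses synchronous reads for brevity, and assumes the inputFile/changesFile/outputFile names from the code above:
const fs = require('fs');

const source = JSON.parse(fs.readFileSync('./' + inputFile, 'utf-8'));
const changes = JSON.parse(fs.readFileSync('./' + changesFile, 'utf-8'));

// Apply each change directly to the source object
Object.entries(changes).forEach(([key, [{ action, payload }]]) => {
    if (action === 'add') {
        // Mutate in place: append the payload items to the existing array
        source[key] = (source[key] || []).concat(payload);
    }
});

fs.writeFileSync('./' + outputFile, JSON.stringify(source, null, 2));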
I have two functions as shown below. It is essentially just getting data from a database.
function processRequest(query){
let dynamoData = getDynamoData(query);
console.log('abc')
}
function getDynamoData(key){
var params = {
TableName: 'test_table',
Key: {
'TWEET_KEY' : {S: String(key)}
}
};
// Call DynamoDB to read the item from the table
ddb.getItem(params, function(err, data) {
if (err) {
console.log("Error");
} else {
console.log("Successfully got data from table")
return data.Item;
}
});
}
Currently when I run the code, within the console it prints the following:
abc
Successfully got data from table
However, I need it to print Successfully got data from table before it prints abc.
I know I possibly have to use async within the function, but I am really struggling to get the code to run in order. I would really appreciate any help. Thank you!
You should move both functions into a separate module (if you haven't already) and make them async, like this:
async function processRequest(query){
let dynamoData = await getDynamoData(query);
console.log('abc')
}
async function getDynamoData(key){
var params = {
TableName: 'test_table',
Key: {
'TWEET_KEY' : {S: String(key)}
}
};
return new Promise((resolve, reject) => {
// Call DynamoDB to read the item from the table
ddb.getItem(params, function(err, data) {
if (err) {
console.log("Error");
reject(err);
} else {
console.log("Successfully got data from table")
resolve(data.Item);
}
});
});
}
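With both functions returning promises, a quick (hypothetical) call site shows the ordering the question asks for:
processRequest('some-tweet-key'); // logs "Successfully got data from table", then "abc"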
You need to make processRequest asynchronous, and getDynamoData has to return a promise, or the await has nothing to wait on. With the AWS SDK for JavaScript v2 you can use the built-in .promise() instead of a callback:
async function processRequest(query){
    let dynamoData = await getDynamoData(query);
    console.log('abc')
}
function getDynamoData(key){
    var params = {
        TableName: 'test_table',
        Key: {
            'TWEET_KEY' : {S: String(key)}
        }
    };
    // Call DynamoDB to read the item from the table;
    // .promise() gives the caller something to await
    return ddb.getItem(params).promise().then(function(data) {
        console.log("Successfully got data from table");
        return data.Item;
    });
}
The code below prints [] in the console. The filenames array is actually being changed, but the print occurs before that happens. How do I get the final value of the filenames array, after all the changes have occurred?
function search(directoryPath, searchString, filenames) {
fs.readdir(directoryPath, { withFileTypes: true }, function (err, files) {
if (err) {
return console.log('Unable to scan directory: ' + err);
}
files.forEach(function (file) {
if (file.isDirectory()) {
if (file.name === searchString) {
filenames.push(file.name)
}
search(directoryPath + "/" + file.name, searchString, filenames);
}
});
});
}
const filenames = []
search(directoryPath, "1048594132", filenames);
console.log(filenames);
The simplest solution is to only use synchronous functions. You can do this by replacing the asynchronous fs.readdir() with the synchronous fs.readdirSync().
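A sketch of that synchronous rewrite, keeping the traversal logic from the question (fs.readdirSync supports the same withFileTypes option):
const fs = require('fs');

function search(directoryPath, searchString, filenames) {
    // readdirSync blocks until the directory has been read
    const files = fs.readdirSync(directoryPath, { withFileTypes: true });
    files.forEach(function (file) {
        if (file.isDirectory()) {
            if (file.name === searchString) {
                filenames.push(file.name);
            }
            search(directoryPath + "/" + file.name, searchString, filenames);
        }
    });
}

const filenames = [];
search(directoryPath, "1048594132", filenames);
console.log(filenames); // populated, since search ran to completion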
I'm trying to add new text to an existing JSON file. I tried writeFileSync and appendFileSync; however, the added text isn't formatted as JSON, even when I use JSON.stringify.
const fs = require('fs');
fs.readFile("test.json", (err, data) => {
if( err) throw err;
var data = JSON.parse(data);
console.log(data);
});
var student = {
age: "23"
};
fs.appendFileSync("test.json", "age: 23");
// var writeData = fs.writeFileSync("test.json", JSON.stringify(student));
My json file
{ name: "kevin" }
Append turns out like this: {name: "kevin"}age: "23",
and writeFileSync turns out like: {name: "kevin"}{age: "23"}.
What I want is to continuously add text to my JSON file, like so:
{
name: "kevin",
age: "23"
}
First, don't use readFileSync and writeFileSync; they block the event loop, which goes against Node.js conventions. Here is the corrected code:
const fs = require('fs');
fs.readFile("test.json", (err, data) => { // READ
    if (err) {
        return console.error(err);
    }
    data = JSON.parse(data.toString());
    data.age = "23"; // MODIFY
    // the writeFile callback receives only an error argument
    fs.writeFile("test.json", JSON.stringify(data), (err) => { // WRITE
        if (err) {
            return console.error(err);
        }
        console.log("Success");
    });
});
What this code does:
Reads the data from the file.
Modifies the data to get the new data the file should have.
Writes the data (not append) back to the file.
Here's what you can do: read the data from the file, edit that data, then write it back again.
const fs = require("fs")
fs.readFile("test.json", (err, buffer) => {
if (err) return console.error('File read error: ', err)
const data = JSON.parse(buffer.toString())
data.age = 23
fs.writeFile("test.json", JSON.stringify(data), err => {
if (err) return console.error('File write error:', err)
})
})
I am trying to delete several objects after copying them to a different folder.
My code is like:
var deleteParam = {
Bucket: 'frontpass-test',
Delete: {
Objects: [
{Key: '1.txt'},
{Key: '2.txt'},
{Key: '3.txt'}
]
}
};
s3.deleteObjects(deleteParam, function(err, data) {
if (err) console.log(err, err.stack);
else console.log('delete', data);
});
and the returned data is:
delete { Deleted: [ { Key: '1.txt' }, { Key: '3.txt' }, { Key: '2.txt' } ],
Errors: [] }
so I assume the deletion completed. But the objects still exist in the folder; is there something wrong with my code?
I also tried to delete the objects using a for loop and s3.deleteObject, but it only deletes the last object in my list of files.
for (var i = 0; i < files.length; i++) {
var copyParams = {
Bucket: 'frontpass-test',
CopySource: 'frontpass-test/unsold/' + files[i].filename,
Key: 'sold/' + files[i].filename
};
var deleteParam = {
Bucket: 'frontpass-test',
Key: 'unsold/' + files[i].filename
};
s3.copyObject(copyParams, function(err, data) {
if (err) console.log(err, err.stack);
else {
s3.deleteObject(deleteParam, function(err, data) {
if (err) console.log(err, err.stack);
else console.log('delete', data);
});
}
});
}
Any idea on how to delete objects in my case? Thanks in advance.
Well the first example looks good. Do you have object versioning turned on in the bucket? That would keep a copy of a file even after you delete it.
The second example actually contains some bugs that would explain why only the last one gets deleted. Because Node.js is asynchronous, when you hit the copyObject call, the loop iteration ends and moves on to the next iteration without waiting for copyObject's callback to fire.
You try to define the params variables anew on each iteration with the var keyword, but because JavaScript has function-level scope, not block-level scope, you aren't actually creating new variables each time: there is only one instance of copyParams and one of deleteParam. So the loop finishes quickly, and deleteParam keeps the value it received in the last iteration. Eventually the copyObject callbacks start firing, and each one calls deleteObject with that same last deleteParam.
To make multiple asynchronous calls in a loop, I like to use the async library. Using it, you could do the following:
async.each(files, function iterator(file, callback) {
var copyParams = {
Bucket: 'frontpass-test',
CopySource: 'frontpass-test/unsold/' + file.filename,
Key: 'sold/' + file.filename
};
var deleteParam = {
Bucket: 'frontpass-test',
Key: 'unsold/' + file.filename
};
s3.copyObject(copyParams, function(err, data) {
if (err) callback(err);
else {
s3.deleteObject(deleteParam, function(err, data) {
if (err) callback(err)
else {
console.log('delete', data);
callback();
}
});
}
});
}, function allDone(err) {
//This gets called when all callbacks are called
if (err) console.log(err, err.stack);
});
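As an aside, the same scoping bug goes away in modern JavaScript if the params are declared with let or const (block scope) inside the loop, and with the SDK's .promise() you can wait for every copy/delete pair without an extra library. A minimal sketch of that alternative:
// Copy each file, then delete the original, in parallel across files
const moves = files.map((file) => {
    const copyParams = {
        Bucket: 'frontpass-test',
        CopySource: 'frontpass-test/unsold/' + file.filename,
        Key: 'sold/' + file.filename
    };
    const deleteParam = {
        Bucket: 'frontpass-test',
        Key: 'unsold/' + file.filename
    };
    // delete only starts after this file's copy has resolved
    return s3.copyObject(copyParams).promise()
        .then(() => s3.deleteObject(deleteParam).promise());
});

Promise.all(moves)
    .then((results) => console.log('all moved', results))
    .catch((err) => console.log(err, err.stack));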
Just had to implement folder rename on top of S3; I did it as follows (promise API):
_getDataForItemRename(from, to) {
    return s3.listObjectsV2({Bucket: services.conf.workspace, Prefix: from}).promise()
        .then((data) => {
            const toCopy = [];
            const toRemove = [];
            const s3Contents = Object.assign([], data.Contents);
            // in case of a single dir (with no children)
            if (s3Contents.length === 0) {
                s3Contents.push({Key: from});
            }
            s3Contents.forEach((item) => {
                // copy each object to the same key under the new prefix
                const copyPromise = s3.copyObject({
                    Bucket: services.conf.workspace,
                    Key: item.Key.replace(from, to),
                    CopySource: services.conf.workspace + '/' + item.Key
                }).promise();
                toCopy.push(copyPromise);
                // wrap each delete in a thunk so it runs only after the copies finish
                toRemove.push(() => s3.deleteObjects({
                    Bucket: services.conf.workspace,
                    Delete: {Objects: [{Key: item.Key}]}
                }).promise());
            });
            return {copy: toCopy, remove: toRemove};
        });
}
return this._getDataForItemRename(_from, _to).then((files) => {
    // wait for every copy to complete, then run the deletes
    return Promise.all(files.copy)
        .then(() => Promise.all(files.remove.map((run) => run())));
});
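Note the asymmetry in the sketch above: the copy promises start as soon as copyObject is called, while the deletes are wrapped in functions so they are only kicked off once Promise.all(files.copy) has resolved. Otherwise a delete could race ahead of its copy and lose the object.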