I am trying to replace/change the values in these objects, but I can't work out how it's done or whether it can even be done.
I'm trying to add https://SiteName.com to the start of each value, so it becomes https://SiteName.com\/asset\/image\/map\/map-grass.svg
var assets = [{
"name": "\/asset\/image\/map\/map-grass.svg",
"url": "\/asset\/image\/map\/map-grass.svg"
}, {
"name": "\/asset\/image\/map\/map-stone.svg",
"url": "\/asset\/image\/map\/map-stone.svg"
}]
Object.keys(assets).forEach(key => {
const val = assets[key];
console.log(val)
});
Try this:
var assets = [{
"name": "\/asset\/image\/map\/map-grass.svg",
"url": "\/asset\/image\/map\/map-grass.svg"
}, {
"name": "\/asset\/image\/map\/map-stone.svg",
"url": "\/asset\/image\/map\/map-stone.svg"
}]
let url = "https://SiteName.com";
Object.keys(assets).forEach(key => {
const val = assets[key];
val.name = url + val.name;
val.url = url + val.url;
});
console.log(assets)
You need nested loops (or forEach calls) here: an outer one over the elements of the assets array, and then, for each object in there, an inner one over all of its properties:
var assets = [{
"name": "\/asset\/image\/map\/map-grass.svg",
"url": "\/asset\/image\/map\/map-grass.svg"
}, {
"name": "\/asset\/image\/map\/map-stone.svg",
"url": "\/asset\/image\/map\/map-stone.svg"
}]
assets.forEach(o => {
Object.keys(o).forEach(key => {
o[key] = 'https://SiteName.com' + o[key];
})
});
console.log(assets);
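If you prefer not to mutate the original objects, a map-based variant of the same idea (just a sketch, starting from the un-prefixed assets array) builds a new array instead:
var prefix = 'https://SiteName.com';
var prefixed = assets.map(o => ({
    name: prefix + o.name,
    url: prefix + o.url
}));
console.log(prefixed);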
Example JSON file:
[
{
"discordId": "9273927302020",
"characters": [
{
"name": "Rare_Character",
"value": 1
},
{
"name": "Ultra_Rare_Character",
"value": 1
}
]
}
]
Let's just say, for example, that I ran this simple gacha and got 4 characters:
let i = 1
var picks = []
while(i <= 4){
const { pick } = gacha.simple(alpha)
picks.push(pick)
i++
}
Now picks is an array like this:
[
{
"name": "Common_Character"
},
{
"name": "Ultra_Rare_Character"
},
{
"name": "Common_Character"
},
{
"name": "Rare_Character"
}
]
How do I increment the value in my example JSON file based on the names in my gacha result picks, while ignoring Common_Character and only passing through the Rare and Ultra_Rare ones?
I've tried filtering them like this:
var filter = picks.filter(t => t.name === 'Rare_Character' || t.name === 'Ultra_Rare_Character')
Now I don't know how to increase those values in my JSON file, and what happens if the gacha results contain two Rare_Characters or Ultra_Rare_Characters?
I'm using fs to read my JSON file, but I just don't know the logic for increasing the values.
const src = [
{
"discordId": "9273927302020",
"characters": [
{
"name": "Rare_Character",
"value": 1
},
{
"name": "Ultra_Rare_Character",
"value": 1
}
]
}
];
const gacha = [
{
"name": "Common_Character"
},
{
"name": "Ultra_Rare_Character"
},
{
"name": "Common_Character"
},
{
"name": "Rare_Character"
}
];
const updateValues = (src, gacha) => {
    // Count how often each name occurs in the gacha results
    // ("x | 0" turns an undefined entry into 0 before adding)
    const gachaSums = gacha.reduce((collector, current) => {
        collector[current.name] = (collector[current.name] | 0) + 1;
        return collector;
    }, {});
    // Add the existing values from src on top of those counts
    src.characters.forEach(srcChar => {
        gachaSums[srcChar.name] = srcChar.value + (gachaSums[srcChar.name] | 0);
    });
    // Rebuild the characters array from the summed totals
    src.characters = Object.entries(gachaSums).map(([key, value]) =>
        ({ name: key, value: value })
    );
    return src;
}
console.log(updateValues(src[0], gacha));
Maybe this version could help
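If you also need to persist the result with fs, a minimal sketch could look like this (the file name characters.json and the filter variable holding your non-common picks are assumptions):
const fs = require('fs');

// read and parse the stored JSON (characters.json is a placeholder path)
const src = JSON.parse(fs.readFileSync('characters.json', 'utf8'));

// updateValues is the function above; filter holds the rare/ultra-rare picks
updateValues(src[0], filter);

// write the updated structure back to disk
fs.writeFileSync('characters.json', JSON.stringify(src, null, 2));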
I have the following code, which calls two different APIs, parses the JSON data, and displays it on a webpage. Both JSON datasets have the same structure, one with 5 columns and the other with 20 columns.
The JavaScript code I am using is shown below. How can I combine both JSON datasets into one, so that there's a resulting dataset with 25 columns, enabling me to search/reference across all 25 columns?
The Data Structure of both JSON datasets is as follows:
{
"datatable": {
"data": [
[
"TSLA",
"2019-02-22",
"2019-02-22",
58995.9,
-231.2
]
],
"columns": [
{
"name": "ticker",
"type": "String"
},
{
"name": "date",
"type": "Date"
},
{
"name": "lastupdated",
"type": "Date"
},
{
"name": "ev",
"type": "BigDecimal(15,3)"
},
{
"name": "evebit",
"type": "BigDecimal(15,3)"
}
]
},
"meta": {
"next_cursor_id": null
}
}
The JavaScript Code is as follows:
var apiurls = [
'api1.json',
'api2.json'
],
elroot = document.getElementById('root'),
index = 0;
function setup() {
loadJSON(apiurls[index], gotData);
}
function gotData(data) {
var daten = data.datatable.data[0],
spalten = data.datatable.columns,
output = '';
for (var i = 0; i < spalten.length; i++) {
output = '<p>' + spalten[i].name + ': ' + daten[i] + '</p>';
elroot.innerHTML += output;
}
if (++index < apiurls.length) {
setup();
}
}
Something like this?
var
Json_1 = {
"datatable": {
"data" : ['aa','bb','cc'],
"columns": ['x','y','z']
},
"meta": { 'meta1': 15, 'meta2':87 }
},
Json_2 = {
"datatable": {
"data" : ['ZZbb','cZc'],
"columns": ['xf','yf','zf','zgg']
},
"meta": { 'meta1': 879, 'meta2':4 }
},
Json_cumul_typ0 = { Json_1, Json_2 },
Json_cumul_typ1 = {
"data" : [].concat( Json_1.datatable.data, Json_2.datatable.data ),
"columns": [].concat( Json_1.datatable.columns, Json_2.datatable.columns ),
}
;
console.log( Json_cumul_typ0 );
console.log( Json_cumul_typ1 );
It would be easier to make all the API calls first, combining them into a single result object before doing any processing. Currently, you are making an API call, then processing the results before making the next API call.
I think the nature of async callbacks is making your task more difficult. I suggest using async/await to simplify the logic. Something like this:
var apiurls = [
'api1.json',
'api2.json'
],
elroot = document.getElementById('root');
// Combine all API responses into this object
const allResults = {
data: [[]],
columns: []
};
// loadJSON() is probably not async, so here is an async version using fetch()
async function loadJSON(url) {
const response = await fetch(url);
return response.json()
}
// Wrap logic in async function so await can be used
(async () => {
// First make all the API calls
for (const url of apiurls) {
const results = await loadJSON(url);
allResults.data[0] = allResults.data[0].concat(results.datatable.data[0]);
allResults.columns = allResults.columns.concat(results.datatable.columns);
}
// Then process combined allResults object here once.
var daten = allResults.data[0],
spalten = allResults.columns,
output = '';
for (var i = 0; i < spalten.length; i++) {
output = '<p>' + spalten[i].name + ': ' + daten[i] + '</p>';
elroot.innerHTML += output;
}
})();
The loadJSON() you are using probably isn't async. Here are some alternatives you can use:
fetch()
axios
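For example, an axios-based loadJSON() replacement (just a sketch, assuming axios is already available on the page) could be:
async function loadJSON(url) {
    // axios resolves with a response object; the parsed JSON body is on .data
    const response = await axios.get(url);
    return response.data;
}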
var object1 = {
"datatable": {
"data": [],
"columns": [1,2,3,4]
},
"meta": {}
}
var object2 = {
"datatable": {
"data": [],
"columns": [6,7,8,9,0,11,12,123]
},
"meta": {}
}
Now you want to concatenate the columns field. What you can do is create a deep copy of one of the objects above. (There are better ways to do a deep copy than the one mentioned below.)
var object3 = JSON.parse(JSON.stringify(object1));
Now to concatenate columns do this,
object3.datatable.columns = object3.datatable.columns.concat(object2.datatable.columns);
If you want to concatenate multiple fields, you can use a for...in loop over the object, check whether each value is an array, and do the concatenation.
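As a rough sketch of that idea (using Array.isArray on the datatable fields of the copy created above):
for (var key in object3.datatable) {
    if (Array.isArray(object3.datatable[key])) {
        // append the matching array from object2 onto the deep copy
        object3.datatable[key] = object3.datatable[key].concat(object2.datatable[key]);
    }
}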
I hope this helps.
I have this JSON response from an API call:
[
{
"id": 20599,
"name": "Deliver",
"options": [
{
"id": 63775,
"name": "Item",
"dataType": "SelectMultiOption",
"required": false,
"options": [
{
"id": 426,
"name": "Towels"
},
{
"id": 427,
"name": "Toothbrush"
},
{
"id": 428,
"name": "Pillow"
}
]
}
]
}
]
I am using this code to get the id of the "Deliver" service:
var data = JSON.parse(responseBody);
var loop_count = 0
for (count = 0; count < data.length; count++)
{
if (data[count].name == "Deliver")
{
var job_id = data[count].id;
postman.setEnvironmentVariable("service_id", job_id);
}
}
The questions are:
How can I get values from the "options" array? I need to get the "id": 63775 and store it as "item_id", and the "name": "Item" as "item_name", in Postman variables.
Then I need to select the "options" nested inside the "Item" record, pick the option with "name": "Toothbrush", and store its name in the Postman variable "svc_optn_optn_name" and its "id" in "svc_optn_optn_id".
Here is my own suggestion for your problem, in a few lines of code. I am not sure how you are going to use these values, and I also don't know whether the outer options array will always have exactly one item or more. I have just tried to answer your questions.
Please ask/comment if you have more doubts or if I am wrong.
I have created a function getAllPostmanDataFrom(obj) which takes an object as a parameter (the value of data[count]), gathers the necessary info into another object, postmanObj, and returns it to the caller.
function getAllPostmanDataFrom(obj) {
const item_id = obj.options[0].id;
const item_name = obj.options[0].name;
const svc_optn_optn_name = obj.options[0].options[1].name;
const svc_optn_optn_id = obj.options[0].options[1].id;
const postmanObj = {item_id, item_name, svc_optn_optn_id, svc_optn_optn_name}; // Return object
return postmanObj;
}
var data = [
{
"id": 20599,
"name": "Deliver",
"options": [
{
"id": 63775,
"name": "Item",
"dataType": "SelectMultiOption",
"required": false,
"options": [
{
"id": 426,
"name": "Towels"
},
{
"id": 427,
"name": "Toothbrush"
},
{
"id": 428,
"name": "Pillow"
}
]
}
]
}
]
var count = 0;
var obj = data[count];
var postmanObj = getAllPostmanDataFrom(obj);
//var {item_id, item_name, svc_optn_optn_id} = postmanObj;
console.log(postmanObj)
/*
console.log(item_id);
console.log(item_name);
console.log(svc_optn_optn_id);
console.log(svc_optn_optn_name);
*/
Finally, you can use the values contained in postmanObj as follows:
postman.setEnvironmentVariable("item_id", postmanObj.item_id);
postman.setEnvironmentVariable("item_name", postmanObj.item_name);
And so on.
This is the solution
var data = JSON.parse(responseBody); // parse the response into a variable named "data"
var loop_count = 0
for (count = 0; count < data.length; count++)
{
if (data[count].name == "Deliver")
{
var job_id = data[count].id;
postman.setEnvironmentVariable("service_id", job_id);
var job1_name = data[count].options[0].name;
postman.setEnvironmentVariable("item_name", job1_name);
var job2_id = data[count].options[0].id;
postman.setEnvironmentVariable("item_id", job2_id);
var job3_id = data[count].options[0].options[1].id;
postman.setEnvironmentVariable("svc_optn_optn_id", job3_id);
var job4_name = data[count].options[0].options[1].name;
postman.setEnvironmentVariable("svc_optn_optn_name", job4_name);
}
}
const data = JSON.parse(responseBody);
data.forEach(item => {
console.log(item.id); // deliver object id.
item.options.forEach(option => {
console.log(`Option Id ${option.id}`); // option id
postman.setEnvironmentVariable("service_id", option.id);
option.options.forEach(optionItem => {
if(optionItem.name == 'Toothbrush'){
postman.setEnvironmentVariable("svc_optn_optn_name", optionItem.name);
postman.setEnvironmentVariable("svc_optn_optn_id", optionItem.id);
}
});
});
});
I asked about this in a previous question, but I still didn't get my problem solved. I want to change the key of each object and prepend a path to its value in JavaScript, as shown below:
var dataObj =
[
{
"image": "a.jpg"
},
{
"image": "b.png"
},
..................
..................
];
I want to change it to this:
dataObj =
[
{
"src": "stackoverfloow.com/uploads/a.jpg"
},
{
"src": "stackoverfloow.com/uploads/b.png"
},
..........................................
..........................................
];
Thanks for the help.
Use .map:
const data1 = [
{ "image" : "a.jpg" },
{ "image" : "b.png"},
];
const data2 = data1.map(({ image }) => ({ src: 'stackoverfloow.com/uploads/' + image }));
console.log(data2);
var dataObj = [{ "image": "a.jpg" }, { "image": "b.png" }];
let resp = dataObj.map(x => ({"src": "stackoverfloow.com/uploads/" + x.image}));
console.log(resp);
I need to convert a large CSV data set to JSON; however, the output should be a JSON dictionary like this:
var products = {
"crystal": {
"description": "This is a crystal",
"price": "2.95"
},
"emerald": {
"description": "This is a emerald",
"price": "5.95"
}
};
The CSV table has name, description, and price columns.
I am using a script referenced here to generate the JSON:
var csv = require('csv')
var fs = require('fs')
var f = fs.createReadStream('Fielding.csv')
var w = fs.createWriteStream('out.txt')
w.write('[');
csv()
.from.stream(f, {columns:true})
.transform(function(row, index) {
return (index === 0 ? '' : ',\n') + JSON.stringify(row);
})
.to.stream(w, {columns: true, end: false})
.on('end', function() {
w.write(']');
w.end();
});
However the output from that script is created in this format:
[
{
"name": "crystal",
"description": "This is a crystal",
"price": "2.95"
},
{
"name": "emerald",
"description": "This is a emerald",
"price": "5.95"
}
]
How would I modify the script to get my desired "dictionary" format?
All you need to do is loop over the array and use item.name as the key for your dictionary object:
var products ={};
data.forEach(function(item){
products[item.name] = item;
});
This will leave the name property in the item, but that shouldn't be an issue.
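If you would rather drop the name property, a rest-destructuring variant (just a sketch) would be:
var products = {};
data.forEach(function (item) {
    var { name, ...rest } = item; // pull out the key, keep every other property
    products[name] = rest;
});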
I found the csv parser library most useful:
var csvText=`status,path,name,ext,checksum,size,document_service_id,document_service_path,message
success,./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE1.txt,expE1.txt,txt,38441337865069eabae7754b29bb43e1,414984,8269f7e3-3221-49bb-bb5a-5796cf208fd1,/neuroinftest/20170215/expE1.txt,
success,./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE10.txt,expE10.txt,txt,f27e46979035706eb0aaf58c26e09585,368573,2c94ed19-29c9-4660-83cf-c2148c3d6f61,/neuroinftest/20170215/expE10.txt,
success,./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE2.txt,expE2.txt,txt,e1040d9546423c823944120de0e5c46c,333308,b3898f5d-1058-4cf3-acf9-76759117b810,/neuroinftest/20170215/expE2.txt,
`
var csv = require('csv');
csv.parse(csvText, {columns: true}, function(err, data){
console.log(JSON.stringify(data, null, 2));
});
In the variable csvText I have my comma-separated file, with the first line serving as a header. I use the parse function and pass {columns: true} to indicate that the first line has the headers. The second parameter of the callback function (data) holds the result, with keys being the headers and values being the corresponding CSV cells. I use JSON.stringify to print it nicely, and the resulting object looks like this (it puts it into an array):
[
{
"status": "success",
"path": "./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE1.txt",
"name": "expE1.txt",
"ext": "txt",
"checksum": "38441337865069eabae7754b29bb43e1",
"size": "414984",
"document_service_id": "8269f7e3-3221-49bb-bb5a-5796cf208fd1",
"document_service_path": "/neuroinftest/20170215/expE1.txt",
"message": ""
},
{
"status": "success",
"path": "./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE10.txt",
"name": "expE10.txt",
"ext": "txt",
"checksum": "f27e46979035706eb0aaf58c26e09585",
"size": "368573",
"document_service_id": "2c94ed19-29c9-4660-83cf-c2148c3d6f61",
"document_service_path": "/neuroinftest/20170215/expE10.txt",
"message": ""
},
{
"status": "success",
"path": "./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE2.txt",
"name": "expE2.txt",
"ext": "txt",
"checksum": "e1040d9546423c823944120de0e5c46c",
"size": "333308",
"document_service_id": "b3898f5d-1058-4cf3-acf9-76759117b810",
"document_service_path": "/neuroinftest/20170215/expE2.txt",
"message": ""
}
]
UPD: This array can easily be turned into the object you need with reduce:
var res_obj = data.reduce(function(acc, cur, i) {
acc[cur.name] = cur;
return acc;
}, {});
In my case I use the name property as a key. Make sure it's unique.
I think something like this would work:
var products_arr = [{"name":"crystal","description":"This is a crystal","price":"2.95"},
{"name":"emerald","description":"This is a emerald","price":"5.95"}]
var products = {};
for (var i = 0, l = products_arr.length; i < l; ++i) {
var x = products_arr[i];
var name = x.name
delete x.name; // deletes name property from JSON object
products[name] = x;
}
This will output:
{
"crystal": {
"description": "This is a crystal",
"price": "2.95"
},
"emerald": {
"description": "This is a emerald",
"price": "5.95"
}
}
If you would like to modify your specific code, you could change the line
return (index === 0 ? '' : ',\n') + JSON.stringify(row);
to
var clonedRow = JSON.parse(JSON.stringify(row));
var key = clonedRow['name'];
delete clonedRow['name'];
var newRow = {};
newRow[key] = clonedRow;
return (index === 0 ? '' : ',\n') + JSON.stringify(newRow);
This creates a new object for each row, modifying the structure according to your requirement.
Your best bet is to use PapaParse, a powerful CSV parser/dumper. It supports streams, various string encodings, and header rows, and it's fast.
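A minimal sketch with PapaParse (assuming the papaparse npm package and that your CSV has a name column to key the dictionary on) might look like this:
const Papa = require('papaparse');
const fs = require('fs');

const csvText = fs.readFileSync('Fielding.csv', 'utf8');

// header: true turns each row into an object keyed by the header row
const { data } = Papa.parse(csvText, { header: true, skipEmptyLines: true });

const products = {};
data.forEach(row => {
    products[row.name] = row;
});

fs.writeFileSync('out.txt', JSON.stringify(products, null, 2));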