Parse JSON with duplicate keys in JavaScript

I am working with the Twitter API for trends (see: https://developer.twitter.com/en/docs/trends/trends-for-location/api-reference/get-trends-place).
The API returns the following JSON:
{
trends: [
{
name: 'Boris',
url: 'http://twitter.com/search?q=Boris',
promoted_content: null,
query: 'Boris',
tweet_volume: 1083274
},
{
name: '#COVID19',
url: 'http://twitter.com/search?q=%23COVID19',
promoted_content: null,
query: '%23COVID19',
tweet_volume: 2088454
},
{
name: '#WorldHealthDay',
url: 'http://twitter.com/search?q=%23WorldHealthDay',
promoted_content: null,
query: '%23WorldHealthDay',
tweet_volume: 250817
}
],
as_of: '2020-04-07T14:06:49Z',
created_at: '2020-04-07T14:03:32Z',
locations: [ { name: 'London', woeid: 44418 } ]
}
I would like to transform this into a JavaScript array containing all of the values where the key is name, i.e.:
arr = ["Boris", "#COVID19", "#WorldHealthDay"]
How can I achieve this? From what I have read, native JavaScript JSON parsers cannot handle duplicate keys.

your data:
trends = [
{
name: 'Boris',
url: 'http://twitter.com/search?q=Boris',
promoted_content: null,
query: 'Boris',
tweet_volume: 1083274
},
{
name: '#COVID19',
url: 'http://twitter.com/search?q=%23COVID19',
promoted_content: null,
query: '%23COVID19',
tweet_volume: 2088454
},
{
name: '#WorldHealthDay',
url: 'http://twitter.com/search?q=%23WorldHealthDay',
promoted_content: null,
query: '%23WorldHealthDay',
tweet_volume: 250817
}
];
then use map
var ans = trends.map(d => d.name)
results:
ans = ["Boris", "#COVID19", "#WorldHealthDay"]

Arrays can contain duplicate VALUES, which is what you've requested. Arrays do not have keys, and these strings in the array are also values of the 'name' key inside the JSON returned from the Twitter API.
To solve your problem, for example with a very basic iteration and no Array.map:
const data = JSON.parse(data_str); // data_str is the raw JSON string returned by the API
const arr = [];
for (const trend of data.trends) {
arr.push(trend.name);
}

Related

Is there a way to transform a JavaScript object into another object with cleaner formatting?

When accessing a public register API, I receive more information than I need, and sometimes the data is returned with minor variations. I would like to delete some unnecessary fields, move nested fields to the top level, and rename them. The goal is to standardise format across several different APIs, and keep the memory requirement to a minimum. Example below:
Raw object:
[
{
startDate: "2022/08/27",
expiryDate: "2025/08/27",
party: {
type: "Business",
name: "Irregular Expressions Inc."
},
location: {
type: "Office",
address: {
locality: "Boston",
postcode: "PE21 8QR"
}
}
},
{
startDate: "2023/12/22",
expiryDate: "2024/06/22",
party: {
type: "Charity",
name: "Save the Badgers"
},
site: {
type: "Office",
address: {
locality: "Badgerton",
postcode: "BA6 6ER"
}
}
},
]
I want to transform this into a smaller, cleaner array:
[
{
startDate: "2022/08/27",
expiryDate: "2025/08/27",
partyName: "Irregular Expressions Inc.",
location: "Boston"
},
{
startDate: "2023/12/22",
expiryDate: "2024/06/22",
partyName: "Save the Badgers",
location: "Badgerton"
},
]
I have tried the below, but I'm getting an error.
module.exports = {
testTransform: (inputArray) => {
const outputArray = []
inputArray.forEach(element => {
outputArray.push({
startDate: element.startDate,
expiryDate: element.expiryDate,
partyName: element.party.name,
location: element.location.address.locality
})
})
return JSON.stringify(outputArray, null, ' ')
}
}
TypeError: Cannot read properties of undefined (reading 'address')
Am I going in the right direction, or is there a simpler way of doing this? I've searched for this type of transformation but with no luck - what am I missing?
You could take either location or site with logical OR || and then access the properties with the optional chaining operator ?.:
const
data = [{ startDate: "2022/08/27", expiryDate: "2025/08/27", party: { type: "Business", name: "Irregular Expressions Inc." }, location: { type: "Office", address: { locality: "Boston", postcode: "PE21 8QR" } } }, { startDate: "2023/12/22", expiryDate: "2024/06/22", party: { type: "Charity", name: "Save the Badgers" }, site: { type: "Office", address: { locality: "Badgerton", postcode: "BA6 6ER" } } }],
result = data.map(o => ({
startDate: o.startDate,
expiryDate: o.expiryDate,
partyName: o.party.name,
location: (o.location || o.site)?.address?.locality
}));
console.log(result);
Since it looks like you don't know what the outer key will be for the object with the address property, and assuming each object always has four properties, you can use rest syntax when destructuring to collect the final property into a single object, and then take that object's values to get to the address.
const input=[{startDate:"2022/08/27",expiryDate:"2025/08/27",party:{type:"Business",name:"Irregular Expressions Inc."},location:{type:"Office",address:{locality:"Boston",postcode:"PE21 8QR"}}},{startDate:"2023/12/22",expiryDate:"2024/06/22",party:{type:"Charity",name:"Save the Badgers"},site:{type:"Office",address:{locality:"Badgerton",postcode:"BA6 6ER"}}}];
const output = input.map(({
startDate,
expiryDate,
party,
...rest
}) => ({
startDate,
expiryDate,
partyName: party.name,
location: Object.values(rest)[0].address.locality,
}));
console.log(output);
You are trying to read the locality property of undefined. You could use the optional chaining operator to prevent the exception from being thrown, i.e. use something like element?.location?.address?.locality instead of element.location.address.locality.
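A minimal sketch of that change applied to the testTransform function from the question (note that entries which use site instead of location will still end up with location: undefined):
module.exports = {
  testTransform: (inputArray) => {
    const outputArray = []
    inputArray.forEach(element => {
      outputArray.push({
        startDate: element.startDate,
        expiryDate: element.expiryDate,
        partyName: element.party?.name,
        // optional chaining: no TypeError when location or address is missing
        location: element.location?.address?.locality
      })
    })
    return JSON.stringify(outputArray, null, ' ')
  }
}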
That would require writing a function that recursively goes through the contents of an object and returns a non-nested object. The function below is such a function.
const flattenObject = (obj) => {
let result = {};
for (const i in obj) {
if ((typeof obj[i]) === 'object') {
const temp = flattenObject(obj[i]);
for (const j in temp) {
result[j] = temp[j];
}
}
else {
result[i] = obj[i];
}
}
return result;
};
The function can then be called on each nested object in the array. The map method of arrays does nicely for that step.
const result = nested.map(n => flattenObject(n))
console.table(result[0]) would produce the output below
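Tracing flattenObject over the first raw object from the question gives roughly the following; note that duplicate inner keys collide, so party.type ("Business") is overwritten by location.type ("Office"):
{
  startDate: "2022/08/27",
  expiryDate: "2025/08/27",
  type: "Office",
  name: "Irregular Expressions Inc.",
  locality: "Boston",
  postcode: "PE21 8QR"
}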

Impossible to query nested mongoose array?

I want to query an array with a regex inside a mongoose (MongoDB) model.
I want to search inside the nested array of the ProductModel:
const productSchema = new schema(
{
name: requiredString,
sku: requiredUniqueNumber,
ean: requiredUniqueNumber,
suppliers: [{ type: mongoose.Schema.Types.ObjectId, ref: SupplierModel }],
categories: [{ type: mongoose.Schema.Types.ObjectId, ref: CategoryModel }],
mainImage: requiredString,
images: [{ type: String }],
description: requiredString,
stock: requiredNumber,
price: requiredNumber,
totalOrders: requiredNumber,
reviews: [review],
},
{
timestamps: true,
count: true,
}
);
The model inside the "suppliers" array is:
const supplierSchema = new schema(
{
supplierName: requiredUniqueString,
invoiceAddress: address,
visitAddress: address,
status: supplierStatusEnum,
contacts: address,
accountType: accountTypeEnum,
logo: requiredString,
user: { type: schema.Types.ObjectId, ref: "products" },
},
{
timestamps: true,
}
);
Now here's the problem: if I query and populate() I get all the results, but for some reason I cannot search inside the array containing several suppliers. Here's what I have:
foundProducts = await ProductModel.find({
$or: [
{
name: {
$regex: regex,
},
},
{
"suppliers.supplierName": {
$regex: regex,
},
},
{
description: {
$regex: regex,
},
},
],
});
If it finds that a supplier matches the regex, it should give back the whole product model containing that supplier.
What is the best way to search all the items inside a nested array?
PS: I'm a junior developer coming from PostgreSQL, so bear with me while I'm trying this noSQL "thing" :)
I was doing the wrong query. I need to do
{
"suppliers._id": {
$regex: regex,
},
},
I can only search on _id, since that is the way I "modeled" it: suppliers only stores ObjectId references, not embedded supplier documents, so fields like supplierName are not available to the query.
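One way to still match on supplier names under this schema would be a two-step query; a minimal sketch, assuming SupplierModel is the exported mongoose model for supplierSchema:
const matchingSuppliers = await SupplierModel.find(
  { supplierName: { $regex: regex } },
  { _id: 1 }
);
const supplierIds = matchingSuppliers.map(s => s._id);

foundProducts = await ProductModel.find({
  $or: [
    { name: { $regex: regex } },
    { description: { $regex: regex } },
    // matches products whose suppliers array contains any of the found ids
    { suppliers: { $in: supplierIds } },
  ],
});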

What CSV format corresponds to a JSON array, and how to convert it back into a JSON array in Node.js

Good day, I have been searching for a few hours and couldn't find the result that I want. First of all, I do not know what a CSV looks like for the following JSON array.
[
{ email: "test#test.com",
contactNo: "123456",
meta: [
{key: "Name", value: "Kevin XYZ", type: "string"},
{key: "Position", value: "Chairman", type: "string"}]
}
]
What is the CSV format for this JSON array, and how do I convert from CSV back to a JSON array in Node.js?
Think of your CSV file just like an Excel sheet, where you only have columns and rows, so with your preferred structure you need to define at least these columns:
|| email || contactNo || name || position ||
Those 4 are the most basic header columns your CSV needs; then you'll need a transformer function to handle the data structure you want (see the sketch after the example output below).
In my experience I use https://www.papaparse.com/ as the CSV parser; you can have a look at the samples and try it yourself.
In your case, the parser function would be like this:
const parsed = await Papa.parse(csvString, {
header: true,
trimHeaders: true,
skipEmptyLines: 'greedy',
// isString and trimAll are the answerer's own helpers (a string check and a whitespace trimmer), not part of Papa Parse
transformHeader: val => isString(val) ? trimAll(val) : val,
transform: (val, headerName) => {
return isString(val) ? trimAll(val) : val
}
});
Example output after parsing:
[
{
"email": "user1#user.com",
"contactNo": "1234-5678",
"name": "Doe",
"position": "doctor",
},
{
"email": "user2#user.com",
"contactNo": "1234-5678",
"name": "John",
"position": "developer"
},
...
]
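The parsed rows above are still flat. A minimal sketch, assuming the four columns email, contactNo, name and position from above, of a transformer that rebuilds the nested meta structure from the question:
// parsed.data is the array of row objects produced by Papa.parse above
const toNested = (rows) => rows.map(row => ({
  email: row.email,
  contactNo: row.contactNo,
  meta: [
    { key: "Name", value: row.name, type: "string" },
    { key: "Position", value: row.position, type: "string" }
  ]
}));

const nested = toNested(parsed.data);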
This is the CSV format for your JSON structure; you can use an online converter to see what your CSV will look like.
This is how you can convert the CSV to JSON. You can refer to a tutorial on converting CSV to JSON; the function used there is below.
function csvJSON(csv) {
  var lines = csv.split("\n");
  var result = [];
  var headers = lines[0].split(","); // first line holds the column names
  for (var i = 1; i < lines.length; i++) {
    var obj = {};
    var currentline = lines[i].split(",");
    for (var j = 0; j < headers.length; j++) {
      obj[headers[j]] = currentline[j]; // map each cell to its header
    }
    result.push(obj);
  }
  return JSON.stringify(result); // JSON string
}
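For example, with a header row and one data row:
var json = csvJSON("email,contactNo\ntest#test.com,123456");
// json === '[{"email":"test#test.com","contactNo":"123456"}]'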
You can use json2csv npm module http://www.mircozeiss.com/json2csv/
I found out that using npm csvtojson is much easier.
CSV:
email, contactNo, meta.0.key, meta.0.value, meta.0.type, meta.1.key, meta.1.value, meta.1.type
test#test.com, 12345, Name, Kevin XYZ, string, Position, Chairman, string
test1#test.com, 321433, Name, John ABC, string, Position, Accountant, string
by using the following code:
const csv = require('csvtojson');
const csvStr = "The csv above"
csv()
.fromString(csvStr)
.subscribe((jsonObj) => {
console.log(jsonObj);
});
The resulting JSON will be:
{ email: 'test#test.com',
contactNo: '12345',
meta:
[ { key: 'Name', value: 'Kevin XYZ', type: 'string' },
{ key: 'Position', value: 'Chairman', type: 'string' } ] }
{ email: 'test1#test.com',
contactNo: '321433',
meta:
[ { key: 'Name', value: 'John ABC', type: 'string' },
{ key: 'Position', value: 'Accountant', type: 'string' } ] }
In conclusion, using headers like meta.0.key is not so user friendly; I recommend just using what Duc Hong said (headers email, contactNo, Name, Position) and transforming to the format that you want.

Ramda.js Combine two arrays of objects that share the same property ID

I have these two arrays of objects:
todos: [
{
id: 1,
name: 'customerReport',
label: 'Report send to customer'
},
{
id: 2,
name: 'handover',
label: 'Handover (in CRM)'
},
]
And:
todosMoreDetails: [
{
id: 1,
checked: false,
link: {
type: 'url',
content: 'http://something.com'
},
notes: []
},
{
id: 2,
checked: false,
link: {
type: 'url',
content: 'http://something.com'
},
notes: []
}
]
So that the final array of objects will be a combination of the two, based on the object ID, like below:
FinalTodos: [
{
id: 1,
checked: false,
link: {
type: 'url',
content: 'http://something.com'
},
notes: [],
name: 'customerReport',
label: 'Report send to customer'
},
{
id: 2,
checked: false,
link: {
type: 'url',
content: 'http://something.com'
},
notes: [],
name: 'handover',
label: 'Handover (in CRM)'
}
]
I tried with merge, mergeAll and mergeWithKey, but I am probably missing something.
You can achieve this with an intermediate groupBy:
Transform the todosMoreDetails array into an object keyed by todo property ID using groupBy:
var moreDetailsById = R.groupBy(R.prop('id'), todosMoreDetails);
moreDetailsById is an object where the key is id, and the value is an array of todos. If the id is unique, this will be a singleton array:
{
1: [{
id: 1,
checked: false,
link: {
type: 'url',
content: 'http://something.com'
},
notes: []
}]
}
Now transform the todos array by merging each todo with the details you retrieve from the grouped view:
var finalTodos = R.map(todo => R.merge(todo, moreDetailsById[todo.id][0]), todos);
An alternative, more detailed way:
function mergeTodo(todo) {
var details = moreDetailsById[todo.id][0]; // this is not null safe
var finalTodo = R.merge(todo, details);
return finalTodo;
}
var moreDetailsById = R.groupBy(R.prop('id'), todosMoreDetails);
var finalTodos = todos.map(mergeTodo);
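Since the ids are unique here, a possible refinement (not part of the original answer) is R.indexBy, which keys each detail object directly by its id and avoids the singleton arrays:
var moreDetailsById = R.indexBy(R.prop('id'), todosMoreDetails);
var finalTodos = R.map(todo => R.merge(todo, moreDetailsById[todo.id]), todos);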
I guess merge is only used for arrays. Have a search for object "extend". Maybe not storing the todo details in separate objects is the better solution.
Using jQuery? https://api.jquery.com/jquery.extend/
Using underscore? http://underscorejs.org/#extend
Native approach? https://gomakethings.com/vanilla-javascript-version-of-jquery-extend/
Using underscore:
var result = [];
var entry = {};
_.each(todos, function(todo) {
_.each(todosMoreDetails, function(detail) {
if (todo.id == detail.id) {
entry = _.extend(todo, detail);
result.push(entry);
}
});
});
return result;

How to save form data as objects and arrays of objects to MongoDB using a loop in Node.js?

I am new to MongoDB. I am using Node.js, Express 4 and mongoose (MongoDB) for my project. I am stuck saving form data to MongoDB within a loop, and my model contains objects and arrays of objects as well.
Model :
var Subscriber = new Schema({
first_name: String,
emergency_contact_1: {
name: String,
number: [{
type: String
}]
},
residential: {
phone: String,
address: String,
...
},
medications: [
{
visit_id: { type: Object },
name: String,
....
}],
food_allergies: [
{type: String}
],
....
});
Controller :
I want to save data in this way:
var subscriber = new Subscriber();
//Here I am trying to save all form's fields to mongoBD fields.
for (var field in form_data) {
subscriber[field] = form_data[field];
}
subscriber.save(function (err, instance) {
if (err) {
console.log("error");
return res.send("...");
}
console.log("saved successfully");
});
Normal fields are getting saved properly using the above loop, but objects and arrays of objects are not getting saved to MongoDB.
Is there a solution, or another way to insert/save data through a loop to a MongoDB model?
Any help would be appreciated. Thank you!
Node.js
var PersonSchema = new Schema({
name: {
given: {
type: String,
required: true
},
family: {
type: String,
required: true
}
},
children: [PersonSchema]
});
var Person = mongoose.model('Person', PersonSchema);
app.post('/person', function (req, res) {
Person.create(req.body)
.then(function (created) {
console.log(created);
res.json(created.id);
});
});
Client
$.ajax({
url: '/person',
type: 'POST',
data: {
name: {
family: 'Green'
},
children: [{
name: {
given: 'Matt',
family: 'Green'
}
}, {
name: {
given: 'Dave',
family: 'Green'
}
}]
}
})
As you can see, I have nested objects and arrays. This works fine for me :)
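Applied back to the question's Subscriber model, a minimal sketch (assuming form_data already has the same nested shape as the schema) would be to pass the whole object to create instead of copying fields one by one:
Subscriber.create(form_data)
  .then(function (saved) {
    console.log("saved successfully");
    res.json(saved.id);
  })
  .catch(function (err) {
    console.log("error", err);
    res.send("...");
  });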
