I have an array of grouped objects, but I'm unable to iterate through it and achieve the desired result.
[ 000000010: [
{
"userId" : "000000010",
"played" : 10,
"lost" : 5,
"date" :"2019-04-01T00:00:00.000Z"
},
{
"userId" : "000000010",
"played": 15,
"lost" : 0,
"date" :"2019-04-02T00:00:00.000Z"
},
],
000000020: [
{
"userId" : "000000020",
"played": 11,
"lost" : 4,
"date" :"2019-04-01T00:00:00.000Z"
},
{
"userId" : "000000020",
"played": 15,
"lost" : 0,
"date" :"2019-04-02T00:00:00.000Z"
},
]
]
I want to eliminate all possible duplicates and group all similar objects as follows
{
"userId" : "000000010",
"played": 30,
"lost" : 5,
},
{
"userId" : "000000020",
"played": 26,
"lost" : 6,
},
I have tried
Object.entries()
but it returned
[object Object]
I have also tried
const allResults = {}
Object.keys(result).forEach(function(key) {
let chats = result[key].chats;
allResults[chats] = allResults[chats] ? allResults[chats] + 1 : 1;
});
But I get undefined
If you are looking to sum the played and lost fields, use reduce to merge the objects, summing the required fields, then convert the array of entries back into an object.
Try this
const inputData = {
"000000010":[
{
"userId":"000000010",
"played":10,
"lost":5,
"date":"2019-04-01T00:00:00.000Z"
},
{
"userId":"000000010",
"played":15,
"lost":0,
"date":"2019-04-02T00:00:00.000Z"
}
],
"000000020":[
{
"userId":"000000020",
"played":11,
"lost":4,
"date":"2019-04-01T00:00:00.000Z"
},
{
"userId":"000000020",
"played":15,
"lost":0,
"date":"2019-04-02T00:00:00.000Z"
}
]
};
const result = Object.entries(inputData).map(([key, values]) => {
const merged = values.reduce((accum, x) => {
accum.played += x.played;
accum.lost += x.lost;
return accum;
}, {"userId": key, "played": 0, "lost": 0});
return [key, merged];
});
console.log(Object.fromEntries(result));
Node prints the following
{
'000000010': { userId: '000000010', played: 25, lost: 5 },
'000000020': { userId: '000000020', played: 26, lost: 4 }
}
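If you also want the flat array shape from your expected output (without the userId keys), a small follow-up reusing the result array from above could be:
// keep only the merged objects, dropping the keys
const flatResult = result.map(([, merged]) => merged);
console.log(flatResult);
// [ { userId: '000000010', played: 25, lost: 5 },
//   { userId: '000000020', played: 26, lost: 4 } ]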
I have corrected the JSON data format and written this code. The following code does not delete the date key. Please tell me if this works for you.
function removeDuplicates() {
// Create an array of objects
var movies = [{
"000000010": [{
"userId": "000000010",
"played": 10,
"lost": 5,
"date": "2019-04-01T00:00:00.000Z"
},
{
"userId": "000000010",
"played": 15,
"lost": 0,
"date": "2019-04-02T00:00:00.000Z"
},
]
},
{
"000000020": [{
"userId": "000000020",
"played": 11,
"lost": 4,
"date": "2019-04-01T00:00:00.000Z"
},
{
"userId": "000000020",
"played": 15,
"lost": 0,
"date": "2019-04-02T00:00:00.000Z"
},
]
}
];
const jsonObject = movies.map(JSON.stringify);
const uniqueSet = new Set(jsonObject);
const uniqueArray = Array.from(uniqueSet).map(JSON.parse);
console.log(uniqueArray);
}
<p>
Click on the button to remove the duplicates in the array
</p>
<p>Check the console for the output</p>
<button onclick="removeDuplicates();">
Click here
</button>
I'm stuck on a (in my opinion) complex reduce method.
Given is an array of objects.
const data =
[
{
"key" : "test1",
"value" : 32,
"type" : "OUT"
},
{
"key" : "test1",
"value" : 16,
"type" : "OUT"
},
{
"key" : "test1",
"value" : 8,
"type" : "IN"
},
{
"key" : "test2",
"value" : 32,
"type" : "OUT"
},
{
"key" : "test2",
"value" : 16,
"type" : "IN"
},
{
"key" : "test2",
"value" : 8,
"type" : "OUT"
},
];
I want to get the sum of the values of the objects, grouped by the key attribute. There are two type values (IN, OUT), where OUT should be interpreted as a negative value.
So in the example above, I'm expecting the following result:
//-32 - 16 + 8 = -40
{
"key" : "test1",
"value" : -40,
"type" : "-"
},
//-32 + 16 - 8 = -24
{
"key" : "test2",
"value" : -24,
"type" : "-"
},
I'm grouping the data using the groupBy function of this SO answer.
Now I'm trying to get the sum using reduce with a filter, like in this SO answer.
However, it gives me the wrong sums (16 and 8), and since I use filter, only one type is considered.
Here is my code:
const data =
[
{
"key" : "test1",
"value" : 32,
"type" : "OUT"
},
{
"key" : "test1",
"value" : 16,
"type" : "OUT"
},
{
"key" : "test1",
"value" : 8,
"type" : "IN"
},
{
"key" : "test2",
"value" : 32,
"type" : "OUT"
},
{
"key" : "test2",
"value" : 16,
"type" : "IN"
},
{
"key" : "test2",
"value" : 8,
"type" : "OUT"
},
];
//group by key
const groupBy = function(xs, key) {
return xs.reduce(function(rv, x) {
(rv[x[key]] = rv[x[key]] || []).push(x);
return rv;
}, {});
};
const grouped = groupBy(data,"key");
for (const [key, value] of Object.entries(grouped))
{
let x = value.filter(({type}) => type === 'OUT')
.reduce((sum, record) => sum + record.value)
console.log(x);
}
//const filtered = grouped.filter(({type}) => type === 'OUT');
console.log(Object.values(grouped));
Question 1:
Why does the reduce give me the wrong sum for type OUT?
Question 2:
Is there a way to consider both types (IN, OUT) without doing the same procedure again?
You can combine the grouping and summing in one reduce(). If you initialize value to 0, you can always add (or subtract) the current value depending on the type.
const data = [{"key" : "test1", "value" : 32, "type" : "OUT"}, {"key" : "test1", "value" : 16, "type" : "OUT"}, {"key" : "test1", "value" : 8, "type" : "IN"}, {"key" : "test2", "value" : 32, "type" : "OUT"}, {"key" : "test2", "value" : 16, "type" : "IN"}, {"key" : "test2", "value" : 8, "type" : "OUT"}, ];
const res = data.reduce((p, c) => {
(p[c['key']] = p[c['key']] || { ...c, value: 0 });
p[c['key']].value =
(c.type === 'IN')
? (p[c['key']].value + c.value)
: (p[c['key']].value - c.value);
return p;
},{});
console.log(res)
Output:
{
"test1": {
"key": "test1",
"value": -40,
"type": "OUT"
},
"test2": {
"key": "test2",
"value": -24,
"type": "OUT"
}
}
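As for Question 1: in your original snippet you call reduce without an initial value, so the first array element (an object) is used as the accumulator and sum + record.value no longer adds plain numbers. Passing 0 as the initial value fixes that part, e.g.:
let x = value.filter(({ type }) => type === 'OUT')
    .reduce((sum, record) => sum + record.value, 0); // 0 is the initial accumulator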
I would break this into two problems:
How to reduce each data value (reduce)
How to evaluate existing/new values (switch)
This way your code is less coupled, and it affords you greater extensibility. Adding a new operator is as simple as adding a new case in the switch.
const reduceValue = (type, existingValue, newValue) => {
switch (type) {
case 'IN' : return existingValue + newValue;
case 'OUT' : return existingValue - newValue;
default : return existingValue; // or throw new Error(`Unsupported type: ${type}`)
}
};
const processValues = (data) =>
data.reduce((acc, { key, type, value }) => {
acc[key] ??= { key, type: '-', value: 0 };
acc[key].value = reduceValue(type, acc[key].value, value);
return acc;
},{});
const testData = [
{ "key" : "test1", "value" : 32, "type" : "OUT" },
{ "key" : "test1", "value" : 16, "type" : "OUT" },
{ "key" : "test1", "value" : 8, "type" : "IN" },
{ "key" : "test2", "value" : 32, "type" : "OUT" },
{ "key" : "test2", "value" : 16, "type" : "IN" },
{ "key" : "test2", "value" : 8, "type" : "OUT" }
];
console.log(processValues(testData))
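If you need the flat array of objects from the question rather than an object keyed by key, a possible follow-up, reusing processValues from above, is:
console.log(Object.values(processValues(testData)));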
I would create two functions for applying the sign and store them in an object keyed by type.
const applySign = { "IN": nr => +nr, "OUT": nr => -nr };
Then do a simple for...of loop (with object destructuring). If there is no running total yet for the current key, set the initial value to 0 (using the nullish coalescing assignment ??=). Finally, add the current value, with the sign applied, to the running total.
const sums = {};
for (const { key, value, type } of data) {
sums[key] ??= 0;
sums[key] += applySign[type](value);
}
const data = [
{ key: "test1", value: 32, type: "OUT" },
{ key: "test1", value: 16, type: "OUT" },
{ key: "test1", value: 8, type: "IN" },
{ key: "test2", value: 32, type: "OUT" },
{ key: "test2", value: 16, type: "IN" },
{ key: "test2", value: 8, type: "OUT" },
];
const applySign = { "IN": nr => +nr, "OUT": nr => -nr };
const sums = {};
for (const { key, value, type } of data) {
sums[key] ??= 0;
sums[key] += applySign[type](value);
}
console.log(sums);
With a few simple tweaks you can change the above into the output you're looking for:
const sums = {};
for (const { key, value, type } of data) {
sums[key] ??= { key, value: 0 };
sums[key].value += applySign[type](value);
}
const expected = Object.values(sums);
This gives you the base answer, though the type properties that you expect are currently missing. To add them you'll have to do another loop and check the final sum result.
for (const sum of expected) {
sum.type = sum.value < 0 ? "-" : "+";
}
const data = [
{ key: "test1", value: 32, type: "OUT" },
{ key: "test1", value: 16, type: "OUT" },
{ key: "test1", value: 8, type: "IN" },
{ key: "test2", value: 32, type: "OUT" },
{ key: "test2", value: 16, type: "IN" },
{ key: "test2", value: 8, type: "OUT" },
];
const applySign = { "IN": nr => +nr, "OUT": nr => -nr };
const sums = {};
for (const { key, value, type } of data) {
sums[key] ??= { key, value: 0 };
sums[key].value += applySign[type](value);
}
const expected = Object.values(sums);
console.log(expected);
// add type based on the value sign (don't know why)
for (const sum of expected) {
sum.type = sum.value < 0 ? "-" : "+";
}
console.log(expected);
If type is a static "-" and was not supposed to depend on the sign of value, then you can add it when you initially create the sum object.
sums[key] ??= { key, value: 0, type: "-" };
I need a function to filter an array of objects based on a given object structure. So I have this object:
{
"2": [
{
"fd_id": 16,
...others
}
],
"3": [
{
"fd_id": 2,
...others
},
{
"fd_id": 3,
...others
}
]
}
I would like to filter another array based on this object, like this:
const result = products.filter(item => {
// returns array of numbers [1, 2, 3]
const filters = item.filters;
if(filters){
// Here must be refactored
return ((filters.includes(givenObj[2][0].fd_id))
&& (filters.includes(givenObj[3][0].fd_id) || filters.includes(givenObj[3][1].fd_id)));
}
});
But this function must be dynamic, because the input object may change. So between parent groups an "&&" condition must be applied, and between children within a group an "||" condition. Thanks for any help. Here is a link to an example: https://jsfiddle.net/cadkt86n/
A function to loop over the data will help.
My logic:
Generate the list of fd_ids from the groups using Array.map.
Filter the products array: check for a matching combination in the filters node of each product. The condition is that there must be a match in each node of the fdIdList array.
Working Fiddle
var groups = {
"2": [
{ "fd_id": 16, "fd_fRef": 2, "fd_ad": "35 - 50", "fd_siraNo": 255, "checked": true }
],
"3": [
{ "fd_id": 2, "fd_fRef": 3, "fd_ad": "KURU", "fd_siraNo": 255, "checked": true },
{ "fd_id": 3, "fd_fRef": 3, "fd_ad": "KARMA", "fd_siraNo": 255, "checked": true }
]
}
// Aggregates the list of fd_ids - this will be an array of arrays
// [[16], [2, 3]] => this will be the value
const fdIdList = Object.values(groups).map(a => a.map(b => b.fd_id));
var products = [
{
"id": 1,
"filters": [2, 3, 4, 13, 16, 17, 18, 19, 31, 48, 309, 318],
},
{
"id": 2,
"filters": [2, 3, 4, 13, 15, 17, 18, 19, 31, 48, 309, 318],
}
];
// Check if there is a common element in each node of fdIdList
var result = products.filter(item => {
const filters = item.filters;
if (filters) {
let isFound = true;
fdIdList.forEach(idListNode => {
isFound = isFound && idListNode.filter(value => filters.includes(value)).length > 0;
})
return isFound
}
});
console.log(result)
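The same check can also be written without the mutable isFound flag, using every/some; a minimal sketch reusing fdIdList and products from above:
// keep a product only if every group contributes at least one matching fd_id
const result2 = products.filter(({ filters }) =>
    Array.isArray(filters) &&
    fdIdList.every(idListNode => idListNode.some(id => filters.includes(id)))
);
console.log(result2);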
Say I have a dictionary with nested sub-dictionaries:
let dict =
{
"SEATTLE" : {
"gross_sales" : 106766,
"price" : 584.50,
"dates" : [ {
"date" : "2020-03-13",
"total_sales_to_date" : 2,
"new_sales" : 2,
}
, {
"date" : "2020-03-19",
"total_sales_to_date" : 5,
"new_sales" : 3,
}
]
}
,
"PHOENIX" : {
"gross_sales" : 26691.5,
"price" : 292.25,
"dates" : [ {
"date" : "2020-03-13",
"total_sales_to_date" : 9,
"new_sales" : 9,
}
, {
"date" : "2020-03-19",
"total_sales_to_date" : 19,
"new_sales" : 10,
}
]
}
}
And I would like to normalise each numerical value against the corresponding values in the other sub-dictionaries, then append the results as new key/value pairs.
For the dates array of time-series data, I'd like to normalise each key/value pair in each date against both time (within the array) and the other locations on the same date (the other objects).
For example, this is what I'm seeking after the operation:
{
"SEATTLE" : {
"gross_sales" : 106766,
"normalised_gross_sales" : 1.0,
"price" : 584.50,
"normalised_price" : 1.0,
"dates" : [ {
"date" : "2020-03-13",
"total_sales_to_date" : 2,
"norm_total_sales_over_time" : 0.4,
"norm_total_sales_over_locations" : 0.22222222,
"new_sales" : 2,
}
, {
"date" : "2020-03-19",
"total_sales_to_date" : 5,
"norm_total_sales_over_time" : 1.0,
"norm_total_sales_over_locations" : 0.26315789,
"new_sales" : 3,
}
]
}
,
"PHOENIX" : {
"gross_sales" : 26691.5,
"normalised_gross_sales" : 0.25,
"price" : 292.25,
"normalised_price" : 0.5,
"dates" : [ {
"date" : "2020-03-13",
"total_sales_to_date" : 9,
"norm_total_sales_over_time" : 0.47368421,
"norm_total_sales_over_locations" : 1.0,
"new_sales" : 9,
}
, {
"date" : "2020-03-19",
"total_sales_to_date" : 19,
"norm_total_sales_over_time" : 1.0,
"norm_total_sales_over_locations" : 1.0,
"new_sales" : 10,
}
]
}
}
i.e. the total_sales_to_date value for the last date in each array should normalise to 1.0 as norm_total_sales_over_time,
and the largest total_sales_to_date value across all locations (SEATTLE, PHOENIX) for a given date should normalise to 1.0 as norm_total_sales_over_locations.
I'm finding this very complex to handle in JS. My actual task involves dictionaries with hundreds of sub-dictionaries to compare, so I'm looking for a scalable solution. In a pandas DataFrame this would be trivial; however, I'd like to learn how to approach this using modern JavaScript only, as I'm running this process from Node.js using an ES6 interpreter.
What is an effective ES6 JavaScript solution to this?
Here is a solution that returns the normalised values in the manner described:
let dict = {
"SEATTLE": {
"gross_sales": 106766,
"price": 584.50,
"dates": [{
"date": "2020-03-13",
"total_sales_to_date": 2,
"new_sales": 2,
}, {
"date": "2020-03-19",
"total_sales_to_date": 5,
"new_sales": 3,
}]
},
"PHOENIX": {
"gross_sales": 26691.5,
"price": 292.25,
"dates": [{
"date": "2020-03-13",
"total_sales_to_date": 9,
"new_sales": 9,
}, {
"date": "2020-03-19",
"total_sales_to_date": 19,
"new_sales": 10,
}]
}
}
function normaliseDict(_dict) {
// Object.values() is synchronous, so no async/await is needed here
let values = Object.values(_dict);
// make arrays with values from each key
let all_gross_sales = [];
let all_price = [];
let all_total_sales = {};
values.forEach((element) => {
all_gross_sales.push(element.gross_sales);
all_price.push(element.price);
let most_recent_total_sales_value = element.dates[element.dates.length - 1].total_sales_to_date;
element.dates.forEach((date, idx) => {
date.norm_total_sales_over_time = date.total_sales_to_date / most_recent_total_sales_value;
if (all_total_sales[date.date]) all_total_sales[date.date].push(date.total_sales_to_date);
else {
all_total_sales[date.date] = [];
all_total_sales[date.date].push(date.total_sales_to_date);
}
});
});
const newDict = values.map(ob => {
ob.gross_sales_norm = ob.gross_sales / Math.max(...all_gross_sales);
ob.price_norm = ob.price / Math.max(...all_price);
return ob;
});
values.forEach((element) => {
element.dates.forEach((date, idx) => {
date.norm_total_sales_over_locations_for_this_date = date.total_sales_to_date / Math.max(...all_total_sales[date.date]);
});
});
return _dict;
}
console.log(normaliseDict(dict));
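Note that normaliseDict mutates the input in place and uses slightly different key names than the question (gross_sales_norm, price_norm, norm_total_sales_over_locations_for_this_date). If you want the exact names from the question, a possible tweak, using the same max-based normalisation, is:
// inside the map over values
ob.normalised_gross_sales = ob.gross_sales / Math.max(...all_gross_sales);
ob.normalised_price = ob.price / Math.max(...all_price);
// inside the final forEach over dates
date.norm_total_sales_over_locations = date.total_sales_to_date / Math.max(...all_total_sales[date.date]);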
I want to convert an object into an array of objects that fits my needs. I prefer the simplest solution with the smallest amount of code to write. The JSON is stored in the "monitorings" variable.
monitorings = [
{
"id": 1,
"survey_id": 1,
"region_id": 9101,
"month_id": 1,
"target": 22,
"progress": 22,
"survey": {
"name": "HPG",
"category": "SHP"
},
},
{
"id": 2,
"survey_id": 1,
"region_id": 9102,
"month_id": 1,
"target": 10,
"progress": 10,
"survey": {
"name": "SHPED",
"category": "SHPED"
},
},
]
My brain can only get me as far as this code:
Object.entries(
monitorings.reduce((monitorings, monitoring) => {
const { name } = monitoring.survey
monitorings[name] = monitorings[name]
? [...monitorings[name], monitoring]
: [monitoring]
return monitorings
}, {})
)
actual output
[
"survey.name", [{grouped object}],
"survey.name", [{grouped object}],
]
expected output
[
"survey.category", [
"survey.name", [{grouped object}],
"survey.name", [{grouped object}],
]
,
"survey.category", [
"survey.name", [{grouped object}],
"survey.name", [{grouped object}],
],
]
Thanks for your help
- Edit -
The grouped objects have the same format as the original objects, like below:
[
{
"id": 2,
"survey_id": 1,
"region_id": 9102,
"month_id": 1,
"target": 10,
"progress": 10,
"survey": {
"name": "SHPED",
"category": "SHPED"
},
},
{same format as above},
{same format as above},
...
],
I found the answer here and modified it.
Object.entries(monitorings.reduce((map, obj) => {
!map[obj.survey["category"]]
? map[obj.survey["category"]] = {}
: [].concat(obj.survey["name"]).forEach(subEl => {
!map[obj.survey["category"]][subEl]
? map[obj.survey["category"]][subEl] = []
: map[obj.survey["category"]][subEl].push(obj);
})
return map;
}, {})
)
explanation
//return convert object into array of object
Object.entries(
//return new form of object
monitorings.reduce((map, obj) => {
//if empty
!map[obj.survey["category"]]
//create new empty object of survey["category"]
? map[obj.survey["category"]] = {}
//else combine all of returned object of survey["name"] into empty array of object
: [].concat(obj.survey["name"])
//iterate over obj.survey["name"]
.forEach(subEl => {
//if that object is empty
!map[obj.survey["category"]][subEl]
//create empty array of survey["category"][subEl]
? map[obj.survey["category"]][subEl] = []
//else push every element of filtered original JSON into array of survey["category"][subEl]
: map[obj.survey["category"]][subEl].push(obj);
})
//return grouped object
return map;
}, {})
)
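One thing to watch with the ternary version above: the branch that creates the empty object/array does not also push the current item, so the first monitoring seen for each category and for each name never ends up in a group. A minimal sketch that keeps those first items as well, using the same grouping idea but initialising before pushing, could be:
Object.entries(monitorings.reduce((map, obj) => {
    const category = obj.survey["category"];
    const name = obj.survey["name"];
    map[category] = map[category] || {};             // make sure the category bucket exists
    map[category][name] = map[category][name] || []; // make sure the name bucket exists
    map[category][name].push(obj);                   // always push the current monitoring
    return map;
}, {}));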
Elasticsearch nested aggregations allow you to effectively group by multiple fields, but what they return is buckets nested per field you group by.
What I need is an array of objects for each group combination.
My query:
{
index : 'stats',
type : 'click',
size : 0,
body : {
aggs : {
publisher : {
terms : {
field : 'publisherData.id'
},
aggs : {
advertiser : {
terms : {
field : 'advertiserData.id'
},
aggs : {
country : {
terms : {
field : 'request.location.country.iso_code'
},
aggs : {
revenue : {
sum : {
field : 'revenueData.data.USD'
}
},
cost : {
sum : {
field : 'costData.data.USD'
}
}
}
}
}
}
}
}
}
}
}
The result, limited to one entry per field. Normally there would be more, so all combinations of nested fields would have to be mapped to an array for display in a table.
{
"took": 562,
"timed_out": false,
"_shards": {
"total": 5,
"successful": 5,
"failed": 0
},
"hits": {
"total": 4812178,
"max_score": 0,
"hits": []
},
"aggregations": {
"publisher": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 3114671,
"buckets": [
{
"key": 4,
"doc_count": 1697507,
"advertiser": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 555390,
"buckets": [
{
"key": 5,
"doc_count": 1142117,
"country": {
"doc_count_error_upper_bound": 13807,
"sum_other_doc_count": 544585,
"buckets": [
{
"key": "us",
"doc_count": 424137,
"revenue": {
"value": 772282
},
"cost": {
"value": 53698.84903321415
}
}
]
}
}
]
}
}
]
}
}
}
What I need (normally there would be multiple objects here, one for each combination of nested fields):
[{
publisher:4,
advertiser:5,
country:'us',
cost:53698.84903321415,
revenue:772282
}]
What's the best way to get this result from the above nested structure or, even better and if possible, from Elasticsearch itself?
Any help is greatly appreciated.
In plain JavaScript, you could use an iterative and recursive approach, but I suggest using a feature of Elasticsearch to get the wanted result.
function getValues(object) {
    function iter(o, p) {
        var add = false;
        Object.keys(o).forEach(function (k) {
            // skip bucket metadata
            if (['key', 'doc_count'].indexOf(k) !== -1) {
                return;
            }
            // nested terms aggregation: recurse into each bucket,
            // extending the path with [aggregationName, bucketKey]
            if (Array.isArray(o[k].buckets)) {
                o[k].buckets.forEach(function (a) {
                    iter(a, p.concat([[k, a.key]]));
                });
                return;
            }
            // metric aggregation (e.g. sum): record its value and mark this level as a leaf
            add = true;
            p.push([k, o[k].value]);
        });
        // at a leaf, turn the collected [name, value] pairs into one flat object
        add && result.push(Object.assign({}, ...p.map(a => ({ [a[0]]: a[1] }))));
    }
    var result = [];
    iter(object.aggregations, []);
    return result;
}
var data = { took: 562, timed_out: false, _shards: { total: 5, successful: 5, failed: 0 }, hits: { total: 4812178, max_score: 0, hits: [] }, aggregations: { publisher: { doc_count_error_upper_bound: 0, sum_other_doc_count: 3114671, buckets: [{ key: 4, doc_count: 1697507, advertiser: { doc_count_error_upper_bound: 0, sum_other_doc_count: 555390, buckets: [{ key: 5, doc_count: 1142117, country: { doc_count_error_upper_bound: 13807, sum_other_doc_count: 544585, buckets: [{ key: "us", doc_count: 424137, revenue: { value: 772282 }, cost: { value: 53698.84903321415 } }] } }] } }] } } };
console.log(getValues(data));
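On the Elasticsearch side, one option (a sketch, assuming Elasticsearch 6.1+ where the composite aggregation is available, and the same field names as in the question) is to let ES produce one bucket per combination directly:
const body = {
    size: 0,
    aggs: {
        combos: {
            composite: {
                size: 1000, // page size; use the returned after_key to fetch further pages
                sources: [
                    { publisher: { terms: { field: 'publisherData.id' } } },
                    { advertiser: { terms: { field: 'advertiserData.id' } } },
                    { country: { terms: { field: 'request.location.country.iso_code' } } }
                ]
            },
            aggs: {
                revenue: { sum: { field: 'revenueData.data.USD' } },
                cost: { sum: { field: 'costData.data.USD' } }
            }
        }
    }
};
// Each bucket then already represents one combination, so flattening is trivial:
// response.aggregations.combos.buckets.map(b => ({ ...b.key, revenue: b.revenue.value, cost: b.cost.value }));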