I have a function called tree, which takes an array of objects (as data fields from a database) and an array of strings for keys. The function loops through rowsArray and recursively creates an object with nested properties based on keysArray.
const tree = (rowsArray, keysArray) => {
return rows.reduce((acc, row) => {
const groupBy = (row, keys,) => {
const [first, ...rest] = keys;
if (!first) return [row];
return {
[row[first]]: groupBy(row, rest),
}
};
acc = {...groupBy(row, keys), ...acc};
return acc;
}, {});
}
The data is the following:
const data = [{
ID: 1,
Main: "Financial",
Sub: "Forecasts",
Detail: "General"
}, {
ID: 2,
Main: "Financial",
Sub: "HR",
Detail: "Headcount"
}];
const result1 = tree(data, ["Main", "Sub", "Detail"]);
console.log(result1);
When I log the result, I get:
/*
// actual output
{
Financial: {
Forecasts: {
General: [Array]
}
}
}
Whereas I would like to get the following:
// expected
{
Financial: {
Forecasts: {
General: [Array]
},
HR: {
Headcount: [Array]
}
}
}
*/
The problem is that the acc variable in the main function gets overwritten and I get a new object instead of an accumulated one, and I am not quite sure how to recursively build this object. I tried to pass instances of acc to the groupBy function (to remember previous results), but no luck.
Do you have any idea how I could rewrite the tree function or the groupBy function to accomplish my goal? Thanks!
You could do it like this:
function tree(rows, keys) {
return rows.reduce( (acc, row) => {
keys.reduce( (parent, key, i) =>
parent[row[key]] = parent[row[key]] || (i === keys.length - 1 ? [row] : {})
, acc);
return acc;
}, {});
}
const data = [{ID: 1,Main: "Financial",Sub: "Forecasts",Detail: "General"}, {ID: 2,Main: "Financial",Sub: "HR", Detail: "Headcount" }];
const result1 = tree(data, ["Main", "Sub", "Detail"]);
console.log(result1);
Be aware that the spread syntax makes a shallow copy. In this solution the accumulator is instead passed to the inner reduce, so the new row's hierarchical data is merged into the accumulator on the spot.
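To see why the original shallow merge loses branches, here is a minimal sketch (the values are made up for illustration): both objects contain a Financial key, and the spread keeps one of them wholesale instead of merging their children.
const existing = { Financial: { Forecasts: { General: [1] } } };
const incoming = { Financial: { HR: { Headcount: [2] } } };
// The last spread wins for the whole Financial key, so the HR branch is lost:
console.log({ ...incoming, ...existing }); // { Financial: { Forecasts: { General: [1] } } }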
The problem is that your merge is not deep. When you assign the values to the accumulator you overwrite existing properties, in this case Financial.
I included a deep merge function from here and now it works.
I also fixed some reference errors you had:
rows → rowsArray
keys → keysArray
// deep merge function
function merge(current, update) {
Object.keys(update).forEach(function(key) {
// if update[key] exist, and it's not a string or array,
// we go in one level deeper
if (current.hasOwnProperty(key) &&
typeof current[key] === 'object' &&
!(current[key] instanceof Array)) {
merge(current[key], update[key]);
// if update[key] doesn't exist in current, or it's a string
// or array, then assign/overwrite current[key] to update[key]
} else {
current[key] = update[key];
}
});
return current;
}
const tree = (rowsArray, keysArray) => {
return rowsArray.reduce((acc, row) => {
const groupBy = (row, keys) => {
const [first, ...rest] = keys;
if (!first) return [row];
return {
[row[first]]: groupBy(row, rest),
}
};
acc = merge(groupBy(row, keysArray), acc);
return acc;
}, {});
}
const data = [{
ID: 1,
Main: "Financial",
Sub: "Forecasts",
Detail: "General"
}, {
ID: 2,
Main: "Financial",
Sub: "HR",
Detail: "Headcount"
}];
const result1 = tree(data, ["Main", "Sub", "Detail"]);
console.log(result1);
You could iterate the keys, creating an object for every key except the last and an array for the last key, and then push the row into that array.
const tree = (rowsArray, keysArray) => {
return rowsArray.reduce((acc, row) => {
keysArray
.map(k => row[k])
.reduce((o, k, i, { length }) => o[k] = o[k] || (i + 1 === length ? []: {}), acc)
.push(row);
return acc;
}, {});
}
const data = [{ ID: 1, Main: "Financial", Sub: "Forecasts", Detail: "General" }, { ID: 2, Main: "Financial", Sub: "HR", Detail: "Headcount" }];
const result1 = tree(data, ["Main", "Sub", "Detail"]);
console.log(result1);
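For the sample data this produces roughly the following shape; note that the leaf arrays hold the full row objects, including ID:
/*
{
  Financial: {
    Forecasts: { General: [{ ID: 1, Main: "Financial", Sub: "Forecasts", Detail: "General" }] },
    HR: { Headcount: [{ ID: 2, Main: "Financial", Sub: "HR", Detail: "Headcount" }] }
  }
}
*/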
You can iterate over the data, create a unique key based on the keys provided, and then recursively generate the output structure by building and deep-merging the nested objects.
const data = [{
ID: 1,
Main: "Financial",
Sub: "Forecasts",
Detail: "General"
}, {
ID: 2,
Main: "Financial",
Sub: "HR",
Detail: "Headcount"
}];
function generateKey(keys,json){
return keys.reduce(function(o,i){
o += json[i] + "_";
return o;
},'');
}
function merge(first,second){
for(var i in second){
if(!first.hasOwnProperty(i)){
first[i] = second[i];
}else{
first[i] = merge(first[i],second[i]);
}
}
return first;
}
function generateTree(input,keys){
let values = input.reduce(function(o,i){
var key = generateKey(keys,i);
if(!o.hasOwnProperty(key)){
o[key] = [];
}
o[key].push(i);
return o;
},{});
return Object.keys(values).reduce(function(o,i){
var valueKeys = i.split('_');
var oo = {};
for(var index = valueKeys.length -2; index >=0 ;index--){
var out = {};
if(index === valueKeys.length -2){
out[valueKeys[index]] = values[i];
}else{
out[valueKeys[index]] = oo;
}
oo = out;
}
o = merge(o,oo);
return o;
},{});
}
console.log(generateTree(data,["Main", "Sub", "Detail"]));
jsFiddle Demo - https://jsfiddle.net/6jots8Lc/
Let's say I have an object containing objects that have 30 key-value pairs each:
const data = {
"foo": {
"3/16/21": 'buzz',
"3/17/21": 'fizz',
...
"4/13/21": 'lorem',
"4/14/21": 'ipsum'
},
"bar": {
"3/16/21": 'sit',
"3/17/21": 'amet',
...
"4/13/21": 'dummy',
"4/14/21": 'text'
},
};
My goal is to rebuild this object into something like this:
myData = [
  {date: "3/16/21", foo: 'buzz', bar: 'sit'},
  {date: "3/17/21", foo: 'fizz', bar: 'amet'},
  ...
  {date: "4/13/21", foo: 'lorem', bar: 'dummy'},
  {date: "4/14/21", foo: 'ipsum', bar: 'text'}
];
The function below works like a charm, but I feel like there is a 10x better way to do it. I would love to see your suggestions on how I could improve it.
const processAPIdata = (data) => {
if (data) {
var myData = [];
for (var key in data) {
if (!data.hasOwnProperty(key)) continue;
var obj = data[key];
for (var prop in obj) {
if (!obj.hasOwnProperty(prop)) continue;
if (myData.length < 30) {
myData.push({ date: prop });
}
let pos = myData.map(function (e) { return e.date; }).indexOf(prop);
myData[pos][key] = obj[prop];
}
}
}
return myData;
};
I'd group into an object indexed by date. When iterating, create the object for that date if it doesn't exist yet, with { date } (where date is the inner property being iterated over), and assign a new property from the outer key (for the new key) and the inner value (for the new value):
const data = {
"foo": {
"3/16/21": 'buzz',
"3/17/21": 'fizz',
"4/13/21": 'lorem',
"4/14/21": 'ipsum'
},
"bar": {
"3/16/21": 'sit',
"3/17/21": 'amet',
"4/13/21": 'dummy',
"4/14/21": 'text'
},
};
const newDataByDate = {};
for (const [key, obj] of Object.entries(data)) {
for (const [date, val] of Object.entries(obj)) {
newDataByDate[date] ??= { date };
newDataByDate[date][key] = val;
}
}
console.log(Object.values(newDataByDate));
You can accomplish this in a simple way:
const data = {"foo":{"3/16/21":'buzz',"3/17/21":'fizz',"4/13/21":'lorem',"4/14/21":'ipsum'},"bar":{"3/16/21":'sit',"3/17/21":'amet',"4/13/21":'dummy',"4/14/21":'text'},};
const result = Object.entries(data).reduce((acc, [key, values]) => {
for(const [date, v] of Object.entries(values)){
acc[date] = acc[date] || { date };
acc[date][key] = v;
}
return acc;
}, {});
console.log(Object.values(result));
We can achieve this using Object.entries, Array.reduce and Object.values like below:
const data = {"foo":{"3/16/21":'buzz',"3/17/21":'fizz',"4/13/21":'lorem',"4/14/21":'ipsum'},"bar":{"3/16/21":'sit',"3/17/21":'amet',"4/13/21":'dummy',"4/14/21":'text'}};
const formatData = (data) => {
//Convert the object to array of arrays with value at first index being the keys and value at second index being values
const result = Object.entries(data).reduce((acc, [key, val]) => {
//Since we have object as value, we need to again convert to array of arrays in order to get the date and the corresponding value
Object.entries(val).forEach(([date, innerVal]) => {
//update the accumulator with new key-value
acc[date] = {
...(acc[date] || {date}),
[key]: innerVal
}
})
return acc;
}, {});
//Return the values of the accumulator
return Object.values(result);
}
console.log(formatData(data));
I would like to merge an array with another array. The only catch is that each array is within an object.
Intuitively I tried {...arrObj, ...newArrObj}, however this leads to newArrObj overwriting items in arrObj.
const array = ['an', 'array'];
const newArray = [, , 'new', 'ehrray'];
const obj = {
key: { ...array
}
};
const newObj = {
key: { ...newArray
}
};
const merged = { ...obj,
...newObj
};
console.log(merged);
I would expect merged to be:
{
"key": {
"0": "an",
"1": "array",
"2": "new",
"3": "ehrray"
}
}
but receive
{
"key": {
"2": "new",
"3": "ehrray"
}
}
This might be useful:
const a0 = ['1', '2', undefined , undefined, '5', '6', '7'];
const a1 = [undefined, undefined, '3', '4'];
function merge(a, b) {
return a.map(function(v,i){ return v?v:b[i]});
}
console.log(a0.length > a1.length ? merge(a0, a1) : merge(a1, a0));
I wanted to add an update: I ended up going with a recursive merge to get the nested objects containing the arrays merged.
const array = ['an', 'array'];
const newArray = [, , 'new', 'ehrray'];
const obj = {
key: { ...array
}
};
const newObj = {
key: { ...newArray
}
};
const merge = (obj1, obj2) => {
const recursiveMerge = (obj, entries) => {
for (const [key, value] of entries) {
if (typeof value === "object") {
obj[key] = obj[key] ? { ...obj[key]
} : {};
recursiveMerge(obj[key], Object.entries(value))
} else {
obj[key] = value;
}
}
return obj;
}
return recursiveMerge(obj1, Object.entries(obj2))
}
console.log(merge(obj, newObj));
The idea is that there are unset values with only a few set, e.g. const newArray = new Array(4); newArray[2] = 'new';
{ value: null }, or even { value: undefined }, is not the same thing as an object like { foo: 42 } that has no value property at all. That's the reason that in your example "an" and "array" are overwritten by the values coming from newArray.
You can solve this particular example by swapping the order in which you add the arrays to the result, but as soon as both arrays contain null values there is no way to do it with spread syntax / Object.assign alone. You have to implement the behaviour yourself:
const array = new Array('an', 'array', null, null, "and", "more", "from", "array");
const newArray = new Array(null, null, 'new', 'ehrray');
function merge(a, b) {
const result = [];
for (let i = 0; i < a.length || i < b.length; ++i) {
result[i] = b[i] == null ? a[i] : b[i];
}
return result;
}
console.log(merge(array, newArray));
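For the two sample arrays this yields the combined array, with the trailing values preserved from the longer array:
// [ 'an', 'array', 'new', 'ehrray', 'and', 'more', 'from', 'array' ]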
Below is the code I am using for creating the new array when the id is the same in arr1 and arr2. But it doesn't work since arr1 and arr2 are different: array 1 has an index and arr2 is without an index. See the screenshot for reference. Can someone help?
Note: ID in arr1 is the same as EmpId in arr2
for(let i=0; i<arr1.length; i++) {
merged.push({
...arr1[i],
...(arr2.find((itmInner) => itmInner.id === arr1[i].id))}
);
}
console.log(merged);
Array1 looks like this:
[{"Active":1,"Id":1},
{"Active":1,"Id":3},
{"Active":1,"Id":2}]
Array2 looks something like this:
Below is the sample code showing how I am framing array 2:
renderElement(activity){
var arr2 = [] ;
for(var i = 0; i < activity.length; i++) {
obj = activity[i];
if(obj.Id == 28){
fetch(geturl)
.then(function (response) {
return response.json();
})
.then(function (data) {
res = data;
arr2.push(res)
})
}
else{
// Do nothing
}
}
return arr2
}
Calling Render method like below:
outputarray = currentComponent.renderElement(activity);
console.log('output', outputarray)
Expected Output:
[{"Active":1,"Id":1,"Param1": true},
{"Active":1,"Id":3}, / Keep it as such if nothing exists in other array
{"Active":1,"Id":2, "Param2": false}]
You can try this approach instead:
Example #1
const arr1 = [
{ "Active":1, "Id":1 },
{ "Active":1, "Id":3 },
{ "Active":1, "Id":2 }
];
const arr2 = [
{
0: [
{
EmpId1: 1, Param1: true
}
]
},
{
1: [
{
EmpId2: 2,Param2: false
}
]
},
{
2: [
{
EmpId3: 2
}
]
},
];
const response = arr1
.reduce((acc, value) => {
const secondaryData = arr2.map((val, index) => {
const { [`EmpId${index + 1}`]: Id, ...others } = val[Object.keys(val)][0];
return { Id, ...others };
});
const match = secondaryData.findIndex(({ Id }) => Id === value.Id);
if (match >= 0) acc.push({...value, ...secondaryData[match]})
else acc.push(value);
return acc;
}, []);
console.log(response);
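With these inputs, the logged response matches the expected output from the question:
// [
//   { Active: 1, Id: 1, Param1: true },
//   { Active: 1, Id: 3 },
//   { Active: 1, Id: 2, Param2: false }
// ]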
Example #2
const arr1 = [
{ "Active":1, "Id":1 },
{ "Active":1, "Id":3 },
{ "Active":1, "Id":2 }
];
const arr2 = [
[
{
EmpId1: 1,
Param1: true
}
],
[
{
EmpId2: 2,
Param2: false
}
],
[
{
EmpId3: 2
}
],
]
const response = arr1
.reduce((acc, value) => {
const secondaryData = arr2.map(([val], index) => {
const { [`EmpId${index + 1}`]: Id, ...others } = val;
return { Id, ...others };
});
const match = secondaryData.findIndex(({ Id }) => Id === value.Id);
if (match >= 0) acc.push({...value, ...secondaryData[match]})
else acc.push(value);
return acc;
}, []);
console.log(response);
Basically you can create a hash map keyed by an object property and join all the arrays on that property, i.e. reduce an array of arrays into a result object, then convert the object's values back to an array. Since each array is reduced, each array is only traversed once O(n), and the map object provides constant-time O(1) lookup to match objects. This keeps the solution closer to O(n), whereas other solutions with a nested O(n) findIndex search end up closer to O(n^2).
const mergeByField = (...arrays) => {
return Object.values(
arrays.reduce(
(result, { data, field }) => ({
...data.flat().reduce(
(obj, el) => ({
...obj,
[el[field]]: {
...obj[el[field]],
...el
}
}),
result
)
}),
{}
)
);
};
Load each array into a payload object that specifies the field key to match on. This will return all fields used to match by, but these can safely be ignored later, or removed, whatever you need. Example:
mergeByField(
{ data: arr1, field: "Id" },
{ data: arr2, field: "EmpId" },
);
const arr1 = [
{
Active: 1,
Id: 1
},
{
Active: 1,
Id: 2
},
{
Active: 1,
Id: 3
}
];
const arr2 = [[{ EmpId: 1, Param1: true }], [{ EmpId: 3, Param2: false }]];
const mergeByField = (...arrays) => {
return Object.values(
arrays.reduce(
(result, { data, field }) => ({
...data.flat().reduce(
(obj, el) => ({
...obj,
[el[field]]: {
...obj[el[field]],
...el
}
}),
result
)
}),
{}
)
);
};
console.log(
mergeByField({ data: arr1, field: "Id" }, { data: arr2, field: "EmpId" })
);
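For the sample arrays above, the merged result comes out roughly like this (the EmpId fields used for matching are kept, as noted above):
// [
//   { Active: 1, Id: 1, EmpId: 1, Param1: true },
//   { Active: 1, Id: 2 },
//   { Active: 1, Id: 3, EmpId: 3, Param2: false }
// ]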
I have data that looks like the following:
const data = [{
ratings: [ { rating: 5 } ],
counts: [ { count: 100 } ],
}];
And I want to flatten it, in the sense that I want to get rid of the arrays and have only objects, with the end result being:
const data = {
ratings: { rating: 5 },
counts: { count: 100 },
};
I tried to do something like this, but it is wrong and I believe I'm kind of overcomplicating it.
const flatten = data => {
return data.reduce((r, { ...children }) => {
Object.assign(children, r);
if (children) Object.assign(flatten(...Object.values(children)), r);
return r;
}, {})
}
Any ideas?
You could create a recursive function with the reduce method to turn all arrays into objects, assuming you have just objects in those arrays.
const data = [{ratings: [ { rating: 5 } ],counts: [ { count: 100 } ]}];
function flatten(arr) {
return arr.reduce((r, e) => {
const obj = Object.assign({}, e);
for (let p in obj) {
if (Array.isArray(obj[p])) {
obj[p] = flatten(obj[p])
}
}
return Object.assign(r, obj)
}, {})
}
console.log(flatten(data))
If by any chance the data is the result of JSON.parse:
var json = JSON.stringify( [{ratings:[{rating: 5}], counts:[{count: 100}]}] )
var result = JSON.parse(json, (k, v) => v[0] || v)
console.log( result )
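Note that the reviver runs on every parsed value, and v[0] on a string is its first character, so this trick is only safe when none of the values are strings:
// A string value would be truncated to its first character:
console.log(JSON.parse('{"a":"hi"}', (k, v) => v[0] || v)); // { a: "h" }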
Please check:
var data = [{ratings: [ { rating: 5 } ], counts: [ { count: 100 } ]}];
var flatten = function(data) {
if (Array.isArray(data)) {
data = data[0];
for (var key in data) data[key] = flatten(data[key]);
}
return data;
}
console.log(flatten(data));
Please check # CodePen
https://codepen.io/animatedcreativity/pen/842e17d2b9f83bc415513f937fc29be8
For a nested complex object or array, I would like to collect all values for a given property name. Example:
var structure = {
name: 'alpha',
array: [
{ name: 'beta' },
{ name: 'gamma' }
],
object: {
name: 'delta',
array: [
{ name: 'epsilon' }
]
}
};
// expected result: [ 'alpha', 'beta', 'gamma', 'delta', 'epsilon' ]
It's obvious how to achieve this using plain JS, but: Is there any elegant, concise approach using lodash?
[edit] Current variant below. Nicer solutions welcome!
function getPropertyRecursive(obj, property) {
var values = [];
_.each(obj, function(value, key) {
if (key === property) {
values.push(value);
} else if (_.isObject(value)) {
values = values.concat(getPropertyRecursive(value, property));
}
});
return values;
}
This can be done elegantly with the following mixin, which is a recursive version of _.toPairs:
_.mixin({
toPairsDeep: obj => _.flatMap(
_.toPairs(obj), ([k, v]) =>
_.isObjectLike(v) ? _.toPairsDeep(v) : [[k, v]])
});
then to get the result you want:
result = _(structure)
.toPairsDeep()
.map(1)
.value()
If there are scalar properties other than name, you'll have to filter them out:
result = _(structure)
.toPairsDeep()
.filter(([k, v]) => k === 'name')
.map(1)
.value()
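With the structure from the question, both forms produce the expected list, since name is the only scalar property there:
// [ 'alpha', 'beta', 'gamma', 'delta', 'epsilon' ]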
There's no Lodash/Underscore function that I know of that will do what you're looking for.
So what are you looking to do? Well, specifically you're looking to extract the values of all of the name properties out of an aggregate structure. How would we generalize that? In other words, if you were looking to add such functionality to Lodash/Underscore, how would you reframe the problem? After all, most people don't want to get the values of the name properties. You could create a generic function where you supply the name of the property you want, but... thinking even more abstractly than that, what you really want to do is visit all of the nodes in an aggregate structure and do something with them. If we consider aggregate structures in JavaScript as generic trees, we can take a recursive approach using a depth-first walk:
function walk(o, f) {
f(o);
if(typeof o !== 'object') return;
if(Array.isArray(o))
return o.forEach(e => walk(e, f));
for(let prop in o) walk(o[prop], f);
}
Now we can do what you're looking for by walking the structure and adding things to an array:
const arr = [];
walk(structure, x => { if (x !== undefined && x.name) arr.push(x.name); });
This isn't quite functional enough for my tastes, though...there's a side effect on arr here. So an even better generic approach (IMO) would be to allow a context object to ride along (or an accumulator if you will, a la Array#reduce):
function walk(o, f, context) {
f(o, context);
if(typeof o !== 'object') return context;
if(Array.isArray(o)) return o.forEach(e => walk(e, f, context)), context;
for(let prop in o) walk(o[prop], f, context);
return context;
}
Now you can call it like this, side-effect free:
const arr = walk(structure, (x, context) => {
if(x !== undefined && x.name) context.push(x.name);
}, []);
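As a small follow-up, here is a sketch of the generic "supply the property name" helper mentioned earlier, built on the walk above (the name collect is purely illustrative):
const collect = (obj, prop) => walk(obj, (x, context) => {
  // only objects/arrays can carry the property; primitives are skipped
  if (x !== null && typeof x === 'object' && prop in x) context.push(x[prop]);
}, []);
console.log(collect(structure, 'name')); // [ 'alpha', 'beta', 'gamma', 'delta', 'epsilon' ]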
Iterate the object recursively using _.reduce():
function getPropertyRecursive(obj, prop) {
return _.reduce(obj, function(result, value, key) {
if (key === prop) {
result.push(value);
} else if (_.isObjectLike(value)) {
return result.concat(getPropertyRecursive(value, prop));
}
return result;
}, []);
}
var structure = {
name: 'alpha',
array: [{
name: 'beta'
}, {
name: 'gamma'
}],
object: {
name: 'delta',
array: [{
name: 'epsilon'
}]
}
};
var result = getPropertyRecursive(structure, 'name');
console.log(result);
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.16.2/lodash.min.js"></script>
You could iterate the object and call the function again for arrays or objects, then get the wanted property.
'use strict';
function getProperty(object, key) {
function iter(a) {
var item = this ? this[a] : a;
if (this && a === key) {
return result.push(item);
}
if (Array.isArray(item)) {
return item.forEach(iter);
}
if (item !== null && typeof item === 'object') {
return Object.keys(item).forEach(iter, item);
}
}
var result = [];
Object.keys(object).forEach(iter, object);
return result;
}
var structure = { name: 'alpha', array: [{ name: 'beta' }, { name: 'gamma' }], object: { name: 'delta', array: [{ name: 'epsilon' }] } };
console.log(getProperty(structure,'name'));
Based on the answer (https://stackoverflow.com/a/39822193/3443096), here's another idea for a mixin:
_.mixin({
extractLeaves: (obj, filter, subnode, subpathKey, rootPath, pathSeparator) => {
var filterKv = _(filter).toPairs().flatMap().value()
var arr = _.isArray(obj) ? obj : [obj]
return _.flatMap(arr, (v, k) => {
if (v[filterKv[0]] === filterKv[1]) {
var vClone = _.clone(v)
delete vClone[subnode]
vClone._absolutePath = rootPath + pathSeparator + vClone[subpathKey]
return vClone
} else {
var newRootPath = rootPath
if (_.isArray(obj)) {
newRootPath = rootPath + pathSeparator + v[subpathKey]
}
return _.extractLeaves(
v[subnode], filter, subnode,
subpathKey, newRootPath, pathSeparator
)
}
})
}
});
This works for this example JSON, where you want to extract leaf nodes:
{
"name": "raka",
"type": "dir",
"children": [{
"name": "riki",
"type": "dir",
"children": [{
"name": "roko",
"type": "file"
}]
}]
}
Use it this way:
_.extractLeaves(result, {type: "file"}, "children", "name", "/myHome/raka", "/")
And you will get:
[
{
"name": "roko",
"type": "file",
"_absolutePath": "/myHome/raka/riki/roko"
}
]