Extracting values from an Array of nested Objects in JavaScript

I have an array of objects and I'm trying to extract the objects whose values match the values in another array.
const A = [{_id: 'a', name: '1'}, {_id: 'b', name: '2'}, {_id: 'c', name: '3'}] and const B = ['2', '3']. So I want to match the values of array B against array A and get the matching objects into an array C, like const C = [{_id: 'b', name: '2'}, {_id: 'c', name: '3'}]
const C = A.forEach((list) => {
  let key = []
  if (list.includes[B]) {
    key.push(list)
  }
})
I am stuck here; how can I push those objects into array C?

You could filter the array.
const
  arrA = [{ _id: 'a', name: '1' }, { _id: 'b', name: '2' }, { _id: 'c', name: '3' }],
  arrB = ['2', '3'],
  arrC = arrA.filter(({ name }) => arrB.includes(name));

console.log(arrC);
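As a side note (not part of the original answer): if arrB were large, the repeated includes() scan could be replaced by a Set lookup. A minimal sketch of that variation, using the same data; the wanted name is just for illustration:
const arrA = [{ _id: 'a', name: '1' }, { _id: 'b', name: '2' }, { _id: 'c', name: '3' }];
const arrB = ['2', '3'];

// Build a Set once so each membership check is O(1) instead of scanning arrB.
const wanted = new Set(arrB);
const arrC = arrA.filter(({ name }) => wanted.has(name));

console.log(arrC); // [{ _id: 'b', name: '2' }, { _id: 'c', name: '3' }]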

When you say matched value, it seems as if you're trying to match the name value.
If that's the case, this should work:
const A = [
  {_id: 'a', name: '1'},
  {_id: 'b', name: '2'},
  {_id: 'c', name: '3'}
];
const B = ['2', '3'];
const C = [];
A.forEach((item) => {
  if (B.filter(x => x == item.name).length > 0) {
    C.push(item)
  }
});

I hope I have been helpful
const arrA = [{ _id: 'a', name: '1' }, { _id: 'b', name: '2' }, { _id: 'c', name: '3' }];
const arrB = ['2', '3'];
var arrC = [];
arrB.forEach(element => {
  arrA.forEach(element2 => {
    if (element === element2.name) {
      arrC.push(element2)
    }
  });
});
console.log(arrC);


How to transform array of objects to new array of objects and count the keys [duplicate]

The code is something like this:
let inputArray=[
{Id: '1', Name: 'Ani'},
{Id: '2', Name: 'George'},
{Id: '4', Name: 'George'},
{Id: '5', Name: 'Ani'}];
I need to make a new array with objects in this format:
let result=[
{ Name: 'Ani', count:2},
{ Name: 'George',count:2},
{ Name: 'Henry',count:1}];
Any idea please? :)
You can try to use a Hash & Array.reduce here.
const hash = inputArray.reduce((memo, item) => {
  memo[item.Name] = memo[item.Name] || { name: item.Name, count: 0 }
  memo[item.Name].count++
  return memo
}, {})
result = Object.values(hash)
You can use this function to count the repetition of any field given the field name
let inputArray=[
{Id: '1', Name: 'Ani'},
{Id: '2', Name: 'George'},
{Id: '4', Name: 'George'},
{Id: '5', Name: 'Ani'}];
function countFields(input, field) {
  const count = {};
  input.forEach(e => {
    const v = e[field];
    if (!count[v]) count[v] = 0;
    count[v]++;
  })
  return count;
}
const result = countFields(inputArray, "Name")
console.log(result)
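If the array-of-objects shape from the question is needed, the count object returned by countFields can be mapped over its entries. A small sketch building on the code above (the asArray name is just for illustration):
// { Ani: 2, George: 2 } -> [{ Name: 'Ani', count: 2 }, { Name: 'George', count: 2 }]
const asArray = Object.entries(result).map(([Name, count]) => ({ Name, count }));
console.log(asArray);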
Another solution with reduce:
let inputArray = [
{Id: '1', Name: 'Ani'},
{Id: '2', Name: 'George'},
{Id: '4', Name: 'George'},
{Id: '5', Name: 'Ani'}];
const counts = inputArray.reduce((acc, curr) => {
  const valIndex = acc.findIndex(val => val.name === curr.Name)
  if (valIndex >= 0) {
    acc[valIndex].count += 1
  } else {
    acc.push({name: curr.Name, count: 1})
  }
  return acc
}, [])
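For reference, logging counts with the inputArray above should yield:
console.log(counts); // [{ name: 'Ani', count: 2 }, { name: 'George', count: 2 }]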

How to get the intersection of two sets while recognizing equal set values/items not only by reference but by their equal structures and entries too?

I have to deal with two Set instances.
const set1 = new Set([
{ name: 'a' },
{ name: 'b', lastname: 'bb' },
{ name: 'c' },
{ name: 'd' },
]);
const set2 = new Set([
{ name: 'b' },
{ name: 'd' },
]);
Any object within a set will feature several, and also distinct, keys and values. The goal is to find structurally equal objects (same keys and values) in both sets, which is ... the intersection of equal data items of set1 and set2.
In the following example the expected result is [ { name: 'd' } ] ...
console.log([...set1].filter(item => set2.has(item)));
... but it logs an empty array / [] instead.
An object features more than 20 keys, so one has to compare them one by one, which cannot be done in a hard-coded way.
How could one achieve a generic approach for an intersection of two lists of structurally equal data items?
You can do something like this:
const set1 = new Set([
{name: 'a'},
{name: 'b', lastname: 'bb'},
{name: 'c'},
{name: 'd'}
]);
const set2 = new Set([
{name: 'b'},
{name: 'd'}
]);
set1.forEach((value) => {
  if (![...set2].some((o) =>
    Object.entries(o).every(([k, v], _, arr) =>
      (Object.keys(value).length === arr.length && value[k] === v)
    )
  )) {
    set1.delete(value);
  }
})
console.log([...set1]);
What this does is iterate through set1 and, if the item at the current iteration is not the same as any item in set2 (![...set2].some(..)), delete it.
The items are considered the same if they have the same number of keys and if the values at the same keys are strictly equal.
This only works if the values of the objects in the sets are primitives; if they are not, you'll have to change value[k] === v to an appropriate comparison.
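For instance, if the nested values are plain JSON data whose key order happens to match between the two sets, one crude replacement is a stringify comparison. This is only a sketch; JSON.stringify is sensitive to key order, which the answer below addresses properly:
set1.forEach((value) => {
  if (![...set2].some((o) =>
    Object.entries(o).every(([k, v], _, arr) =>
      Object.keys(value).length === arr.length &&
      // crude structural check; fails if key order differs or values are not JSON-safe
      JSON.stringify(value[k]) === JSON.stringify(v)
    )
  )) {
    set1.delete(value);
  }
});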
One could write a generic solution which compares pure, thus JSON-conform, data structures regardless of any object's nesting depth/level and (creation-time) key order.
Such a function would be self-recursive for Array item comparison (order matters) and Object property comparison (key order does not matter). Otherwise values are compared strictly.
function isDeepDataStructureEquality(a, b) {
  let isEqual = Object.is(a, b);
  if (!isEqual) {
    if (Array.isArray(a) && Array.isArray(b)) {
      isEqual = (a.length === b.length) && a.every(
        (item, idx) => isDeepDataStructureEquality(item, b[idx])
      );
    } else if (
      a && b
      && (typeof a === 'object')
      && (typeof b === 'object')
    ) {
      const aKeys = Object.keys(a);
      const bKeys = Object.keys(b);
      isEqual = (aKeys.length === bKeys.length) && aKeys.every(
        (key, idx) => isDeepDataStructureEquality(a[key], b[key])
      );
    }
  }
  return isEqual;
}
const objA = { // `objA` equals `objB`.
  name: 'foo',
  value: 1,
  obj: {
    z: 'z',
    y: 'y',
    a: {
      name: 'bar',
      value: 2,
      obj: {
        x: 'x',
        w: 'w',
        b: 'b',
      },
      arr: ['3', 4, 'W', 'X', {
        name: 'baz',
        value: 3,
        obj: {
          k: 'k',
          i: 'i',
          c: 'c',
        },
        arr: ['5', 6, 'B', 'A'],
      }],
    },
  },
  arr: ['Z', 'Y', 1, '2'],
};
const objB = { // `objB` equals `objA`.
  arr: ['Z', 'Y', 1, '2'],
  obj: {
    z: 'z',
    y: 'y',
    a: {
      obj: {
        x: 'x',
        w: 'w',
        b: 'b',
      },
      arr: ['3', 4, 'W', 'X', {
        obj: {
          k: 'k',
          i: 'i',
          c: 'c',
        },
        name: 'baz',
        value: 3,
        arr: ['5', 6, 'B', 'A'],
      }],
      name: 'bar',
      value: 2,
    },
  },
  name: 'foo',
  value: 1,
};
const objC = { // `objC` equals neither `objA` nor `objB`.
  arr: ['Z', 'Y', 1, '2'],
  obj: {
    z: 'z',
    y: 'y',
    a: {
      obj: {
        x: 'x',
        w: 'w',
        b: 'b',
      },
      arr: ['3', 4, 'W', 'X', {
        obj: {
          k: 'k',
          i: 'i',
          c: 'C', // the single difference to `objA` and `objB`.
        },
        name: 'baz',
        value: 3,
        arr: ['5', 6, 'B', 'A'],
      }],
      name: 'bar',
      value: 2,
    },
  },
  name: 'foo',
  value: 1,
};
console.log(
'isDeepDataStructureEquality(objA, objB) ?..',
isDeepDataStructureEquality(objA, objB)
);
console.log(
'isDeepDataStructureEquality(objA, objC) ?..',
isDeepDataStructureEquality(objA, objC)
);
console.log(
'isDeepDataStructureEquality(objB, objC) ?..',
isDeepDataStructureEquality(objB, objC)
);
console.log(
'isDeepDataStructureEquality(objB, objA) ?..',
isDeepDataStructureEquality(objB, objA)
);
Based on the above implementation of isDeepDataStructureEquality, one can solve the OP's task, which actually asks for the intersection of two list structures, by additionally providing a getIntersectionOfDeeplyEqualDataStructures function ...
function getIntersectionOfDeeplyEqualDataStructures(a, b) {
  return [...(a ?? [])]
    .reduce((collector, sourceItem) => {
      const { target, intersection } = collector;
      const targetIndex = target.findIndex(targetItem =>
        isDeepDataStructureEquality(targetItem, sourceItem)
      );
      if (targetIndex >= 0) {
        // collect the intersection of
        // both, source (a) and target (b).
        intersection.push(target[targetIndex]);
      }
      return collector;
    }, {
      target: [...(b ?? [])],
      intersection: [],
    }).intersection;
}
const set1 = new Set([
{ name: 'a' },
{ name: 'b', lastname: 'bb' },
{ name: 'c' },
{ name: 'd' }
]);
const set2 = new Set([
{ name: 'b' },
{ name: 'd' },
]);
console.log(
"getIntersectionOfDeeplyEqualDataStructures(set1, set2) ...",
getIntersectionOfDeeplyEqualDataStructures(set1, set2)
);
const set3 = new Set([
{ name: 'a' },
{ name: 'b', lastname: 'bb' },
{ name: 'c' },
{
name: 'd',
list: ['foo', 1, null, false, 0, {
foo: { bar: { baz: 'bizz', buzz: '' } }
}],
},
]);
const set4 = new Set([
{
list: ['foo', 1, null, false, 0, {
foo: { bar: { buzz: '', baz: 'bizz' } }
}],
name: 'd',
},
{ name: 'C' },
{ lastname: 'bb', name: 'b' },
{ name: 'aa' }
]);
console.log(
"getIntersectionOfDeeplyEqualDataStructures(set3, set4) ...",
getIntersectionOfDeeplyEqualDataStructures(set3, set4)
);
Edit
As for Titus' approach ...
set1.forEach(value => {
  if (
    ![...set2].some(o =>
      Object.entries(o).every(([k, v], _, arr) =>
        (Object.keys(value).length === arr.length && value[k] === v)
      )
    )
  ) {
    set1.delete(value);
  }
});
... which works for flat objects only, though it is already agnostic to key insertion order, one could optimize the code by ...
... not creating the keys array of the outermost currently processed object again and again with every nested some and every iteration; thus something like const valueKeys = Object.keys(value); before the if clause already improves the code.
... inverting the nested some and every logic, which results in a more efficient way of deleting every flat data item from the processed set which does not equal any flat data item from the secondary set.
On top of that, one could implement a function statement which not only helps code reuse but also makes the implementation independent from outer-scope references.
For instance, the primary set which is operated on and going to be mutated can be accessed as such a function's third parameter. But most important for outer-scope independence is the thisArg binding that is also available for any set's forEach method. Thus any function statement or function expression can access e.g. the other/secondary set via this, in case the latter was passed as forEach's 2nd parameter.
Better naming also supports the readability of the code ...
// the function naming of course is exaggerated.
function deleteItemFromSourceWhichDoesNotEqualAnyItemFromBoundTarget(sourceItem, _, sourceSet) {
  const targetSet = this;
  const sourceKeys = Object.keys(sourceItem);
  if (
    // ... for any data-item from the (bound) target-set ...
    [...targetSet].every(targetItem =>
      // ... which does not equal the currently processed data-item from the source-set ...
      Object.entries(targetItem).some(([targetKey, targetValue], _, targetEntries) =>
        sourceKeys.length !== targetEntries.length || sourceItem[targetKey] !== targetValue
      )
    )
  ) {
    // ... delete the currently processed data-item from the source-set.
    sourceSet.delete(sourceItem);
  }
}
const set1 = new Set([
{ name: 'a' }, // - to be kept.
{ name: 'b', lastname: 'bb' }, // - to be kept.
{ name: 'c' }, // - to be deleted.
{ name: 'd', nested: { name: 'a' } }, // - to be kept, but fails ...
]); // ... due to not being flat.
const set2 = new Set([
{ name: 'd', nested: { name: 'a' } }, // - should equal, but doesn't.
{ name: 'a' }, // - does equal.
{ lastname: 'bb', name: 'b' }, // - does equal.
{ name: 'e' }, // - doesn't equal.
]);
// `set1` is going to be mutated.
set1.forEach(deleteItemFromSourceWhichDoesNotEqualAnyItemFromBoundTarget, set2);
console.log(
'mutated `set1` now (almost) being equal to the intersection of initial `set1` and `set2` ...',
[...set1]
);
const set1 = new Set([
{name: 'a'},
{name: 'b', lastname: 'bb'},
{name: 'c'},
{name: 'd'}
]);
const set2 = new Set([
{name: 'b'},
{name: 'd'}
]);
const names = [...set2].map(s2 => s2.name);
console.log([...set1].filter(item => names.includes(item.name)));
const set1 = new Set([
{name: 'a'},
{name: 'b', lastname: 'bb'},
{name: 'c'},
{name: 'd'},
{name: 'e'}
]);
const set2 = new Set([
{name: 'c', lastname: 'ccc'},
{name: 'd'},
{name: 'b', lastname: 'cc'},
{name: 'e'}
]);
console.log([...set1].filter(item => {
  const s2Arr = [...set2];
  const itemKeys = Object.keys(item);
  for (let i = 0; i < s2Arr.length; i++) {
    const s2Obj = s2Arr[i];
    const s2ObjKeys = Object.keys(s2Obj);
    if (s2ObjKeys.length == itemKeys.length) {
      let oneSame = true;
      for (let j = 0; j < s2ObjKeys.length; j++) {
        const s2ObjKey = s2ObjKeys[j];
        if (item[s2ObjKey] != s2Obj[s2ObjKey]) {
          oneSame = false;
        }
      }
      if (oneSame)
        return true;
    }
  }
  return false;
}));

How to merge two arrays (a 1d array and a 2d array of objects) into one array using lodash

I am looking for the best way of doing this. I have two arrays (a 1d array and a 2d array of objects):
let keys = [11, 12];
let values = [
  [
    { name: '1', link: '1', history: '1' },
    { name: '2', link: '2', history: '2' }
  ],
  [
    { name: '3', link: '3', history: '3' },
    { name: '4', link: '4', history: '4' }
  ]
]
The end result I want is a flat array of objects:
[
  { name: '1', link: '1', history: '1', q: 11 },
  { name: '2', link: '2', history: '2', q: 11 },
  { name: '3', link: '3', history: '3', q: 12 },
  { name: '4', link: '4', history: '4', q: 12 }
]
How do I do this in the most efficient/clean way using lodash? Thanks!
This can be done without lodash:
let arr = [];
let keys = [29, 30]
let values = [
  [
    { name: '1', link: '1', history: '1' },
    { name: '2', link: '2', history: '2' }
  ],
  [
    { name: '3', link: '3', history: '3' },
    { name: '4', link: '4', history: '4' }
  ]
]

for (let j = 0; j < values.length; j++) {
  for (let i = 0; i < values[j].length; i++) {
    arr.push({ ...values[j][i], q: keys[j] });
  }
}

console.log(arr)
You may simply use _.map:
let keys = [11, 12];
let values = [[{"name":"1","link":"1","history":"1"},{"name":"2","link":"2","history":"2"}],[{"name":"3","link":"3","history":"3"},{"name":"4","link":"4","history":"4"}]]
console.log(_.map(values, (v, i) => _.map(v, o => (o.q = keys[i], o))))
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.15/lodash.core.js"></script>
or without lodash, which is pretty much the same syntax-wise:
let keys = [11, 12];
let values = [[{"name":"1","link":"1","history":"1"},{"name":"2","link":"2","history":"2"}],[{"name":"3","link":"3","history":"3"},{"name":"4","link":"4","history":"4"}]]
console.log(values.map((v, i) => v.map(o => (o.q = keys[i], o))))
Edit: I forgot that the resulting array has to be 1D.
- The lodash variation needs a _.flatten over the result
- The native JS version needs a .flat() (a corrected native variant is sketched below)
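A minimal sketch of the corrected native variant with the missing .flat() applied, using the same data as above:
let keys = [11, 12];
let values = [[{"name":"1","link":"1","history":"1"},{"name":"2","link":"2","history":"2"}],[{"name":"3","link":"3","history":"3"},{"name":"4","link":"4","history":"4"}]]
// same mapping as before, then flatten the nested result into a 1D array
console.log(values.map((v, i) => v.map(o => (o.q = keys[i], o))).flat())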
Lastly one may use lodash/fp to get some chaining style
let keys = [11, 12];
let values = [[{"name":"1","link":"1","history":"1"},{"name":"2","link":"2","history":"2"}],[{"name":"3","link":"3","history":"3"},{"name":"4","link":"4","history":"4"}]]
console.log(
  _.flow(
    // we need to use entries to get the index of fp.map
    _.entries,
    // for each array
    _.map(([i, v]) =>
      // for each "history" of the array
      _.map(
        // merge the key to the current "history"
        _.merge({ q: keys[i] }), v)
    ),
    _.flatten
  )(values)
)
console.log(
  _.flow(
    // or its variant
    _.map.convert({ cap: false })((v, i) => _.map(_.merge({ q: keys[i] }), v)),
    _.flatten
  )(values)
)
<script src="https://cdn.jsdelivr.net/g/lodash@4(lodash.min.js+lodash.fp.min.js)"></script>
Not a lodash solution, but a universal approach for merging more arrays of objects, producing a cartesian product.
let keys = [11, 12],
values = [[{ name: '1', link: '1', history: '1' }, { name: '2', link: '2', history: '2' }], [{ name: '3', link: '3', history: '3' }, { name: '4', link: '4', history: '4' }]],
data = [
values.flat(),
keys.map(q => ({ q }))
],
result = data.reduce((a, b) => a.reduce((r, v) => r.concat(b.map(w => ({ ...v, ...w }))), []));
console.log(result);
You can use Array.flatMap() (or lodash's _.flatMap()) to iterate the values, and then map the sub-arrays, and add the respective key:
const keys = [11, 12];
const values = [[{"name":"1","link":"1","history":"1"},{"name":"2","link":"2","history":"2"}],[{"name":"3","link":"3","history":"3"},{"name":"4","link":"4","history":"4"}]]
const result = values.flatMap((arr, i) =>
arr.map(o => ({ ...o, q: keys[i] }))
)
console.log(result)

Looking to group array by values in the sub array

Trying to parse one data set that has a bunch of the same "secondaryIDs" in a way that I can group and iterate through them together.
In English, what I'm trying to do is
"select a unique group of all items where the value of field is unique"
'use strict';
const data = [{
Group: 'A',
Name: 'SD'
}, {
Group: 'B',
Name: 'FI'
}, {
Group: 'A',
Name: 'MM'
}, {
Group: 'B',
Name: 'CO'
}];
let unique = [...new Set(data.map(item => item.Group))];
console.log(unique);
Which gives ["A", "B"],
but what I'm looking for is
{
A: [ "SD","MM" ],
B: [ "FI","CO" ],
}
For this, I would use array.reduce instead of array.map, because what you're actually hoping to return is a new value, not a modified array. The reduce method is perfect when you want to literally reduce the array into a single output value, in your case an object of unique groups. Maybe try something like this:
let unique = data.reduce((acc, { Group, Name }) => {
  if (!(acc.hasOwnProperty(Group))) {
    acc[Group] = [Name];
  } else {
    acc[Group].push(Name);
  }
  return acc;
}, {});
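With the data array from the question, logging unique should produce the grouped object described above:
console.log(unique); // { A: ['SD', 'MM'], B: ['FI', 'CO'] }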
I've also added a pen for this at: https://codepen.io/anon/pen/BGpgdz?editors=1011 so you can see this working.
Hope this helps!
You can also reduce your array to the grouped object (keyed by Group values):
const data = [{
Group: 'A',
Name: 'SD'
}, {
Group: 'B',
Name: 'FI'
}, {
Group: 'A',
Name: 'MM'
}, {
Group: 'B',
Name: 'CO'
}];
const grouped = data.reduce((a, {Group, Name}) => {
  if (!(Group in a)) a[Group] = [Name];
  else a[Group].push(Name);
  return a;
}, {});
console.log(grouped);
You can do something like this:
const map = {};
data.forEach(d => {
  if (map[d.Group]) {
    map[d.Group].push(d.Name);
  } else {
    map[d.Group] = [d.Name];
  }
})
console.log(map)
I think the easiest way to achieve this would be to use Array.prototype.reduce method to create an object that maps unique Group names to arrays that contain Names. You can supply an empty object literal as your initial reduce accumulator:
const data = [{
Group: 'A',
Name: 'SD'
}, {
Group: 'B',
Name: 'FI'
}, {
Group: 'A',
Name: 'MM'
}, {
Group: 'B',
Name: 'CO'
}];
var namesByGroup = data.reduce((map, el) => {
  map[el.Group] ? map[el.Group].push(el.Name) : map[el.Group] = [el.Name];
  return map;
}, {});
console.log(namesByGroup);
If you're interested in a functional approach, here is a solution using Ramda:
const group =
R.pipe(
R.groupBy(R.prop('Group')),
R.map(R.map(R.prop('Name'))));
console.log(
group([
{Group: 'A', Name: 'SD'},
{Group: 'B', Name: 'FI'},
{Group: 'A', Name: 'MM'},
{Group: 'B', Name: 'CO'}])
);
<script src="https://cdnjs.cloudflare.com/ajax/libs/ramda/0.25.0/ramda.min.js"></script>
This can also be done using forEach:
const data = [{
Group: 'A',
Name: 'SD'
}, {
Group: 'B',
Name: 'FI'
}, {
Group: 'A',
Name: 'MM'
}, {
Group: 'B',
Name: 'CO'
}];
const somefunction = (data) => {
  let arr = {}
  data.forEach(({Group, Name}) => {
    Group in arr ? arr[Group].push(Name) : arr[Group] = [Name]
  })
  return arr;
}
console.log(somefunction(data))

JS (ES6): Merge arrays based on id and concatenating sub arrays

I have two arrays, which look like this:
const persons = [
{
id: 1,
name: 'Peter',
job: 'Programmer'
},
{
id: 2,
name: 'Jeff',
job: 'Architect'
},
];
const salaries = [
{
id: 1,
salary: 3000,
departments: ['A', 'B']
},
{
id: 1,
salary: 4000,
departments: ['A', 'C']
},
{
id: 2,
salary: 4000,
departments: ['C', 'D']
}
];
Now I need to somehow merge these arrays into one, so that every id exists only once. Same keys should be replaced, except when the value is an array; then I want the arrays added/concatenated. So the desired result should look something like this:
const result = [
{
id: 1,
name: 'Peter',
job: 'Programmer',
salary: 4000,
departments: ['A', 'B', 'C']
},
{
id: 2,
name: 'Jeff',
job: 'Architect',
salary: 4000,
departments: ['C', 'D']
}
];
I have already tried:
// double id's, arrays get replaced
Object.assign({}, persons, salaries)

// lodash: double id's, arrays get concatenated
_.mergeWith(persons, salaries, (objValue, srcValue) => {
  if (_.isArray(objValue)) {
    return objValue.concat(srcValue);
  }
});

// gives me a map but replaces arrays
new Map(salaries.map(x => [x.id, x]))
Does anyone have an idea how to accomplish this?
You can use map(), filter(), reduce(), Object.assign() and spread syntax to achieve the required result.
DEMO
const persons = [{
id: 1,
name: 'Peter',
job: 'Programmer'
}, {
id: 2,
name: 'Jeff',
job: 'Architect'
}],
salaries = [{
id: 1,
salary: 3000,
departments: ['A', 'B']
}, {
id: 1,
salary: 4000,
departments: ['A', 'C']
}, {
id: 2,
salary: 4000,
departments: ['C', 'D']
}];
let output = persons.map(obj => {
  let filter = salaries.filter(v => v.id == obj.id);
  if (filter) {
    let departments = filter.reduce((r, v) => [...v.departments, ...r], []);
    Object.assign(obj, {
      salary: filter[filter.length - 1].salary,
      departments: departments.filter((item, pos) => departments.indexOf(item) == pos).sort()
    });
  }
  return obj;
});
console.log(output)
You can concat the arrays, then combine all items with the same id using Array.reduce() and a Map.
To combine objects with the same id, get the object from the Map. Iterate the new Object.entries() with Array.forEach(). Check if the existing value is an array; if not, assign the value. If it is an array, combine the arrays, and make the items unique using a Set with array spread.
To convert the Map back to an array, you can spread the Map.values() iterator.
const persons = [{"id":1,"name":"Peter","job":"Programmer"},{"id":2,"name":"Jeff","job":"Architect"}];
const salaries = [{"id":1,"salary":3000,"departments":["A","B"]},{"id":1,"salary":4000,"departments":["A","C"]},{"id":2,"salary":4000,"departments":["C","D"]}];
const result = [...persons.concat(salaries)
  .reduce((r, o) => {
    r.has(o.id) || r.set(o.id, {});
    const item = r.get(o.id);
    Object.entries(o).forEach(([k, v]) =>
      item[k] = Array.isArray(item[k]) ?
        [...new Set([...item[k], ...v])] : v
    );
    return r;
  }, new Map()).values()];
console.log(result);
You could use a Map and iterate all properties and check the type for adding unique values to the arrays.
var persons = [{ id: 1, name: 'Peter', job: 'Programmer' }, { id: 2, name: 'Jeff', job: 'Architect' }],
    salaries = [{ id: 1, salary: 3000, departments: ['A', 'B'] }, { id: 1, salary: 4000, departments: ['A', 'C'] }, { id: 2, salary: 4000, departments: ['C', 'D'] }],
    result = Array.from(
      salaries
        .reduce(
          (m, o) => {
            var t = m.get(o.id) || {};
            Object.keys(o).forEach(k => {
              if (Array.isArray(o[k])) {
                t[k] = t[k] || [];
                o[k].forEach(v => t[k].includes(v) || t[k].push(v));
              } else if (t[k] !== o[k]) {
                t[k] = o[k];
              }
            });
            return m;
          },
          persons.reduce((m, o) => m.set(o.id, Object.assign({}, o)), new Map)
        )
        .values()
    );
console.log(result);
