Trying to solve symmetric difference using JavaScript

I am trying to figure out a solution for symmetric
difference using javascript that accomplishes the following
objectives:
accepts an unspecified number of arrays as arguments
preserves the original order of the numbers in the arrays
does not remove duplicates of numbers in single arrays
removes duplicates occurring across arrays
Thus, for example,
if the input is ([1, 1, 2, 6], [2, 3, 5], [2, 3, 4]),
the solution would be [1, 1, 6, 5, 4].
I am trying to solve this as a challenge given by an online coding community. The exact instructions of the challenge state:
Create a function that takes two or more arrays and returns an array
of the symmetric difference of the provided arrays.
The mathematical term symmetric difference refers to the elements in
two sets that are in either the first or second set, but not in both.
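For reference, the plain two-set version of that definition can be sketched like this (a Set-based sketch; the helper name is mine, and this is not yet the duplicate-preserving variant the challenge asks for):
// A △ B: elements in A or in B, but not in both.
const symDiffTwoSets = (a, b) => {
  const setA = new Set(a);
  const setB = new Set(b);
  return [
    ...[...setA].filter(x => !setB.has(x)), // in A but not in B
    ...[...setB].filter(x => !setA.has(x)), // in B but not in A
  ];
};
console.log(symDiffTwoSets([1, 2, 3], [2, 3, 4])); // [1, 4]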
Although my solution below finds the numbers that are
unique to each array, it eliminates all numbers occurring
more than once and does not keep the order of the numbers.
My question is very close to the one asked at finding symmetric difference/unique elements in multiple arrays in javascript. However, the solution
does not preserve the original order of the numbers and does not preserve duplicates of unique numbers occurring in single arrays.
function sym(args){
var arr = [];
var result = [];
var units;
var index = {};
for(var i in arguments){
units = arguments[i];
for(var j = 0; j < units.length; j++){
arr.push(units[j]);
}
}
arr.forEach(function(a){
if(!index[a]){
index[a] = 0;
}
index[a]++;
});
for(var l in index){
if(index[l] === 1){
result.push(+l);
}
}
return result;
}
sym([1, 1, 2, 6], [2, 3, 5], [2, 3, 4]); // => Desired answer: [1, 1, 6, 5, 4]

As with all problems, it's best to start off writing an algorithm:
Concatenate versions of the arrays, where each array is filtered to contain those elements which no array other than the current one contains
Then just write that down in JS:
function sym() {
var arrays = [].slice.apply(arguments);
return [].concat.apply([], // concatenate
arrays.map( // versions of the arrays
function(array, i) { // where each array
return array.filter( // is filtered to contain
function(elt) { // those elements which
return !arrays.some( // no array
function(a, j) { //
return i !== j // other than the current one
&& a.indexOf(elt) >= 0 // contains
;
}
);
}
);
}
)
);
}
Non-commented version, written more succinctly using ES6:
function sym(...arrays) {
return [].concat(...arrays
.map((array, i) => array
.filter(elt => !arrays
.some((a, j) => i !== j && a.indexOf(elt) >= 0))));
}

Here's a version that uses the Set object to make for faster lookup. Here's the basic logic:
It puts each array passed as an argument into a separate Set object (to facilitate fast lookup).
Then, it iterates each passed in array and compares it to the other Set objects (the ones not made from the array being iterated).
If the item is not found in any of the other Sets, then it is added to the result.
So, it starts with the first array [1, 1, 2, 6]. Since 1 is not found in either of the other arrays, each of the first two 1 values is added to the result. Then 2 is found in the second set, so it is not added to the result. Then 6 is not found in either of the other two sets, so it is added to the result. The same process repeats for the second array [2, 3, 5], where 2 and 3 are found in other Sets, but 5 is not, so 5 is added to the result. And, for the last array, only 4 is not found in the other Sets. So, the final result is [1,1,6,5,4].
The Set objects are used for convenience and performance. You could use .indexOf() to look items up in each array, or make your own Set-like lookup with a plain object if you didn't want to rely on the Set object. There's also a partial polyfill for the Set object that would work here in this answer.
function symDiff() {
var sets = [], result = [];
// make copy of arguments into an array
var args = Array.prototype.slice.call(arguments, 0);
// put each array into a set for easy lookup
args.forEach(function(arr) {
sets.push(new Set(arr));
});
// now see which elements in each array are unique
// e.g. not contained in the other sets
args.forEach(function(array, arrayIndex) {
// iterate each item in the array
array.forEach(function(item) {
var found = false;
// iterate each set (use a plain for loop so it's easier to break)
for (var setIndex = 0; setIndex < sets.length; setIndex++) {
// skip the set from our own array
if (setIndex !== arrayIndex) {
if (sets[setIndex].has(item)) {
// if the set has this item
found = true;
break;
}
}
}
if (!found) {
result.push(item);
}
});
});
return result;
}
var r = symDiff([1, 1, 2, 6], [2, 3, 5], [2, 3, 4]);
log(r);
function log(x) {
var d = document.createElement("div");
d.textContent = JSON.stringify(x);
document.body.appendChild(d);
}
One key part of this code is how it compares a given item to the Sets from the other arrays. It just iterates through the list of Set objects, but it skips the Set object that has the same index in the array as the array being iterated. That skips the Set made from this array so it's only looking for items that exist in other arrays. That allows it to retain duplicates that occur in only one array.
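To isolate that key step, here is a minimal sketch of my own ('existsInOtherSets' is a hypothetical helper name) of the lookup it describes:
// True if `item` exists in any set other than the one at `ownIndex`
// (the set built from the item's own source array).
function existsInOtherSets(item, sets, ownIndex) {
  for (let i = 0; i < sets.length; i++) {
    if (i === ownIndex) continue;        // skip the set made from our own array
    if (sets[i].has(item)) return true;  // found in another array's set
  }
  return false;
}
// Usage sketch: keep the items not found in any other array's set.
const arrays = [[1, 1, 2, 6], [2, 3, 5], [2, 3, 4]];
const sets = arrays.map(a => new Set(a));
console.log(arrays.flatMap((arr, i) => arr.filter(item => !existsInOtherSets(item, sets, i)))); // [1, 1, 6, 5, 4]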
Here's a version that uses the Set object if it's present, but inserts a teeny replacement if not (so this will work in older browsers):
function symDiff() {
var sets = [], result = [], LocalSet;
if (typeof Set === "function") {
try {
// test to see if constructor supports iterable arg
var temp = new Set([1,2,3]);
if (temp.size === 3) {
LocalSet = Set;
}
} catch(e) {}
}
if (!LocalSet) {
// use teeny polyfill for Set
LocalSet = function(arr) {
this.has = function(item) {
return arr.indexOf(item) !== -1;
}
}
}
// make copy of arguments into an array
var args = Array.prototype.slice.call(arguments, 0);
// put each array into a set for easy lookup
args.forEach(function(arr) {
sets.push(new LocalSet(arr));
});
// now see which elements in each array are unique
// e.g. not contained in the other sets
args.forEach(function(array, arrayIndex) {
// iterate each item in the array
array.forEach(function(item) {
var found = false;
// iterate each set (use a plain for loop so it's easier to break)
for (var setIndex = 0; setIndex < sets.length; setIndex++) {
// skip the set from our own array
if (setIndex !== arrayIndex) {
if (sets[setIndex].has(item)) {
// if the set has this item
found = true;
break;
}
}
}
if (!found) {
result.push(item);
}
});
});
return result;
}
var r = symDiff([1, 1, 2, 6], [2, 3, 5], [2, 3, 4]);
log(r);
function log(x) {
var d = document.createElement("div");
d.textContent = JSON.stringify(x);
document.body.appendChild(d);
}

I came across this question in my research of the same coding challenge on FCC. I was able to solve it using for and while loops, but had some trouble solving it using the recommended Array.reduce(). After learning a ton about .reduce and other array methods, I thought I'd share my solutions as well.
This is the first way I solved it, without using .reduce.
function sym() {
var arrays = [].slice.call(arguments);
function diff(arr1, arr2) {
var arr = [];
arr1.forEach(function(v) {
if ( !~arr2.indexOf(v) && !~arr.indexOf(v) ) {
arr.push( v );
}
});
arr2.forEach(function(v) {
if ( !~arr1.indexOf(v) && !~arr.indexOf(v) ) {
arr.push( v );
}
});
return arr;
}
var result = diff(arrays.shift(), arrays.shift());
while (arrays.length > 0) {
result = diff(result, arrays.shift());
}
return result;
}
After learning and trying various method combinations, I came up with this, which I think is pretty succinct and readable.
function sym() {
var arrays = [].slice.call(arguments);
function diff(arr1, arr2) {
return arr1.filter(function (v) {
return !~arr2.indexOf(v);
});
}
return arrays.reduce(function (accArr, curArr) {
return [].concat( diff(accArr, curArr), diff(curArr, accArr) )
.filter(function (v, i, self) { return self.indexOf(v) === i; });
});
}
I thought that last .filter line was a pretty cool way to dedupe an array. I found it here, but modified it to use the 3rd callback parameter instead of the named array because of the method chaining.
This challenge was a lot of fun!
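As an aside, that dedupe idiom can be shown on its own; here is a tiny sketch of my own using the third filter callback parameter mentioned above:
// Keep only the first occurrence of each value: an element survives when
// its index equals the first index at which that value appears.
const dedupe = arr => arr.filter((v, i, self) => self.indexOf(v) === i);
console.log(dedupe([1, 1, 6, 5, 6, 4])); // [1, 6, 5, 4]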

// Set difference, a.k.a. relative complement
const diff = (a, b) => a.filter(v => !b.includes(v))
const symDiff = (first, ...rest) =>
rest.reduce(
(acc, x) => [
...diff(acc, x),
...diff(x, acc),
],
first,
)
/* - - - */
console.log(symDiff([1, 3], ['Saluton', 3])) // [1, 'Saluton']
console.log(symDiff([1, 3], [2, 3], [2, 8, 5])) // [1, 8, 5]

Just use _.xor, or copy the Lodash implementation.
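For reference, a usage sketch (assuming Lodash is installed, e.g. via npm install lodash); note that _.xor returns unique values per its documentation, so it would drop the duplicate 1 that this particular challenge expects:
const _ = require('lodash');
console.log(_.xor([2, 1], [2, 3])); // [1, 3]
// For the challenge input, expect the deduplicated result [1, 6, 5, 4]:
console.log(_.xor([1, 1, 2, 6], [2, 3, 5], [2, 3, 4]));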

Another simple, yet readable solution:
/*
This filters arr1 and arr2 from elements which are in both arrays
and returns concatenated results from filtering.
*/
function symDiffArray(arr1, arr2) {
return arr1.filter(elem => !arr2.includes(elem))
.concat(arr2.filter(elem => !arr1.includes(elem)));
}
/*
Add and use this if you want to filter more than two arrays at a time.
*/
function symDiffArrays(...arrays) {
return arrays.reduce(symDiffArray, []);
}
console.log(symDiffArray([1, 3], ['Saluton', 3])); // [1, 'Saluton']
console.log(symDiffArrays([1, 3], [2, 3], [2, 8, 5])); // [1, 8, 5]
Used functions: Array.prototype.filter() | Array.prototype.reduce() | Array.prototype.includes()

function sym(arr1, arr2, ...rest) {
// create an array with the unique numbers from both arrays
const union = [...new Set([...arr1,...arr2])];
// finding the Symmetric Difference between those two arrays
const diff = union.filter((num)=> !(arr1.includes(num) && arr2.includes(num)))
//if there are more than 2 arrays
if(rest.length){
// recursively call until rest is empty
// i.e. the diff of the first two arrays becomes the first parameter, so every
// recursive call reduces the arrays until the diff between all of them is calculated.
return sym(diff, rest[0], ...rest.slice(1))
}
return diff
}

Create a Map with a count of all unique values (across arrays). Then concat all arrays, and filter non unique values using the Map.
const symsym = (...args) => {
// create a Map from the unique value of each array
const m = args.reduce((r, a) => {
// get unique values of array, and add to Map
new Set(a).forEach((n) => r.set(n, (r.get(n) || 0) + 1));
return r;
}, new Map());
// combine all arrays
return [].concat(...args)
// remove all items that appear in more than one array (count > 1 in the Map)
.filter((n) => m.get(n) === 1);
};
console.log(symsym([1, 1, 2, 6], [2, 3, 5], [2, 3, 4])); // => Desired answer: [1, 1, 6, 5, 4]

This is JS code using higher-order functions:
function sym(args) {
var output;
output = [].slice.apply(arguments).reduce(function(previous, current) {
current.filter(function(value, index, self) { //for unique
return self.indexOf(value) === index;
}).forEach(function(element) { // toggle: remove if already present, otherwise push
var loc = previous.indexOf(element);
if (loc !== -1) {
previous.splice(loc, 1);
} else {
previous.push(element);
}
});
return previous;
}, []);
document.write(output);
return output;
}
sym([1, 2, 3], [5, 2, 1, 4]);
It returns [3,5,4] as the output.

A pure JavaScript solution.
function diff(arr1, arr2) {
var arr3= [];
for(var i = 0; i < arr1.length; i++ ){
var unique = true;
for(var j=0; j < arr2.length; j++){
if(arr1[i] == arr2[j]){
unique = false;
break;
}
}
if(unique){
arr3.push(arr1[i]);
}
}
return arr3;
}
function symDiff(arr1, arr2){
return diff(arr1,arr2).concat(diff(arr2,arr1));
}
symDiff([1, "calf", 3, "piglet"], [7, "filly"])
//[1, "calf", 3, "piglet", 7, "filly"]

My short solution. At the end, I removed duplicates by filter().
function sym() {
var args = Array.prototype.slice.call(arguments);
var almost = args.reduce(function(a,b){
return b.filter(function(i) {return a.indexOf(i) < 0;})
.concat(a.filter(function(i){return b.indexOf(i)<0;}));
});
return almost.filter(function(el, pos){return almost.indexOf(el) == pos;});
}
sym([1, 1, 2, 5], [2, 2, 3, 5], [3, 4, 5, 5]);
//Result: [4,5,1]

function sym(args) {
var initialArray = Array.prototype.slice.call(arguments);
var combinedTotalArray = initialArray.reduce(symDiff);
// Iterate each element in array, find values not present in other array and push values in combinedDualArray if value is not there already
// Repeat for the other array (change roles)
function symDiff(arrayOne, arrayTwo){
var combinedDualArray = [];
arrayOne.forEach(function(el, i){
if(!arrayTwo.includes(el) && !combinedDualArray.includes(el)){
combinedDualArray.push(el);
}
});
arrayTwo.forEach(function(el, i){
if(!arrayOne.includes(el) && !combinedDualArray.includes(el)){
combinedDualArray.push(el);
}
});
combinedDualArray.sort();
return combinedDualArray;
}
return combinedTotalArray;
}
console.log(sym([1, 1, 2, 5], [2, 2, 3, 5], [3, 4, 5, 5]));

This function removes duplicates because the original concept of symmetric difference operates over sets. In this example, the function operates on the sets this way: ((A △ B) △ C) △ D ...
function sym(...args) {
return args.reduce((old, cur) => {
let oldSet = [...new Set(old)]
let curSet = [...new Set(cur)]
return [
...oldSet.filter(i => !curSet.includes(i)),
...curSet.filter(i => !oldSet.includes(i))
]
})
}
// Running> sym([1, 1, 2, 6], [2, 3, 5], [2, 3, 4])
console.log(sym([1, 1, 2, 6], [2, 3, 5], [2, 3, 4]))
// Return> [1, 6, 5, 2, 4]
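A side note on that fold: chaining pairwise symmetric differences keeps exactly the values that occur in an odd number of the input sets, which is why 2 (present in all three arrays) survives above. A quick check of my own:
// Count in how many of the input sets each value occurs; keep odd counts.
const inputs = [[1, 1, 2, 6], [2, 3, 5], [2, 3, 4]];
const counts = new Map();
inputs.forEach(arr => new Set(arr).forEach(v => counts.set(v, (counts.get(v) || 0) + 1)));
console.log([...counts].filter(([, c]) => c % 2 === 1).map(([v]) => v));
// [1, 2, 6, 5, 4] -- same values as the fold above, in a different order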

This works for me:
function sym() {
var args = [].slice.call(arguments);
var getSym = function(arr1, arr2) {
return arr1.filter(function(each, idx) {
return arr2.indexOf(each) === -1 && arr1.indexOf(each, idx + 1) === -1;
}).concat(arr2.filter(function(each, idx) {
return arr1.indexOf(each) === -1 && arr2.indexOf(each, idx + 1) === -1;
}));
};
var result = getSym(args[0], args[1]);
var len = args.length - 1, i = 2;
while (--len) {
result = [].concat(getSym(result, args[i]));
i++;
}
return result;
}
console.info(sym([1, 1, 2, 5], [2, 2, 3, 5], [6, 8], [7, 8], [9]));

Alternative: Use the lookup inside a map instead of an array
function sym(...vs){
var has = {};
//flatten values
vs.reduce((a,b)=>a.concat(b)).
//if element does not exist add it (value==1)
//or mark it as multiply found value > 1
forEach(value=>{has[value] = (has[value]||0)+1});
return Object.keys(has).filter(x=>has[x]==1).map(x=>parseInt(x,10));
}
console.log(sym([1, 2, 3], [5, 2, 1, 4],[5,7], [5])); // [3,4,7]

If anyone is interested, this is my solution:
function sym (...args) {
let filteredArgs = [];
let symDiff = [];
args.map(arrayEl =>
filteredArgs.push(arrayEl.filter((el, key) =>
arrayEl.indexOf(el) === key
)
)
);
filteredArgs.map(elArr => {
elArr.map(el => {
let index = symDiff.indexOf(el);
if (index === -1) {
symDiff.push(el);
} else {
symDiff.splice(index, 1);
}
});
});
return (symDiff);
}
console.log(sym([1, 2, 3, 3], [5, 2, 1, 4]));

Here is a solution:
let a=[1, 1, 2, 6]
let b=[2, 3, 5];
let c= [2, 3, 4]
let result=[...a,...b].filter(item=>!(a.includes(item) && b.includes(item) ))
result=[...result,...c].filter(item=>!(b.includes(item) && c.includes(item) ))
console.log(result) //[1, 1, 6, 5, 4]

Concise solution using
Arrow functions
Array spread syntax
Array filter
Array reduce
Set
Rest parameters
Implicit return
const symPair = (a, b) =>
[...a.filter(item => !b.includes(item)),
...b.filter(item => !a.includes(item))]
const sym = (...args) => [...new Set(args.reduce(symPair))]
The function symPair works for two input arrays, and the function sym works for two or more arrays, using symPair as a reducer.
console.log(sym([1, 2, 3], [2, 3, 4], [6]))

const removeDuplicates = (data) => Array.from(new Set(data));
const getSymmetric = (data) => (val) => data.indexOf(val) === data.lastIndexOf(val)
function sym(...args) {
let joined = [];
args.forEach((arr) => {
joined = joined.concat(removeDuplicates(arr));
joined = joined.filter(getSymmetric(joined))
});
return joined;
}
console.log(sym([1, 2, 3], [5, 2, 1, 4]));

The code below worked fine in all the scenarios I tried:
function sym() {
var result = [];
for (var i = 0; i < arguments.length; i++) {
if (i == 0) {
var setA = arguments[i].filter((val) => !arguments[i + 1].includes(val));
var setB = arguments[i + 1].filter((val) => !arguments[i].includes(val));
result = [...setA, ...setB];
i = i + 1;
} else {
var setA = arguments[i].filter((val) => !result.includes(val));
var setB = result.filter((val) => !arguments[i].includes(val));
result = [...setA, ...setB];
}
}
return result.filter((c, index) => {
return result.indexOf(c) === index;
}).sort();
}

My code passed all test cases for the similar question on freeCodeCamp. Here is the code:
function sym(...args) {
const result = args.reduce((acc, curr, i) => {
if (curr.length > acc.length) {
const arr = curr.reduce((a, c, i) => {
if (!acc.includes(c) && !a.includes(c)) {
a.push(c);
}
if (!curr.includes(acc[i]) && i < acc.length) {
a.push(acc[i])
}
return a;
}, []);
return [...arr];
} else {
const arr = acc.reduce((a, c, i) => {
if (!curr.includes(c) && !a.includes(c)) {
a.push(c);
}
if (!acc.includes(curr[i]) && i < curr.length) {
a.push(curr[i])
}
return a;
}, []);
return [...arr]
}
});
let ans = new Set([...result])
return [...ans]
}
sym([1,2,3,3],[5, 2, 1, 4,5]);


Removing duplicate elements in an array, where the most-repeated element should come first in the new array

const source = [2, 9, 9, 1, 6];
const ans = source.filter((item, index, arr)=> arr.indexOf(item) === index);
console.log(ans);
Here I'm able to remove the duplicate elements, but how do I make 9, which is repeated the most, come first in the new array? Any help would be appreciated.
The answer (ans) should be [9, 2, 1, 6].
This should work for all cases where the array should be sorted by the number of occurrences.
const source = [2,1,9,9,6];
const indexes = [];
var ans = source.filter((item,index,arr)=>{
if(arr.indexOf(item) === index){
indexes.push({item:item,count:1})
return true;
}
else if(arr.indexOf(item) !== index){
indexes[indexes.findIndex(object=> object.item === item)].count++
return false;
}
return false;
})
ans =(indexes.sort((a,b)=>{return b.count - a.count}).map(obj =>obj.item))
console.log(ans)
If using more space is okay, you can use a hash map for counting elements and then convert it to an array.
Something like this.
let arr = [2, 9, 9, 1, 6];
// count elements
const map = arr.reduce((acc, e) => acc.set(e, (acc.get(e) || 0) + 1), new Map());
// sort by values and convert back to array
const res = [...map.entries()].sort((a, b) => b[1] - a[1]).map((a) => {
return a[0]
});
console.log(res)
Try This
function removeAndSort(arr) {
var order = {};
for (var i = 0; i < arr.length; i++) {
var value = arr[i];
if (value in order) {
order[value]++;
} else {
order[value] = 1;
}
}
var result = [];
for (value in order) {
result.push(value);
}
function sort(a, b) {
return order[b] - order[a];
}
return result.sort(sort);
}
console.log(removeAndSort([2, 9, 9, 1, 6]));
It's Absolutely Working, Check It
Instead of removing the duplicates with the code you have, you need to create a frequency map to save the duplicate information. In this example I create a map using an object like this...
const freq = { 9: [9, 2], 1: [1, 1] ... };
which uses the current iterated element as a key in an object, and the value is the element along with its duplicate value. You can grab those arrays using Object.values, sort them by that duplicate value, and then map over that sorted nested array to produce the result.
Note, however, due to the sorting process this results in [9, 1, 2, 6].
const source = [2, 9, 9, 1, 6];
// `reduce` over the array creating a key/value pair where
// the value is an array - the first element is the element value,
// and the second value is its duplicate value
const nested = source.reduce((acc, c) => {
// If the object property as defined by the element
// value doesn't exist assign an array to it initialising
// the duplicate value to zero
acc[c] ??= [c, 0];
// Increment the duplicate value
++acc[c][1];
// Return the object for the next iteration
return acc;
}, {});
// We take the `Object.values` of the object (a series of
// nested arrays and sort them by their duplicate value,
// finally `mapping` over the sorted arrays and extracting
// the first element of each
const out = Object.values(nested).sort((a, b) => {
return b[1] - a[1];
}).map(arr => arr[0]);
console.log(out);
Additional documentation
Logical nullish assignment
function sortAndFilter(source) {
let duplicates = {};
//count the duplications
source.filter((item, index, arr) => {
if(arr.indexOf(item) != index)
return duplicates[item] = (duplicates[item] || 0) + 1;
duplicates[item] = 0;
})
//sort the numbers based on the amount of duplications
return Object.keys(duplicates).map(a => parseInt(a)).sort((a, b) => duplicates[b] - duplicates[a]);
}
Output: [ 9, 1, 2, 6 ]
This could do the job.
This is the best answer to your question:
const source = [2, 9, 9, 1, 6];
function duplicate(array) {
let duplicates = array.filter((item, index) => array.indexOf(item) !== index);
return duplicates.concat(array.filter((item) => !duplicates.includes(item)));
}
console.log(duplicate(source));
function myFunction() {
const source = [2, 9, 9, 1, 6];
const ans = source.filter((item, index, arr)=> arr.indexOf(item) === index);
ans.sort((a, b) => b-a);
console.log(ans);
}
Output: [ 9, 6, 2, 1 ]

Find all duplicates in array of objects [duplicate]

I need to check a JavaScript array to see if there are any duplicate values. What's the easiest way to do this? I just need to find what the duplicated values are - I don't actually need their indexes or how many times they are duplicated.
I know I can loop through the array and check all the other values for a match, but it seems like there should be an easier way.
Similar question:
Get all unique values in a JavaScript array (remove duplicates)
You could sort the array and then run through it and see if the next (or previous) index is the same as the current. Assuming your sort algorithm is good, this should be less than O(n²):
const findDuplicates = (arr) => {
let sorted_arr = arr.slice().sort(); // You can define the comparing function here.
// JS by default uses a crappy string compare.
// (we use slice to clone the array so the
// original array won't be modified)
let results = [];
for (let i = 0; i < sorted_arr.length - 1; i++) {
if (sorted_arr[i + 1] == sorted_arr[i]) {
results.push(sorted_arr[i]);
}
}
return results;
}
let duplicatedArray = [9, 9, 111, 2, 3, 4, 4, 5, 7];
console.log(`The duplicates in ${duplicatedArray} are ${findDuplicates(duplicatedArray)}`);
In case you want the duplicates returned from a function, this covers a similar kind of case.
Reference: https://stackoverflow.com/a/57532964/8119511
If you want to eliminate the duplicates, try this great solution:
function eliminateDuplicates(arr) {
var i,
len = arr.length,
out = [],
obj = {};
for (i = 0; i < len; i++) {
obj[arr[i]] = 0;
}
for (i in obj) {
out.push(i);
}
return out;
}
console.log(eliminateDuplicates([1,6,7,3,6,8,1,3,4,5,1,7,2,6]))
Source:
http://dreaminginjavascript.wordpress.com/2008/08/22/eliminating-duplicates/
This is my answer from the duplicate thread (!):
When writing this entry in 2014, all examples were for-loops or jQuery. JavaScript has the perfect tools for this: sort, map and reduce.
Find duplicate items
var names = ['Mike', 'Matt', 'Nancy', 'Adam', 'Jenny', 'Nancy', 'Carl']
const uniq = names
.map((name) => {
return {
count: 1,
name: name
};
})
.reduce((result, b) => {
result[b.name] = (result[b.name] || 0) + b.count;
return result;
}, {});
const duplicates = Object.keys(uniq).filter((a) => uniq[a] > 1);
console.log(duplicates); // [ 'Nancy' ]
More functional syntax:
@Dmytro-Laptin pointed out some code that can be removed. This is a more compact version of the same code, using some ES6 tricks and higher-order functions:
const names = ['Mike', 'Matt', 'Nancy', 'Adam', 'Jenny', 'Nancy', 'Carl'];
const count = names =>
names.reduce((result, value) => ({ ...result,
[value]: (result[value] || 0) + 1
}), {}); // don't forget to initialize the accumulator
const duplicates = dict =>
Object.keys(dict).filter((a) => dict[a] > 1);
console.log(count(names)); // { Mike: 1, Matt: 1, Nancy: 2, Adam: 1, Jenny: 1, Carl: 1 }
console.log(duplicates(count(names))); // [ 'Nancy' ]
UPDATED: Short one-liner to get the duplicates:
[1, 2, 2, 4, 3, 4].filter((e, i, a) => a.indexOf(e) !== i) // [2, 4]
To get the array without duplicates simply invert the condition:
[1, 2, 2, 4, 3, 4].filter((e, i, a) => a.indexOf(e) === i) // [1, 2, 3, 4]
Note that this answer’s main goal is to be short. If you need something performant for a big array, one possible solution is to sort your array first (if it is sortable) then do the following to get the same kind of results as above:
myHugeSortedArray.filter((e, i, a) => a[i-1] === e)
Here is an example for a 1 000 000 integers array:
const myHugeIntArrayWithDuplicates =
[...Array(1_000_000).keys()]
// adding two 0 and four 9 duplicates
.fill(0, 2, 4).fill(9, 10, 14)
console.time("time")
console.log(
myHugeIntArrayWithDuplicates
// a possible sorting method for integers
.sort((a, b) => a > b ? 1 : -1)
.filter((e, i, a) => a[i-1] === e)
)
console.timeEnd("time")
On my AMD Ryzen 7 5700G dev machine it outputs:
[ 0, 0, 9, 9, 9, 9 ]
time: 22.738ms
As pointed out in the comments, both the short solution and the performant solution will return an array with the same duplicate several times if it occurs more than twice in the original array:
[1, 1, 1, 2, 2, 2, 2].filter((e, i, a) => a.indexOf(e) !== i) // [1, 1, 2, 2, 2]
If unique duplicates are wanted then a function like
function duplicates(arr) {
return [...new Set(arr.filter((e, i, a) => a.indexOf(e) !== i))]
}
can be used so that duplicates([1, 1, 1, 2, 2, 2, 2]) returns [1, 2].
When all you need is to check that there are no duplicates as asked in this question you can use the every() method:
[1, 2, 3].every((e, i, a) => a.indexOf(e) === i) // true
[1, 2, 1].every((e, i, a) => a.indexOf(e) === i) // false
Note that every() doesn't work for IE 8 and below.
Find duplicate values in an array
This should be one of the shortest ways to actually find duplicate values in an array. As specifically asked for by the OP, this does not remove duplicates but finds them.
var input = [1, 2, 3, 1, 3, 1];
var duplicates = input.reduce(function(acc, el, i, arr) {
if (arr.indexOf(el) !== i && acc.indexOf(el) < 0) acc.push(el); return acc;
}, []);
document.write(duplicates); // = 1,3 (actual array == [1, 3])
This doesn't need sorting or any third party framework. It also doesn't need manual loops. It works with every value indexOf() (or to be clearer: the strict comparison operator) supports.
Because of reduce() and indexOf() it needs at least IE 9.
You can add this function, or tweak it and add it to Javascript's Array prototype:
Array.prototype.unique = function () {
var r = new Array();
o:for(var i = 0, n = this.length; i < n; i++)
{
for(var x = 0, y = r.length; x < y; x++)
{
if(r[x]==this[i])
{
alert('this is a DUPE!');
continue o;
}
}
r[r.length] = this[i];
}
return r;
}
var arr = [1,2,2,3,3,4,5,6,2,3,7,8,5,9];
var unique = arr.unique();
alert(unique);
UPDATED: The following uses an optimized combined strategy. It optimizes primitive lookups to benefit from hash O(1) lookup time (running unique on an array of primitives is O(n)). Object lookups are optimized by tagging objects with a unique id while iterating through, so identifying duplicate objects is also O(1) per item and O(n) for the whole list. The only exception is items that are frozen, but those are rare and a fallback is provided using an array and indexOf.
var unique = function(){
var hasOwn = {}.hasOwnProperty,
toString = {}.toString,
uids = {};
function uid(){
var key = Math.random().toString(36).slice(2);
return key in uids ? uid() : uids[key] = key;
}
function unique(array){
var strings = {}, numbers = {}, others = {},
tagged = [], failed = [],
count = 0, i = array.length,
item, type;
var id = uid();
while (i--) {
item = array[i];
type = typeof item;
if (item == null || type !== 'object' && type !== 'function') {
// primitive
switch (type) {
case 'string': strings[item] = true; break;
case 'number': numbers[item] = true; break;
default: others[item] = item; break;
}
} else {
// object
if (!hasOwn.call(item, id)) {
try {
item[id] = true;
tagged[count++] = item;
} catch (e){
if (failed.indexOf(item) === -1)
failed[failed.length] = item;
}
}
}
}
// remove the tags
while (count--)
delete tagged[count][id];
tagged = tagged.concat(failed);
count = tagged.length;
// append primitives to results
for (i in strings)
if (hasOwn.call(strings, i))
tagged[count++] = i;
for (i in numbers)
if (hasOwn.call(numbers, i))
tagged[count++] = +i;
for (i in others)
if (hasOwn.call(others, i))
tagged[count++] = others[i];
return tagged;
}
return unique;
}();
If you have ES6 Collections available, then there is a much simpler and significantly faster version. (shim for IE9+ and other browsers here: https://github.com/Benvie/ES6-Harmony-Collections-Shim)
function unique(array){
var seen = new Set;
return array.filter(function(item){
if (!seen.has(item)) {
seen.add(item);
return true;
}
});
}
var a = ["a","a","b","c","c"];
a.filter(function(value,index,self){ return (self.indexOf(value) !== index )})
This should get you what you want, just the duplicates.
function find_duplicates(arr) {
var len=arr.length,
out=[],
counts={};
for (var i=0;i<len;i++) {
var item = arr[i];
counts[item] = counts[item] >= 1 ? counts[item] + 1 : 1;
if (counts[item] === 2) {
out.push(item);
}
}
return out;
}
find_duplicates(['one',2,3,4,4,4,5,6,7,7,7,'pig','one']); // -> ['one',4,7] in no particular order.
Find non-unique values from 3 arrays (or more):
ES2015
// 🚩🚩 🚩 🚩 🚩
var arr = [1,2,2,3,3,4,5,6,2,3,7,8,5,22],
arr2 = [1,2,511,12,50],
arr3 = [22,0],
merged,
nonUnique;
// Combine all the arrays to a single one
merged = arr.concat(arr2, arr3)
// create a new (dirty) Array with only the non-unique items
nonUnique = merged.filter((item,i) => merged.includes(item, i+1))
// Cleanup - remove duplicate & empty items items
nonUnique = [...new Set(nonUnique)]
console.log(nonUnique)
PRE-ES2015:
In the example below I chose to superimpose a unique method on top of the Array prototype, which allows access from everywhere and has a more "declarative" syntax. I do not recommend this approach in large projects, since it might very well collide with another method with the same custom name.
Array.prototype.unique = function () {
var arr = this.sort(), i=arr.length; // input must be sorted for this to work
while(i--)
arr[i] === arr[i-1] && arr.splice(i,1) // remove duplicate item
return arr
}
Array.prototype.nonunique = function () {
var arr = this.sort(), i=arr.length, res = []; // input must be sorted for this to work
while(i--)
arr[i] === arr[i-1] && (res.indexOf(arr[i]) == -1) && res.push(arr[i])
return res
}
// 🚩🚩 🚩 🚩 🚩
var arr = [1,2,2,3,3,4,5,6,2,3,7,8,5,22],
arr2 = [1,2,511,12,50],
arr3 = [22,0],
// merge all arrays & call custom Array Prototype - "unique"
unique = arr.concat(arr2, arr3).unique(),
nonunique = arr.concat(arr2, arr3).nonunique()
console.log(unique) // [0,1,12,2,22,3,4,5,50,511,6,7,8]
console.log(nonunique) // [5,3,22,2,1]
using underscore.js
function hasDuplicate(arr){
return (arr.length != _.uniq(arr).length);
}
The simplest and quickest way is to use the Set object:
const numbers = [1, 2, 3, 2, 4, 5, 5, 6];
const set = new Set(numbers);
const duplicates = numbers.filter(item => {
if (set.has(item)) {
set.delete(item);
return false;
} else {
return true;
}
});
// OR more concisely
const duplicates = numbers.filter(item => !set.delete(item));
console.log(duplicates);
// [ 2, 5 ]
This is my proposal (ES6):
let a = [1, 2, 3, 4, 2, 2, 4, 1, 5, 6]
let b = [...new Set(a.sort().filter((o, i) => o !== undefined && a[i + 1] !== undefined && o === a[i + 1]))]
// b is now [1, 2, 4]
Here's the simplest solution I could think of:
const arr = [-1, 2, 2, 2, 0, 0, 0, 500, -1, 'a', 'a', 'a']
const filtered = arr.filter((el, index) => arr.indexOf(el) !== index)
// => filtered = [ 2, 2, 0, 0, -1, 'a', 'a' ]
const duplicates = [...new Set(filtered)]
console.log(duplicates)
// => [ 2, 0, -1, 'a' ]
That's it.
Note:
It works with any numbers including 0, strings and negative numbers, e.g. -1.
Related question: Get all unique values in a JavaScript array (remove duplicates)
The original array arr is preserved (filter returns the new array instead of modifying the original)
The filtered array contains all duplicates; it can also contain more than 1 same value (e.g. our filtered array here is [ 2, 2, 0, 0, -1, 'a', 'a' ])
If you want to get only values that are duplicated (you don't want to have multiple duplicates with the same value) you can use [...new Set(filtered)] (ES6 has an object Set which can store only unique values)
Hope this helps.
Here is my simple one-line solution.
It searches not unique elements first, then makes found array unique with the use of Set.
So we have array of duplicates in the end.
var array = [1, 2, 2, 3, 3, 4, 5, 6, 2, 3, 7, 8, 5, 22, 1, 2, 511, 12, 50, 22];
console.log([...new Set(
array.filter((value, index, self) => self.indexOf(value) !== index))]
);
Shortest vanilla JS:
[1,1,2,2,2,3].filter((v,i,a) => a.indexOf(v) !== i) // [1, 2, 2]
A simple one-liner:
var arr = [9,1,2,4,3,4,9]
console.log(arr.filter((ele,indx)=>indx!==arr.indexOf(ele))) //get the duplicates
console.log(arr.filter((ele,indx)=>indx===arr.indexOf(ele))) //remove the duplicates
var a = [324,3,32,5,52,2100,1,20,2,3,3,2,2,2,1,1,1].sort();
a.filter(function(v,i,o){return i&&v!==o[i-1]?v:0;});
or when added to the prototype chain of Array:
//copy and paste: without error handling
Array.prototype.unique =
function(){return this.sort().filter(function(v,i,o){return i&&v!==o[i-1]?v:0;});}
See here: https://gist.github.com/1305056
A fast and elegant way using ES6 object destructuring and reduce.
It runs in O(n) (one iteration over the array) and doesn't repeat values that appear more than twice.
const arr = ['hi', 'hi', 'hi', 'bye', 'bye', 'asd']
const {
dup
} = arr.reduce(
(acc, curr) => {
acc.items[curr] = acc.items[curr] ? acc.items[curr] += 1 : 1
if (acc.items[curr] === 2) acc.dup.push(curr)
return acc
}, {
items: {},
dup: []
},
)
console.log(dup)
// ['hi', 'bye']
You can use the filter method and indexOf() to get all the duplicate values:
function duplicate(arr) {
return duplicateArray = arr.filter((item, index) => arr.indexOf(item) !== index)
}
arr.indexOf(item) will always return the first index at which a given element can be found.
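A quick illustration of my own of why that works:
const sample = [4, 5, 5, 2, 4];
// indexOf always reports the first position of a value, so any later
// occurrence has an index different from what indexOf returns.
console.log(sample.indexOf(5)); // 1 (the first 5, not the one at index 2)
console.log(sample.filter((item, index) => sample.indexOf(item) !== index)); // [5, 4]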
ES5 only (i.e., it needs a filter() polyfill for IE8 and below):
var arrayToFilter = [ 4, 5, 5, 5, 2, 1, 3, 1, 1, 2, 1, 3 ];
arrayToFilter.
sort().
filter( function(me,i,arr){
return (i===0) || ( me !== arr[i-1] );
});
Here is a very light and easy way:
var codes = dc_1.split(',');
var i = codes.length;
while (i--) {
if (codes.indexOf(codes[i]) != i) {
codes.splice(i,1);
}
}
ES6 offers the Set data structure, which is basically a collection that doesn't accept duplicates.
With the Set data structure, there's a very easy way to find duplicates in an array (using only one loop).
Here's my code
function findDuplicate(arr) {
var set = new Set();
var duplicates = new Set();
for (let i = 0; i< arr.length; i++) {
var size = set.size;
set.add(arr[i]);
if (set.size === size) {
duplicates.add(arr[i]);
}
}
return duplicates;
}
With ES6 (or using Babel or TypeScript) you can simply do:
var duplicates = myArray.filter(i => myArray.filter(ii => ii === i).length > 1);
https://es6console.com/j58euhbt/
Simple code with ES6 syntax (return sorted array of duplicates):
let duplicates = a => {let d=[]; a.sort((a,b) => a-b).reduce((a,b)=>{a==b&&!d.includes(a)&&d.push(a); return b}); return d};
How to use:
duplicates([1,2,3,10,10,2,3,3,10]);
I have just figured out a simple way to achieve this using an Array filter
var list = [9, 9, 111, 2, 3, 4, 4, 5, 7];
// Filter 1: to find all duplicates elements
var duplicates = list.filter(function(value,index,self) {
return self.indexOf(value) !== self.lastIndexOf(value) && self.indexOf(value) === index;
});
console.log(duplicates);
This answer might also be helpful; it leverages the JS reduce method to remove duplicates from an array.
const result = [1, 2, 2, 3, 3, 3, 3].reduce((x, y) => x.includes(y) ? x : [...x, y], []);
console.log(result);
Higher-ranked answers have a few inherent issues, including the use of legacy JavaScript, incorrect ordering, or support for only 2 duplicated items.
Here's a modern solution which fixes those problems:
const arrayNonUniq = array => {
if (!Array.isArray(array)) {
throw new TypeError("An array must be provided!")
}
return array.filter((value, index) => array.indexOf(value) === index && array.lastIndexOf(value) !== index)
}
arrayNonUniq([1, 1, 2, 3, 3])
//=> [1, 3]
arrayNonUniq(["foo", "foo", "bar", "foo"])
//=> ['foo']
You can also use the npm package array-non-uniq.
The following function (a variation of the eliminateDuplicates function already mentioned) seems to do the trick, returning test2,1,7,5 for the input ["test", "test2", "test2", 1, 1, 1, 2, 3, 4, 5, 6, 7, 7, 10, 22, 43, 1, 5, 8]
Note that the problem is stranger in JavaScript than in most other languages, because a JavaScript array can hold just about anything. Note that solutions that use sorting might need to provide an appropriate sorting function--I haven't tried that route yet.
This particular implementation works for (at least) strings and numbers.
function findDuplicates(arr) {
var i,
len=arr.length,
out=[],
obj={};
for (i=0;i<len;i++) {
if (obj[arr[i]] != null) {
if (!obj[arr[i]]) {
out.push(arr[i]);
obj[arr[i]] = 1;
}
} else {
obj[arr[i]] = 0;
}
}
return out;
}
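To illustrate the sorting caveat mentioned above: the default sort compares elements as strings, so numeric arrays need a comparator before any compare-with-previous duplicate check. A small sketch of my own:
const nums = [10, 1, 2, 10, 1];
console.log(nums.slice().sort());                // [1, 1, 10, 10, 2] (lexicographic)
console.log(nums.slice().sort((a, b) => a - b)); // [1, 1, 2, 10, 10] (numeric)
// The compare-with-previous duplicate check only makes sense on the numeric sort:
console.log(nums.slice().sort((a, b) => a - b).filter((e, i, a) => i > 0 && a[i - 1] === e)); // [1, 10]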
var arr = [2, 1, 2, 2, 4, 4, 2, 5];
function returnDuplicates(arr) {
return arr.reduce(function(dupes, val, i) {
if (arr.indexOf(val) !== i && dupes.indexOf(val) === -1) {
dupes.push(val);
}
return dupes;
}, []);
}
alert(returnDuplicates(arr));
This function avoids the sorting step and uses the reduce() method to push duplicates to a new array if it doesn't already exist in it.

Algorithm to return all 3-element combinations from an array of length 6 without the same set of numbers appearing twice

I was trying to write an algorithm in JavaScript that returns all the possible 3-number combinations from a given array of length 6.
For Example
var arr = [1, 2, 3, 4, 5, 6];
I have already got the combinations with the same sets of numbers in different positions in the 2D array.
(The code I took help from.)
If I have the same numbers in different combinations, I would like to remove them from the array. For example, if I have [1, 2, 3] at index i in the array containing all the possible combinations, I would like to remove other combinations with the same numbers, like [2, 1, 3], [1, 3, 2] and so on.
Note the array also contains repeated numbers, like [3, 3, 3], [2, 2, 2], [3, 2, 3] and so on.
I expect a 2D array which has the values [[1,2,3],[1,2,4],[1,2,5],[1,2,6],[1,3,4]] and so on (24 possibilities).
Is there any way to do this?
Extending the answer you linked, just filter the results with the help of a Set:
sort each individual result, convert it into a string using join(), check whether it's present in the set, and if not, store it in the final result.
function cartesian_product(xs, ys) {
var result = [];
for (var i = 0; i < xs.length; i++) {
for (var j = 0; j < ys.length; j++) {
// transform [ [1, 2], 3 ] => [ 1, 2, 3 ] and append it to result []
result.push([].concat.apply([], [xs[i], ys[j]]));
}
}
return result;
}
function cartesian_power(xs, n) {
var result = xs;
for (var i = 1; i < n; i++) {
result = cartesian_product(result, xs)
}
return result;
}
function unique_cartesian_power(xs, n) {
var result = cartesian_power(xs, n);
var unique_result = [];
const set = new Set();
result.forEach(function(value) {
var representation = value.sort().join(' ');
if (!set.has(representation)) {
set.add(representation);
unique_result.push(value);
}
});
return unique_result;
}
console.log(unique_cartesian_power([1, 2, 3, 4, 5, 6], 3));
const arr = [1, 2, 3, 4, 5, 6];
const result = arr.reduce((a, v) => arr.reduce((a, v2) => {
arr.reduce((a, v3) => {
const current = [v, v2, v3].sort().join(",");
!a.find(_ => _.sort().join() === current) && a.push([v, v2, v3]);
return a;
}, a);
return a;
}, a), []);
console.log(result.length);
console.log(...result.map(JSON.stringify));
You could take an iterative and recursive approach, using an index and a temporary array for the collected values.
Because of the nature of going upwards with the index, no duplicate set is created.
function getCombination(array, length) {
function iter(index, right) {
if (right.length === length) return result.push(right);
if (index === array.length) return;
for (let i = index, l = array.length - length + right.length + 1; i < l; i++) {
iter(i + 1, [...right, array[i]]);
}
}
var result = [];
iter(0, []);
return result;
}
var array = [1, 2, 3, 4, 5, 6],
result = getCombination(array, 3);
console.log(result.length);
result.forEach(a => console.log(...a));
This is a good example of why it is usually worthwhile not to ask for a specific answer to a generic problem shown with a specific question; however, as you've requested it: if you really have the above constraints (which don't make much sense to me), you could do it like this:
function combine(firstDigits, secondDigits, thirdDigits) {
let result = [];
firstDigits.forEach(firstDigit => {
// combine with all secondDigitPermutations
secondDigits.forEach(secondDigit => {
// combine with all thirdDigitPermutations
thirdDigits.forEach(thirdDigit => {
result.push([firstDigit, secondDigit, thirdDigit])
})
})
});
// now we have all permutations and simply need to filter them
// [1,2,3] is the same as [2,3,1]; so we need to sort them
// and check them for equality (by using a hash) and memoize them
// [1,2,3] => '123'
function hashCombination(combination) {
return combination.join('ಠ_ಠ');
}
return result
// sort individual combinations to make them equal
.map(combination => combination.sort())
.reduce((acc, currentCombination) => {
// transform the currentCombination into a "hash"
let hash = hashCombination(currentCombination);
// and look it up; if it is not there, add it to cache and result
if (!(hash in acc.cache)) {
acc.cache[hash] = true;
acc.result.push(currentCombination);
}
return acc;
}, {result: [], cache: {}})
.result;
}
console.log(combine([1,2,3,4,5,6],[1,2,3,4,5,6],[1,2,3,4,5,6]).length);
console.log(...combine([1,2,3,4,5,6],[1,2,3,4,5,6],[1,2,3,4,5,6]).map(JSON.stringify));
This does not include any super-clever assumptions about indexes, but it does abuse the fact that it's all about numbers. It deliberately uses no recursion, because that would easily explode if the number of combinations gets bigger, and because recursion in itself is not very readable.
For a real-world problem™ you'd employ a somewhat similar strategy though: generate all combinations and then filter them. Doing both at the same time is an exercise left for the astute reader (see the sketch below). For finding combinations that look different but are considered to be the same, you'd also use some kind of hashing and memoizing.
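For what it's worth, here is a minimal sketch of my own of doing both at once, i.e. skipping a combination as soon as its sorted hash has already been seen (the function name is mine):
// Generate 3-element picks (with repetition) from `digits`, keeping only the
// first combination seen for each multiset; the hash is the sorted values joined.
function uniqueCombinations(digits) {
  const seen = new Set();
  const result = [];
  for (const a of digits) {
    for (const b of digits) {
      for (const c of digits) {
        const hash = [a, b, c].sort((x, y) => x - y).join(',');
        if (!seen.has(hash)) { // filter while generating
          seen.add(hash);
          result.push([a, b, c]);
        }
      }
    }
  }
  return result;
}
console.log(uniqueCombinations([1, 2, 3, 4, 5, 6]).length); // 56 multisets, including repeats like [1,1,1]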
let arr1 = [1,2,3,4,5,6];
function getCombination(arr){
let arr2 = [];
for(let i=0; i<arr.length; i++){
for(let j=i; j<arr.length; j++){
for(let k=j; k<arr.length; k++){
arr2.push([arr[i],arr[j],arr[k]]);
}
}
}
return arr2;
}
console.log(getCombination(arr1));

Array of Arrays to an Array of Unique Values Using Reduce/Map

I have an array of arrays that needs to become one array of unique values.
[1, 3, 2], [5, 2, 1, 4], [2, 1]
I want to use reduce/map to solve the problem, but it doesn't seem to be working. I have solved the problem already with nested for loops like so:
function uniteUnique(arr) {
var args = Array.from(arguments);
var arr = [];
for (var i = 0; i < args.length; i++) {
for (var j = 0; j < args[i].length; j++) {
if (!arr.includes(args[i][j])) {
arr.push(args[i][j]);
}
}
}
return arr;
}
Now I tried to solve the problem using reduce/map, but I'm not getting the correct solution, like so:
function uniteUnique(arr) {
var args = Array.from(arguments);
return args.reduce(
(arr, a) => a.map(n => (!arr.includes(n) ? arr.push(n) : n)),
[]
);
}
console.log(uniteUnique([1, 3, 2], [5, 2, 1, 4], [2, 1]));
I also tried to solve with reduce/map, using the older syntax, like so:
function uniteUnique(arr) {
var args = Array.from(arguments);
return args.reduce(function(arr, a) {
return a.map(function(n) {
if (!arr.includes(n)) {
return arr.push(n);
} else {
return n;
}
});
});
}
My guess is that I'm not doing something right with the return statements in the callback functions. Any help would be appreciated, thanks.
The problem is that:
arr.includes(n)
arr is an array of arrays; includes won't work there. You also never pass arr down the reduce chain.
The easiest to solve would be:
[...new Set(array.reduce((a, b) => a.concat(b), []))]
That just flattens the array, builds a Set for uniqueness and spreads it into an array. Or another elegant solution using iterators:
function* flatten(arr) {
for(const el of arr) {
if(Array.isArray(el)) {
yield* flatten(el);
} else {
yield el;
}
}
}
const result = [];
for(const el of flatten(array))
if(!result.includes(el)) result.push(el);
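For reference, the Set one-liner above could be wrapped into the question's uniteUnique signature like this (a small sketch):
// Flatten all argument arrays, then let Set drop the duplicates.
function uniteUnique(...args) {
  return [...new Set(args.reduce((a, b) => a.concat(b), []))];
}
console.log(uniteUnique([1, 3, 2], [5, 2, 1, 4], [2, 1])); // [1, 3, 2, 5, 4]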
Instead of using Array#map, use Array#forEach and push unique numbers into the accumulator.
function uniteUnique(arr) {
var args = Array.from(arguments);
return args.reduce((arr, a) => {
a.forEach(n => (!arr.includes(n) ? arr.push(n) : n));
return arr
},[]);
}
console.log(uniteUnique([1, 3, 2], [5, 2, 1, 4], [2, 1]));
Alternatively, you can Array#concat all the arrays and then use Set to get the unique values.
const arr = [[1, 3, 2], [5, 2, 1, 4], [2, 1]],
unique = [...new Set([].concat(...arr))];
console.log(unique);
