// Remove duplicates that occur 3 or more times in an array,
// keeping unique values and those with fewer than 3 occurrences.
function removeMany(arr) {
    // Sort a copy so equal values sit next to each other.
    const newArr = Array.from(arr).sort();
    let count = 0;
    let result = [];
    newArr.forEach((value, index, ar) => {
        count += 1;
        // refactored afterwards from (ar[index + 1] !== value)
        if (ar.lastIndexOf(value) <= index && count <= 2) {
            // We are at the last occurrence of a value seen at most twice:
            // copy every matching element from the original array.
            for (let i = 0; i < arr.length; i++) {
                if (arr[i] === value) {
                    result.push(arr[i]);
                }
            }
            count = 0;
        } else if (ar[index + 1] !== value) {
            // A run of 3 or more identical values ended: reset without copying.
            count = 0;
        }
    });
    // +1. Is there any way to return a result that mimics the original order of `numbers`?
    return result; // [1, 2, 2, 3, 4, 4]
}
const numbers = [1, 2, 3, 2, 4, 4, 5, 5, 5, 5];
console.log(removeMany(numbers));
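To answer the inline question: yes. Counting occurrences first and then filtering the original array preserves the input order, which is exactly what the next two test cases do. A minimal order-preserving sketch (removeManyOrdered is a hypothetical name, not one of the benchmarked test cases):

function removeManyOrdered(arr) {
    // Count every value up front.
    const counts = {};
    for (const value of arr) {
        counts[value] = (counts[value] || 0) + 1;
    }
    // Filtering the original array keeps its order.
    return arr.filter(value => counts[value] < 3);
}
console.log(removeManyOrdered(numbers)); // [1, 2, 3, 2, 4, 4]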
// Remove duplicates that occur 3 or more times in an array,
// keeping unique values and those with fewer than 3 occurrences.
function removeMany(arr) {
    // First pass: build a value-to-occurrence-count mapping.
    let countMappings = arr.reduce(function(carry, item) {
        if (carry[item] !== undefined) {
            carry[item]++;
        } else {
            carry[item] = 1;
        }
        return carry;
    }, {});
    // Second pass: keep only values that occur fewer than 3 times.
    return arr.reduce(function(final, item) {
        if (countMappings[item] < 3) {
            final.push(item);
        }
        return final;
    }, []);
}
const numbers = [1, 2, 3, 2, 4, 4, 5, 5, 5, 5];
console.log(removeMany(numbers));
function removeMany(numbers, max) {
    // Note: the reduce seed here is an array used as a keyed map; a plain
    // object ({}) or a Map would be the more idiomatic choice.
    const numberMap = numbers.reduce((map, num) => {
        map[num] = map[num] ? map[num] + 1 : 1;
        return map;
    }, []);
    // Keep only values occurring fewer than `max` times.
    return numbers.filter(num => numberMap[num] < max);
}
const numbers = [1, 2, 3, 2, 4, 4, 5, 5, 5, 5];
console.log(removeMany(numbers, 3));
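Because this version takes the threshold as a parameter, stricter cutoffs come for free; for example (an illustrative call, not part of the benchmark):

console.log(removeMany(numbers, 2)); // [1, 3]: only values that appear exactly once survive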
Test name | Executions per second
---|---
original duplicate removal | 44283.3 Ops/sec
2-pass approach | 43351.4 Ops/sec
2-pass with filtering | 44605.8 Ops/sec
Let's break down the benchmark and its test cases.
What is being tested?
The benchmark measures the performance of three approaches to removing values that occur 3 or more times in an array, while keeping unique values and those with fewer than 3 occurrences. The approaches are:
- Original duplicate removal: sorts a copy of the array and uses Array.prototype.lastIndexOf() to find the last occurrence of each value, copying matching elements into the result.
- 2-pass approach: first builds a count map with Array.prototype.reduce(). Then, it uses another Array.prototype.reduce() to filter out elements with a count greater than or equal to 3.
- 2-pass with filtering: same counting pass, but uses Array.prototype.filter() instead of Array.prototype.reduce() for the second pass.
Options compared
The benchmark compares these three approaches:
- original duplicate removal
- 2-pass approach
- 2-pass with filtering
Libraries used
- The Array.from() method is used in the original duplicate removal approach to convert an iterable object into a new array.
- The Array.prototype.sort() and Array.prototype.lastIndexOf() methods are used in the original duplicate removal approach (see the short illustration after this list).
- The Array.prototype.reduce() method is used in both the 2-pass and 2-pass with filtering approaches.
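For reference, Array.prototype.lastIndexOf() returns the index of the last occurrence of a value, which is how the original approach detects the end of each run in the sorted copy (a small illustration, not part of the benchmark):

const sorted = [1, 2, 2, 3, 4, 4, 5, 5, 5, 5];
console.log(sorted.lastIndexOf(2)); // 2: the final 2 sits at index 2
console.log(sorted.lastIndexOf(5)); // 9: the final 5 sits at index 9
// In the original snippet, `lastIndexOf(value) <= index` only becomes true
// once the loop reaches the last occurrence of `value`.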
Special JS features or syntax
None mentioned in this benchmark; the test cases rely only on built-in array operations such as Array.prototype.sort(), Array.prototype.filter(), and Array.prototype.reduce().
Alternatives
If you were to create a similar benchmark or explore other approaches, some alternatives might include:
- Using a dedicated hash map (Map in JavaScript) instead of an array to track counts (a sketch follows below).
However, given the nature of JavaScript and its built-in functions (like Array.prototype.sort(), Array.prototype.filter(), etc.), most developers would likely stick with these approaches, as they offer a good balance between readability, performance, and standard library usage.
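As a concrete illustration of the Map alternative (a hedged sketch under the same contract as the third test case; removeManyWithMap is a hypothetical name, not benchmarked here):

function removeManyWithMap(numbers, max) {
    // A Map avoids the implicit number-to-string key coercion of a plain
    // object (or of the array seed used in the third test case).
    const counts = new Map();
    for (const num of numbers) {
        counts.set(num, (counts.get(num) || 0) + 1);
    }
    // Keep only values occurring fewer than `max` times, in original order.
    return numbers.filter(num => counts.get(num) < max);
}
console.log(removeManyWithMap([1, 2, 3, 2, 4, 4, 5, 5, 5, 5], 3)); // [1, 2, 3, 2, 4, 4]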