Skip to content

Commit 648dc36

Browse files
refactor: removeDuplicates
1 parent 250a915 commit 648dc36

File tree

1 file changed

+16
-22
lines changed

1 file changed

+16
-22
lines changed
Lines changed: 16 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,36 +1,30 @@
11
/**
 * Remove duplicate values from a sequence, preserving the order of the
 * first occurrence of each value.
 *
 * Time Complexity: O(n) — where n is the length of the input sequence; a
 * single pass populates the Set.
 * Space Complexity: O(n) — in the worst case (all elements unique) the Set
 * holds every element of the input sequence.
 * Optimal Time Complexity: O(n) — a Set provides O(1) membership tracking,
 * so linear time is the best achievable for deduplication.
 *
 * @param {Array} inputSequence - Sequence to remove duplicates from
 * @returns {Array} New sequence with duplicates removed
 */
export function removeDuplicates(inputSequence) {
  // Set preserves insertion order, so the first occurrence of each value wins.
  return Array.from(new Set(inputSequence));
}
15+
16+
17+
// approach 2: plain-object lookup to track seen values.
// NOTE(review): object keys are coerced to strings, so this variant would
// conflate 1 and "1" (and all objects as "[object Object]") — the Set-based
// version above does not have this limitation.
/*export function removeDuplicates(inputSequence) {
  const seen = {};
  const uniqueItems = [];

  for (const item of inputSequence) {
    if (!seen[item]) {
      seen[item] = true;
      uniqueItems.push(item);
    }
  }

  return uniqueItems;
}*/

0 commit comments

Comments
 (0)