Simplify removeDuplicates

Use a sort+compact construct instead of doing it by hand with a hashmap. The
time complexity is now O(n log n + n) instead of O(n), and the space complexity
is around O(log n) instead of O(n + uniq(n)), but it shouldn't matter in
practice, since removeDuplicates is only called to deduplicate tags.
jvoisin 2024-03-18 17:06:30 +01:00 committed by Frédéric Guillot
parent 91f5522ce0
commit 863a5b3648

@@ -8,6 +8,7 @@ import (
 	"errors"
 	"fmt"
 	"log/slog"
+	"slices"
 	"time"
 
 	"miniflux.app/v2/internal/crypto"
@@ -615,15 +616,7 @@ func (s *Storage) UnshareEntry(userID int64, entryID int64) (err error) {
 	return
 }
 
 // removeDuplicate removes duplicate entries from a slice
-func removeDuplicates[T string | int](sliceList []T) []T {
-	allKeys := make(map[T]bool)
-	list := []T{}
-	for _, item := range sliceList {
-		if _, value := allKeys[item]; !value {
-			allKeys[item] = true
-			list = append(list, item)
-		}
-	}
-	return list
+func removeDuplicates(l []string) []string {
+	slices.Sort(l)
+	return slices.Compact(l)
 }
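
For reference, here is a minimal standalone sketch of how the new sort+compact version behaves; the sample tags and the main function are illustrative and not part of the commit. Note that, unlike the hashmap version, it returns the unique values in sorted order rather than in their original order, and it sorts the input slice in place.

package main

import (
	"fmt"
	"slices"
)

// Same shape as the new implementation: sort the slice in place,
// then drop consecutive duplicates.
func removeDuplicates(l []string) []string {
	slices.Sort(l)
	return slices.Compact(l)
}

func main() {
	tags := []string{"go", "rss", "go", "miniflux", "rss"}
	fmt.Println(removeDuplicates(tags)) // prints [go miniflux rss]
}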