import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
public class LogStreamDeduplicator {

    /**
     * Removes duplicate log streams from the given entries, keeping the first
     * occurrence of each and preserving encounter order.
     *
     * <p>The original implementation did not compile: {@code Iterable} has no
     * {@code stream()} method, and {@code Set::iterator} cannot be used as a
     * mapping function over elements.
     *
     * @param logEntries an iterable of log stream names; must not be null
     * @return a new list containing each distinct entry exactly once, in first-seen order
     */
    public static Iterable<String> deduplicateLogStreams(Iterable<String> logEntries) {
        // LinkedHashSet gives O(1) duplicate detection while preserving the
        // order in which each stream was first seen.
        Set<String> seenStreams = new LinkedHashSet<>();
        for (String entry : logEntries) {
            seenStreams.add(entry); // add() is a no-op for already-seen entries
        }
        // Copy into a fresh list so callers get an independent collection,
        // detached from the internal set.
        return new ArrayList<>(seenStreams);
    }

    public static void main(String[] args) {
        // Example usage: duplicates "stream1" and "stream2" are dropped.
        String[] logData = {"stream1", "stream2", "stream1", "stream3", "stream2", "stream4"};
        Iterable<String> logEntries = java.util.Arrays.asList(logData);
        Iterable<String> uniqueLogStreams = deduplicateLogStreams(logEntries);
        for (String stream : uniqueLogStreams) {
            System.out.println(stream);
        }
    }
}
// NOTE(review): stray placeholder text ("Add your comment") commented out — bare text after the class body is not valid Java.