// Mirror of https://gitlab.com/Anson-Projects/zine.git
// (synced 2025-06-15 13:16:39 +00:00)
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs;

// Function to read URLs from a file
/// Reads a list of feed URLs from the file at `path`, one URL per line.
///
/// Returns the lines in file order, each as an owned `String`
/// (trailing newlines stripped by `lines()`).
///
/// # Panics
///
/// Panics with a message naming the path if the file cannot be read.
/// Acceptable here: this helper is only used by tests.
fn read_feed(path: &str) -> Vec<String> {
    let contents = fs::read_to_string(path)
        .unwrap_or_else(|e| panic!("failed to read {}: {}", path, e));
    contents.lines().map(|s| s.to_owned()).collect()
}
#[test]
// Verifies that feeds.txt is sorted; on failure, prints the correctly
// sorted list so the fix can be copy-pasted.
fn test_if_feeds_are_in_alphabetical_order() {
    let mut urls = read_feed("feeds.txt");

    // Use `<=` so this test checks ordering only. The original strict `<`
    // also failed on adjacent duplicates with a misleading "not sorted"
    // message; duplicates are covered by `test_if_duplicates_in_feeds_list`.
    if !urls.windows(2).all(|w| w[0] <= w[1]) {
        println!("Sorted feeds.txt:");

        urls.sort();

        for url in urls {
            println!("{}", url);
        }
        panic!("feeds.txt was not sorted!")
    }
}
#[test]
// Verifies that feeds.txt and featured_feeds.txt have no feed in common.
fn test_if_feeds_lists_have_overlapping_feed() {
    let set1: HashSet<_> = read_feed("feeds.txt").into_iter().collect();
    let set2: HashSet<_> = read_feed("featured_feeds.txt").into_iter().collect();

    // Print the offending URLs before failing, so the output names the
    // overlap (consistent with the other tests, which list offenders).
    for url in set1.intersection(&set2) {
        println!("{}", url);
    }
    assert!(
        set1.is_disjoint(&set2),
        "feeds.txt and featured_feeds.txt contain overlapping feeds!"
    );
}
#[test]
// Verifies that feeds.txt contains no duplicate URLs; on failure,
// prints each duplicated entry.
fn test_if_duplicates_in_feeds_list() {
    let urls = read_feed("feeds.txt");

    // Count occurrences of each URL. No pre-sort is needed: the
    // HashMap-based count is order-independent (the original sorted
    // first, which was dead O(n log n) work).
    let duplicates = urls
        .into_iter()
        .fold(HashMap::new(), |mut map, x| {
            *map.entry(x).or_insert(0) += 1;
            map
        })
        .into_iter()
        .filter(|(_, count)| *count > 1)
        .map(|(item, _)| item)
        .collect::<Vec<String>>();

    if !duplicates.is_empty() {
        for dupe in duplicates {
            println!("{}", dupe)
        }
        panic!("Duplicate entries found!")
    }
}