diff --git a/src/utilities.rs b/src/utilities.rs
index dba9796..44f6aed 100644
--- a/src/utilities.rs
+++ b/src/utilities.rs
@@ -116,7 +116,7 @@ pub fn read_feed(path: &str) -> Vec {
         .filter_map(|url| match web_fetchers::fetch_feed(url) {
             Ok(entries) => Some(entries),
             Err(e) => {
-                println!("Failed to fetch or parse feed {}: {}", url, e);
+                println!("Failed to fetch or parse feed {url}: {e}");
                 None
             }
         })
@@ -131,7 +131,7 @@ pub fn read_feed(path: &str) -> Vec {
         .par_iter()
         .map(|entry| {
             Post::from_entry(entry).map_err(|e| {
-                log::warn!("Failed to process entry: {}", e);
+                log::warn!("Failed to process entry: {e}");
                 e
             })
         })
diff --git a/tests/feed_validation.rs b/tests/feed_validation.rs
index f807e2f..aca9cea 100644
--- a/tests/feed_validation.rs
+++ b/tests/feed_validation.rs
@@ -18,7 +18,7 @@ fn test_if_feeds_are_in_alphabetical_order() {
     urls.sort();

     for url in urls {
-        println!("{}", url);
+        println!("{url}");
     }
     panic!("feeds.txt was not sorted!")
 }
@@ -50,7 +50,7 @@ fn test_if_duplicates_in_feeds_list() {

     if !duplicates.is_empty() {
         for dupe in duplicates {
-            println!("{}", dupe)
+            println!("{dupe}")
         }
         panic!("Duplicate entries found!")
     }