1
0
mirror of https://gitlab.com/Anson-Projects/zine.git synced 2025-07-27 00:31:23 +00:00

Added proper logging

This commit is contained in:
2024-03-01 01:09:20 -07:00
parent 8b30cc58b9
commit 59b2fdef91
6 changed files with 113 additions and 16 deletions

View File

@@ -22,7 +22,7 @@ fn create_featured_card(entry: &Entry) -> Markup {
let lang = link.clone().href_lang.unwrap_or("en".to_string());
if lang != "en" {
println!("Non english! {} {}", lang, link.href);
log::warn!("Non english! {} {}", lang, link.href);
}
let mut image_url = entry
@@ -34,7 +34,7 @@ fn create_featured_card(entry: &Entry) -> Markup {
// Fallback to fetching social image if direct extraction didn't work
if image_url.is_empty() {
println!(
log::info!(
"Falling back to searching for a social image for {}",
link.href
);
@@ -60,7 +60,6 @@ fn create_featured_card(entry: &Entry) -> Markup {
let truncated_description = utilities::truncate_description(&cleaned_description, 500);
let main_url = utilities::get_root_url(link.href.as_str());
dbg!(main_url);
html! {
article {
@@ -93,7 +92,7 @@ fn create_post_card(entry: &Entry) -> Markup {
let lang = link.clone().href_lang.unwrap_or("en".to_string());
if lang != "en" {
println!("Non english! {} {}", lang, link.href);
log::warn!("Non english! {} {}", lang, link.href);
}
let mut image_url = entry
@@ -105,12 +104,15 @@ fn create_post_card(entry: &Entry) -> Markup {
// Fallback to fetching social image if direct extraction didn't work
if image_url.is_empty() {
println!(
log::info!(
"Falling back to searching for a social image for {}",
link.href
);
image_url = web_fetchers::fetch_social_image(link.href.as_str()).unwrap_or_default();
}
if image_url.is_empty() {
log::warn!("No image could be gathered for {}", link.href);
}
let description = entry.content.as_ref().map_or_else(
|| {
@@ -249,10 +251,10 @@ pub fn generate_index(mut entries: Vec<Entry>) -> Markup {
let featured = entries.remove(0);
if running_in_gitlab {
println!("Building for deployment");
log::info!("Building for deployment");
entries.truncate(30);
} else {
println!("Building for development");
log::info!("Building for development");
entries.truncate(6);
}

View File

@@ -14,13 +14,15 @@ mod utilities;
mod web_fetchers;
fn main() -> Result<(), Box<dyn Error>> {
simple_logger::init_with_level(log::Level::Info).unwrap();
let featured = read_feed("featured_feeds.txt").first().unwrap().clone();
let mut entries = read_feed("feeds.txt");
// Move featured article to the front
entries.retain(|entry| entry != &featured);
entries.insert(0, featured);
println!(
log::info!(
"Featured article: {}",
entries[0].links.first().unwrap().href.as_str()
);
@@ -34,8 +36,8 @@ fn main() -> Result<(), Box<dyn Error>> {
.unwrap();
match write(output_path, index.into_string()) {
Ok(_) => println!("Successfully wrote to {}", output_path.display()),
Err(e) => eprintln!("Failed to write to {}: {}", output_path.display(), e),
Ok(_) => log::info!("Successfully wrote to {}", output_path.display()),
Err(e) => log::error!("Failed to write to {}: {}", output_path.display(), e),
}
Ok(())

View File

@@ -9,7 +9,7 @@ pub fn read_feed(path: &str) -> Vec<Entry> {
let binding = fs::read_to_string(path).unwrap();
let feed_urls: Vec<&str> = binding.lines().collect();
println!("Fetching feeds:");
log::trace!("Fetching feeds:");
let raw_entries: Vec<Result<Vec<Entry>, String>> = feed_urls
.into_par_iter()
.map(|url| {
@@ -22,7 +22,7 @@ pub fn read_feed(path: &str) -> Vec<Entry> {
for entry in raw_entries {
match entry {
Ok(mut feed_entries) => entries.append(&mut feed_entries),
Err(e) => println!("{}", e),
Err(e) => log::warn!("{}", e),
}
}

View File

@@ -9,7 +9,11 @@ use std::error::Error;
/// Fetches the feed at `url` over HTTP and parses it into its entries.
///
/// Logs a warning when the feed parses successfully but contains no
/// items (often a sign of a dead or misconfigured feed), and an info
/// line with the item count otherwise.
///
/// # Errors
/// Returns an error if the HTTP request fails, the response body cannot
/// be read as text, or the body cannot be parsed as a feed.
pub fn fetch_feed(url: &str) -> Result<Vec<Entry>, Box<dyn Error>> {
    let content = get(url)?.text()?;
    let feed = parser::parse(content.as_bytes())?;
    // Route all diagnostics through the logger; the old raw `println!`
    // duplicated this output on stdout and ignored the configured log level.
    if feed.entries.is_empty() {
        log::warn!("Feed {url} returned no items!");
    } else {
        log::info!("Feed {} returned {} items", url, feed.entries.len());
    }
    Ok(feed.entries)
}