Mirror of https://gitlab.com/Anson-Projects/zine.git (synced 2025-06-16 13:36:40 +00:00)
Make featured post temporary
Also remove factorio since it posts so frequently
commit 7014f6359d
parent 9ed094af46
@@ -1,3 +1,7 @@
 # Anson's Aggregated Feed
 
 Anson's Aggregated Feed is a Rust application designed to aggregate content from multiple RSS feeds, creating a personalized news page. It fetches articles from specified RSS feed URLs, generates HTML cards for each entry, and outputs a single, styled HTML page. This project showcases the use of Rust for web content aggregation and manipulation, leveraging several powerful crates for parsing RSS feeds, handling dates and times, making HTTP requests, and rendering HTML.
+
+## Featured Feeds
+
+These are feeds that are extremely high quality and don't post on a regular schedule. There are blogs (factorio for example) that I think are consistently high quality but I want to save this space for very special posts.
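
The README describes the core loop: fetch entries from RSS feeds and render an HTML card per entry. As a minimal sketch of the rendering half, assuming the feed-rs and maud crates this diff already uses (render_card is an illustrative name, not a function from this repository):

// Sketch only: render one feed entry as a simple HTML card with maud.
// Assumes feed-rs's Entry model; render_card is an illustrative name.
use feed_rs::model::Entry;
use maud::{html, Markup};

fn render_card(entry: &Entry) -> Markup {
    // Pull the fields we need off the Entry, falling back to empty strings.
    let title = entry
        .title
        .as_ref()
        .map(|t| t.content.clone())
        .unwrap_or_default();
    let href = entry
        .links
        .first()
        .map(|l| l.href.clone())
        .unwrap_or_default();
    html! {
        article {
            header { h3 { (title) } }
            a href=(href) { button class="outline" { "Read Post" } }
        }
    }
}

The project's real card builders (create_featured_card below, for example) interpolate more fields, but the shape is the same: read values off the Entry and splice them into a maud html! block.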
@@ -1,2 +1 @@
 https://ciechanow.ski/atom.xml
-https://factorio.com/blog/rss
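
featured_feeds.txt, like feeds.txt, is a plain list of feed URLs, one per line, consumed by read_feed. The body of read_feed is not part of this diff; the following is only a sketch of how such a file might be fetched and parsed, assuming reqwest's blocking client and feed-rs (collect_entries is an illustrative name):

// Sketch only: read a one-URL-per-line feeds file, fetch each feed, and collect
// the entries. Not the project's actual read_feed; assumes reqwest (blocking
// feature) and feed-rs.
use std::fs;

use feed_rs::model::Entry;

fn collect_entries(path: &str) -> Vec<Entry> {
    let contents = fs::read_to_string(path).unwrap_or_default();
    contents
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        // Skip feeds that fail to download or parse rather than aborting.
        .filter_map(|url| reqwest::blocking::get(url).ok())
        .filter_map(|response| response.bytes().ok())
        .filter_map(|bytes| feed_rs::parser::parse(&bytes[..]).ok())
        .flat_map(|feed| feed.entries)
        .collect()
}

fn main() {
    let entries = collect_entries("featured_feeds.txt");
    println!("collected {} entries from the featured feeds", entries.len());
}

With something like this, featured_feeds.txt yields the pool from which main.rs picks its first entry as the featured post.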
@@ -75,7 +75,7 @@ fn create_featured_card(entry: &Entry) -> Markup {
             }
             footer {
                 a class="grid" href=(link.href) style="--pico-text-decoration: none;" {
-                    button class="outline primary" { "Read Post" }
+                    button class="outline primary" { "Read Featured Post" }
                 }
             }
         }
@@ -246,10 +246,9 @@ fn about_modal(entries: Vec<Entry>) -> Markup {
     }
 }
 
-pub fn generate_index(mut entries: Vec<Entry>) -> Markup {
+pub fn generate_index(mut entries: Vec<Entry>, featured: Entry) -> Markup {
     let running_in_gitlab = env::var("CI").map(|val| val == "true").unwrap_or(false);
 
-    let featured = entries.remove(0);
     if running_in_gitlab {
         log::info!("Building for deployment");
         entries.truncate(30);
@@ -258,6 +257,14 @@ pub fn generate_index(mut entries: Vec<Entry>) -> Markup {
         entries.truncate(6);
     }
 
+    let featured_card: maud::PreEscaped<String>;
+    if (utilities::get_entry_date(&featured)) > (chrono::Utc::now() - chrono::Duration::days(3)) {
+        featured_card = create_featured_card(&featured);
+        entries.retain(|entry| entry != &featured);
+    } else {
+        featured_card = html! {};
+    }
+
     html! {
         (maud::DOCTYPE)
         html {
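
The conditional added above is what makes the featured post temporary: the card is rendered only when the featured entry's date is within the last three days; otherwise featured_card is empty markup. A small sketch of that recency rule in isolation, assuming chrono (is_recent and the constant are illustrative names, not part of this diff):

// Sketch: the three-day recency rule from generate_index, pulled into a helper
// so the cutoff is explicit and easy to test. Names here are illustrative.
use chrono::{DateTime, Duration, Utc};

const FEATURED_MAX_AGE_DAYS: i64 = 3;

fn is_recent(entry_date: DateTime<Utc>, now: DateTime<Utc>) -> bool {
    entry_date > now - Duration::days(FEATURED_MAX_AGE_DAYS)
}

fn main() {
    let now = Utc::now();
    assert!(is_recent(now - Duration::days(1), now)); // fresh post: featured
    assert!(!is_recent(now - Duration::days(10), now)); // old post: hidden
    println!("recency rule checks passed");
}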
@@ -272,7 +279,7 @@ pub fn generate_index(mut entries: Vec<Entry>) -> Markup {
             }
             body { main class="container" {
                 {(generate_header())}
-                (create_featured_card(&featured))
+                (featured_card)
                 div class="grid" {
                     @for column_entries in utilities::group_by_nth(&entries, 3) {
                         div {
@@ -17,17 +17,14 @@ fn main() -> Result<(), Box<dyn Error>> {
     simple_logger::init_with_level(log::Level::Info).unwrap();
 
     let featured = read_feed("featured_feeds.txt").first().unwrap().clone();
-    let mut entries = read_feed("feeds.txt");
+    let entries = read_feed("feeds.txt");
 
-    // Move featured article to the front
-    entries.retain(|entry| entry != &featured);
-    entries.insert(0, featured);
     log::info!(
         "Featured article: {}",
         entries[0].links.first().unwrap().href.as_str()
     );
 
-    let index = index_generator::generate_index(entries);
+    let index = index_generator::generate_index(entries, featured);
 
     let output_path = Path::new("output/index.html");
     DirBuilder::new()
@@ -28,12 +28,15 @@ pub fn read_feed(path: &str) -> Vec<Entry> {
 
     // Remove any entries that don't have a timestamp, and then sort by timestamps
     entries.retain(|entry| entry.published.is_some() || entry.updated.is_some());
-    entries
-        .sort_by_key(|entry| Reverse(entry.published.unwrap_or(entry.updated.unwrap_or_default())));
+    entries.sort_by_key(|entry| Reverse(get_entry_date(entry)));
 
     entries
 }
 
+pub fn get_entry_date(entry: &Entry) -> chrono::DateTime<chrono::Utc> {
+    entry.published.unwrap_or(entry.updated.unwrap_or_default())
+}
+
 pub fn get_root_url(input_url: &str) -> &str {
     let mut url = input_url;
 
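
get_entry_date centralizes the published-then-updated fallback that the sort previously inlined. A brief sketch of the same chain on plain Option values, to show what unwrap_or_default() means here (assuming chrono, where DateTime::<Utc>::default() is the Unix epoch; read_feed already filters out entries with neither timestamp):

// Sketch of the fallback order used by utilities::get_entry_date, shown on plain
// Options. Assumes chrono's DateTime::<Utc>::default() (the Unix epoch).
use chrono::{DateTime, TimeZone, Utc};

fn fallback_date(published: Option<DateTime<Utc>>, updated: Option<DateTime<Utc>>) -> DateTime<Utc> {
    // published wins when present; otherwise updated; otherwise the epoch.
    published.unwrap_or(updated.unwrap_or_default())
}

fn main() {
    let updated_only = Some(Utc.with_ymd_and_hms(2024, 1, 2, 3, 4, 5).unwrap());
    assert_eq!(fallback_date(None, updated_only), updated_only.unwrap());
    assert_eq!(fallback_date(None, None), DateTime::<Utc>::default());
    println!("fallback order: published, then updated, then epoch");
}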