
Add Navigation and Embrace pico.css

2024-02-16 06:16:09 +00:00
parent a6f828e8ea
commit ef11297c25
7 changed files with 383 additions and 182 deletions

@@ -14,6 +14,8 @@ use std::fs::write;
use std::fs::DirBuilder;
use std::path::Path;
use rayon::prelude::*;
fn fetch_feed(url: &str) -> Result<Vec<Entry>, Box<dyn Error>> {
let content = get(url)?.text()?;
let feed = parser::parse(content.as_bytes())?;
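From its signature, fetch_feed pairs a blocking HTTP GET (the get call above, reqwest-style) with feed parsing into Entry values. Assuming the feed-rs model, a caller handling a single feed might look like the following sketch; the URL and the print_feed_titles helper are illustrative only.

// Hedged sketch, not part of the commit: assumes fetch_feed as defined above and
// the feed-rs Entry model, where title is an Option<Text> with a content string.
fn print_feed_titles() -> Result<(), Box<dyn std::error::Error>> {
    let entries = fetch_feed("https://example.com/feed.xml")?;
    for entry in entries {
        if let Some(title) = entry.title {
            println!("{}", title.content);
        }
    }
    Ok(())
}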
@@ -54,21 +56,42 @@ fn create_html_card(entry: &Entry) -> Markup {
);
let cleaned_description = strip_html_tags(&description);
let truncated_description = truncate_description(&cleaned_description, 500); // Truncate description to 100 characters
let truncated_description = truncate_description(&cleaned_description, 500);
let main_url = get_root_url(link.href.as_str());
html! {
a.card-link href=(link.href) target=("_blank") {
div.card {
h2 { (title) }
article {
header {
hgroup {
h2 { (title) }
a href=(format!("http://{}", main_url)) { (main_url) }
}
}
body {
@if !image_src.is_empty() {
img src=(image_src) alt="Entry image";
}
p { (truncated_description) }
}
footer {
a class="grid" href=(link.href) style="--pico-text-decoration: none;" {
button class="outline secondary" { "Read Post" }
}
}
}
}
}
fn get_root_url(input_url: &str) -> &str {
let mut url = input_url;
url = url.strip_prefix("https://").unwrap_or(url);
url = url.strip_prefix("http://").unwrap_or(url);
url.split_once('/').unwrap().0
}
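One caveat with get_root_url as written: split_once('/') returns None for a link with no path after the host (for example "https://example.com" with no trailing slash), so the unwrap panics. A panic-free variant, sketched here and not part of the commit, falls back to the bare host instead; get_root_url_safe is an illustrative name.

// Hedged sketch, not part of the commit: identical for URLs that have a path,
// but returns the bare host instead of panicking when no '/' follows it.
fn get_root_url_safe(input_url: &str) -> &str {
    let url = input_url
        .strip_prefix("https://")
        .or_else(|| input_url.strip_prefix("http://"))
        .unwrap_or(input_url);
    url.split_once('/').map_or(url, |(host, _)| host)
}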
fn truncate_description(description: &str, max_length: usize) -> String {
let description_trimmed = description.trim();
if description_trimmed.len() > max_length {
@@ -100,23 +123,116 @@ fn strip_html_tags(html: &str) -> String {
text_content.trim().to_string()
}
fn generate_html(entries: Vec<Entry>) -> Markup {
fn generate_header() -> Markup {
html! {
header {
nav {
ul {
li { h1 { "Anson's Aggregated Feed" }}
}
ul {
li { button data-target="about" onclick="toggleModal(event)" { "About" } }
li {
details class="dropdown" {
summary role="button" class="outline secondary" { "Theme" }
ul {
li { a href="#" data-theme-switcher="auto" { "Auto" }}
li { a href="#" data-theme-switcher="light" { "Light" }}
li { a href="#" data-theme-switcher="dark" { "Dark" }}
}
}
}
}
}
}
}
}
fn about_modal(entries: Vec<Entry>) -> Markup {
// Get the link for each entry (each entry is a blog post), then
// convert it to the URL of the blog's main page.
let mut links = entries
.iter()
.map(|entry| entry.links.first().unwrap().href.as_str())
.map(get_root_url)
.collect::<std::collections::HashSet<&str>>()
.into_iter()
.collect::<Vec<&str>>();
// Alphabetical to be fair to everyone :)
links.sort();
html! {
dialog id="about" {
article {
header {
a href="#" aria-label="Close" rel="prev" {}
p { strong { "About" }}
}
p {
"When looking for a RSS reader I came across "
a href="https://news.russellsaw.io/" {"news.russellsaw.io"}
" I thought the idea of building my own personalised newspaper was cool. \
So, I decided to build a clone using my own subscribed RSS feeds."
}
p {
"This page updates daily at 8:11ish AM Mountain Time. The following blogs are"
" in the subscription list:"
}
ul {
@for link in links {
li {a href=(link) {(link)}}
}
}
}
}
}
}
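The HashSet-then-sort step above works; the same result, deduplicated and already in alphabetical order, also falls out of a BTreeSet, and filter_map sidesteps the unwrap on any entry whose link list is empty. A sketch of that alternative, not part of the commit; unique_blog_links is an illustrative name.

// Hedged sketch, not part of the commit: one deduplicated, alphabetically ordered
// root URL per feed, skipping entries that have no links instead of unwrapping.
fn unique_blog_links(entries: &[Entry]) -> Vec<&str> {
    entries
        .iter()
        .filter_map(|entry| entry.links.first())
        .map(|link| get_root_url(link.href.as_str()))
        .collect::<std::collections::BTreeSet<&str>>()
        .into_iter()
        .collect()
}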
fn generate_footer() -> Markup {
html! {
footer class="container" {
small {
p {
a href="https://ansonbiggs.com" { "Anson Biggs" }
" - 2024 - "
a href="gitlab.com" { "Source Code" }
}
}
}
}
}
fn generate_index(entries: Vec<Entry>) -> Markup {
let num_columns = 3;
let chunk_size = (entries.len() as f32 / num_columns as f32).ceil() as usize;
html! {
(maud::DOCTYPE)
html {
head {
title { "Anson's Zine" }
link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@picocss/pico@1/css/pico.min.css";
meta charset="utf-8";
meta name="viewport" content="width=device-width, initial-scale=1";
link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@picocss/pico@2/css/pico.blue.min.css";
link rel="stylesheet" href="style.css";
}
body {
h1 { "Anson's Aggregated Feed" }
div class="cards-container" {
@for entry in entries {
{(create_html_card(&entry))}
body { main class="container" {
{(generate_header())}
div class="grid" {
@for column_entries in entries.chunks(chunk_size) {
div {
@for entry in column_entries {
{(create_html_card(&entry))}
}
}
}
}
}
{(generate_footer())}
{(about_modal(entries))}
script src="modal.js" {}
script src="minimal-theme-switcher.js" {}
}}
}
}
}
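The three-column layout in generate_index splits the entries into chunks of ceil(len / 3), so chunks() yields at most three columns and only the last one can be short (10 entries gives columns of 4, 4, and 2). The one edge case is an empty entry list: ceil(0 / 3) is 0, and slice::chunks panics on a chunk size of 0. A guard like this sketch (not part of the commit; column_chunk_size is an illustrative helper) keeps an empty feed list from aborting the build.

// Hedged sketch, not part of the commit: a chunk size that can never be 0, so
// entries.chunks(chunk_size) cannot panic when the entry list is empty.
fn column_chunk_size(entry_count: usize, num_columns: usize) -> usize {
    ((entry_count as f32 / num_columns as f32).ceil() as usize).max(1)
}
// column_chunk_size(10, 3) == 4  -> columns of 4, 4, 2
// column_chunk_size(0, 3)  == 1  -> chunks(1) yields nothing, so no panic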
@@ -125,21 +241,27 @@ fn main() -> Result<(), Box<dyn Error>> {
let binding = fs::read_to_string("feeds.txt").unwrap();
let feed_urls: Vec<&str> = binding.lines().collect();
let mut entries: Vec<Entry> = Vec::new();
let raw_entries: Vec<Result<Vec<Entry>, String>> = feed_urls
.into_par_iter()
.map(|url| {
fetch_feed(url).map_err(|e| format!("Failed to fetch or parse feed {}: {}", url, e))
})
.collect();
for url in feed_urls {
match fetch_feed(url) {
// Flatten the entries and filter out the errors
let mut entries: Vec<Entry> = Vec::new();
for entry in raw_entries {
match entry {
Ok(mut feed_entries) => entries.append(&mut feed_entries),
Err(e) => println!("Failed to fetch or parse feed {}: {}", url, e),
Err(e) => println!("{}", e),
}
}
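The flatten-and-log loop over raw_entries could equally be written as one iterator chain; a sketch of that equivalent shape, not what the commit does (flatten_feed_results is an illustrative name):

// Hedged sketch, not part of the commit: logs each failed feed to stderr and keeps
// every entry from the feeds that parsed, just like the loop above.
fn flatten_feed_results(raw_entries: Vec<Result<Vec<Entry>, String>>) -> Vec<Entry> {
    raw_entries
        .into_iter()
        .filter_map(|result| result.map_err(|e| eprintln!("{}", e)).ok())
        .flatten()
        .collect()
}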
// Remove any entries that don't have a timestamp, and then sort by timestamps
entries.retain(|entry| entry.published.is_some() || entry.updated.is_some());
entries
.sort_by_key(|entry| Reverse(entry.published.unwrap_or(entry.updated.unwrap_or_default())));
let html_string = generate_html(entries).into_string();
let html_string = generate_index(entries).into_string();
let output_path = Path::new("output/index.html");
DirBuilder::new()