Mirror of https://gitlab.com/Anson-Projects/anson-stuff/zinetest.git (synced 2025-06-15 13:36:39 +00:00)

init commit

Anson Biggs 2024-02-10 01:40:54 -07:00
commit 09b13b9b34
7 changed files with 1530 additions and 0 deletions

.gitignore vendored Normal file (+2)

@@ -0,0 +1,2 @@
/target
/output

.gitlab-ci.yml Normal file (+40)

@@ -0,0 +1,40 @@
stages:
  - test
  - build
  - deploy

cache:
  key: ${CI_COMMIT_REF_SLUG}
  paths:
    - target/
    - cargo/

before_script:
  - rustup default stable
  - cargo --version
  - rustc --version

test:
  stage: test
  script:
    - cargo test --verbose

build_site:
  stage: build
  script:
    - cargo run
    - mv output public
  artifacts:
    paths:
      - public

pages:
  stage: deploy
  script:
    - echo "Publishing site..."
  dependencies:
    - build_site
  artifacts:
    paths:
      - public
  rules:
    - if: "$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH"
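
Note on the cache block above: GitLab runners can only cache paths inside the project directory, and Cargo keeps its registry in ~/.cargo by default, so the cargo/ cache path will stay empty unless Cargo is redirected into the project. A minimal sketch of one common fix, assuming a Docker-based runner; this variables block is an illustrative addition, not part of this commit:

# Hypothetical addition to .gitlab-ci.yml: point Cargo's home inside the
# project so the cached cargo/ directory is actually populated between jobs.
variables:
  CARGO_HOME: ${CI_PROJECT_DIR}/cargo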

Cargo.lock generated Normal file (+1341)

File diff suppressed because it is too large

Cargo.toml Normal file (+12)

@@ -0,0 +1,12 @@
[package]
name = "zine"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
feed-rs = "1.4.0"
reqwest = { version = "0.11.24", features = ["blocking"] }
maud = "0.26.0"
chrono = "0.4.33"

feeds.txt Normal file (+5)

@@ -0,0 +1,5 @@
https://bcantrill.dtrace.org/feed/
https://ciechanow.ski/atom.xml
https://factorio.com/blog/rss
https://notes.ansonbiggs.com/rss/
https://orbitalindex.com/feed.xml

src/main.rs Normal file (+88)

@@ -0,0 +1,88 @@
extern crate feed_rs;
extern crate maud;
extern crate reqwest;

use feed_rs::model::Entry;
use feed_rs::parser;
use maud::{html, Markup};
use reqwest::blocking::get;
use std::cmp::Reverse;
use std::error::Error;
use std::fs;
use std::fs::write;
use std::fs::DirBuilder;
use std::path::Path;

// Download a feed and parse it, returning its entries.
fn fetch_feed(url: &str) -> Result<Vec<Entry>, Box<dyn Error>> {
    let content = get(url)?.text()?;
    let feed = parser::parse(content.as_bytes())?;
    Ok(feed.entries)
}

// Render the aggregated entries as a single HTML page.
fn generate_html(entries: Vec<Entry>) -> Markup {
    html! {
        (maud::DOCTYPE)
        html {
            head {
                title { "Anson Zine" }
                link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@picocss/pico@1/css/pico.min.css";
            }
            body {
                h1 { "Aggregated Feed" }
                ul {
                    @for entry in entries {
                        li {
                            @if let Some(link) = entry.links.first() {
                                a href=(link.href) {
                                    @if let Some(title) = entry.title {
                                        (title.content)
                                    }
                                }
                            }
                            p {
                                // Prefer the published timestamp, falling back
                                // to the updated timestamp.
                                ({
                                    entry.published.unwrap_or(entry.updated.unwrap_or_default())
                                })
                            }
                        }
                    }
                }
            }
        }
    }
}

fn main() -> Result<(), Box<dyn Error>> {
    let binding = fs::read_to_string("feeds.txt")?;
    let feed_urls: Vec<&str> = binding.lines().collect();

    let mut entries: Vec<Entry> = Vec::new();
    for url in feed_urls {
        match fetch_feed(url) {
            Ok(mut feed_entries) => entries.append(&mut feed_entries),
            Err(e) => println!("Failed to fetch or parse feed {}: {}", url, e),
        }
    }

    // Remove any entries that don't have a timestamp, then sort newest-first.
    entries.retain(|entry| entry.published.is_some() || entry.updated.is_some());
    entries
        .sort_by_key(|entry| Reverse(entry.published.unwrap_or(entry.updated.unwrap_or_default())));

    let html_string = generate_html(entries).into_string();
    let output_path = Path::new("output/index.html");
    DirBuilder::new()
        .recursive(true)
        .create(output_path.parent().unwrap())?;

    match write(output_path, html_string) {
        Ok(_) => println!("Successfully wrote to {}", output_path.display()),
        Err(e) => eprintln!("Failed to write to {}: {}", output_path.display(), e),
    }
    Ok(())
}
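
The date splice above renders chrono's default Display output (e.g. "2024-02-10 01:40:54 UTC"). If a shorter date were wanted, chrono (already a dependency) can format the timestamp before it is spliced; a sketch under that assumption, where published_date is a hypothetical helper, not something this commit defines:

use chrono::{DateTime, Utc};
use feed_rs::model::Entry;

// Hypothetical helper: pick published, fall back to updated, and
// format the timestamp as YYYY-MM-DD for display.
fn published_date(entry: &Entry) -> String {
    let ts: DateTime<Utc> = entry.published.unwrap_or(entry.updated.unwrap_or_default());
    ts.format("%Y-%m-%d").to_string()
}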

tests/feed_validation.rs Normal file (+42)

@@ -0,0 +1,42 @@
use feed_rs::parser;
use reqwest::blocking::get;
use std::fs;

// Read the feed URLs from feeds.txt, one per line.
fn read_feed() -> Vec<String> {
    let binding = fs::read_to_string("feeds.txt").unwrap();
    binding.lines().map(|s| s.to_owned()).collect()
}

// Fetch and parse a feed, returning true only if both steps succeed.
fn fetch_and_parse_feed(url: &str) -> bool {
    let content = match get(url) {
        Ok(response) => response.text().unwrap_or_default(),
        Err(_) => return false,
    };
    parser::parse(content.as_bytes()).is_ok()
}

#[test]
fn test_that_urls_point_to_valid_feeds() {
    let urls = read_feed();
    for url in urls {
        assert!(
            fetch_and_parse_feed(&url),
            "Feed at URL failed validation: {}",
            url
        );
    }
}

#[test]
fn test_if_feeds_are_in_alphabetical_order() {
    let urls = read_feed();
    assert!(
        urls.windows(2).all(|w| w[0] < w[1]),
        "feeds.txt is not sorted alphabetically"
    )
}
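
Because the ordering test uses strict <, it also rejects duplicate lines in feeds.txt, so it doubles as a de-duplication check. If case-insensitive ordering were ever preferred, the comparison could lowercase both sides; a sketch only, not part of this commit:

// Hypothetical variant: case-insensitive, duplicate-rejecting order check.
fn is_sorted_ignoring_case(urls: &[String]) -> bool {
    urls.windows(2)
        .all(|w| w[0].to_lowercase() < w[1].to_lowercase())
}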