mirror of https://gitlab.com/Anson-Projects/projects.git synced 2025-09-19 03:52:37 +00:00

6 Commits

SHA1  Message  Date
6aeb0ea8eb  feat(ghost-upload): add update support, manual CI job, and dependency updates  2025-08-26 11:07:24 -06:00
51c03d9213  Merge branch 'modernize' into 'master' (FIx Dev Container; see merge request Anson-Projects/projects!9)  2025-05-14 11:02:44 -07:00
609d4064a9  FIx Dev Container  2025-05-14 11:02:43 -07:00
388adf4a02  Fix date in Double Pendulum post  2025-05-11 23:37:08 +00:00
590f8cb106  Merge branch 'pendulum' into 'master' (Double Pendulum; see merge request Anson-Projects/projects!8)  2025-05-11 14:27:15 -07:00
10083ec81c  Double Pendulum  2025-05-11 14:27:15 -07:00
22 changed files with 1312 additions and 3193 deletions

.gitignore (vendored, 2 changed lines)

@@ -1,4 +1,3 @@
_freeze/
_site/
public/
ghost-upload/target/
@@ -7,3 +6,4 @@ posts/*/\.jupyter_cache/
!/.quarto/_freeze/
!/.quarto/_freeze/*
/.quarto/
**/.DS_Store


@@ -1,19 +1,16 @@
build:
stage: build
image:
name: gcr.io/kaniko-project/executor:v1.21.0-debug
name: gcr.io/kaniko-project/executor:v1.23.2-debug
entrypoint: [""]
script:
- /kaniko/executor
--context "${CI_PROJECT_DIR}"
--dockerfile "${CI_PROJECT_DIR}/Dockerfile"
--destination "${CI_REGISTRY_IMAGE}:${CI_COMMIT_BRANCH}"
--destination "${CI_REGISTRY_IMAGE}:latest"
--cleanup
staging:
cache:
paths:
- _freeze
stage: deploy
image: ${CI_REGISTRY_IMAGE}:${CI_COMMIT_BRANCH}
script:

File diff suppressed because one or more lines are too long


@@ -2,7 +2,8 @@ const kProgressiveAttr = "data-src";
let categoriesLoaded = false;
window.quartoListingCategory = (category) => {
category = atob(category);
// category is URI encoded in EJS template for UTF-8 support
category = decodeURIComponent(atob(category));
if (categoriesLoaded) {
activateCategory(category);
setCategoryHash(category);


@@ -1,11 +1,12 @@
FROM ubuntu:22.04
FROM debian:bookworm
ARG DEBIAN_FRONTEND=noninteractive
ENV JULIA_VERSION=1.11.1 \
ENV JULIA_VERSION=1.11.5 \
JULIA_MAJOR_VERSION=1.11 \
JULIA_PATH=/usr/local/julia \
QUARTO_VERSION=1.6.37
JULIA_PATH=/usr/local/julia
ENV QUARTO_VERSION=1.7.31
RUN apt-get update && apt-get install -y --no-install-recommends \
apt-utils dialog \
@@ -13,19 +14,19 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
python3 python3-pip python3-dev \
r-base \
gcc g++ \
wget curl tar \
curl tar \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Use a RUN command for architecture detection and conditional logic
RUN wget https://github.com/quarto-dev/quarto-cli/releases/download/v${QUARTO_VERSION}/quarto-${QUARTO_VERSION}-linux-$(if [ "$(uname -m)" = "x86_64" ]; then echo "amd64"; else echo "arm64"; fi).tar.gz -O quarto.tar.gz \
RUN curl -fsSL "https://github.com/quarto-dev/quarto-cli/releases/download/v${QUARTO_VERSION}/quarto-${QUARTO_VERSION}-linux-$(if [ "$(uname -m)" = "x86_64" ]; then echo "amd64"; else echo "arm64"; fi).tar.gz" -o quarto.tar.gz \
&& tar -xzf quarto.tar.gz -C /opt \
&& mkdir -p /opt/quarto \
&& mv /opt/quarto-${QUARTO_VERSION}/* /opt/quarto/ \
&& ln -s /opt/quarto/bin/quarto /usr/local/bin/quarto \
&& rm -rf quarto.tar.gz /opt/quarto-${QUARTO_VERSION}
RUN python3 -m pip install jupyter webio_jupyter_extension jupyter-cache
RUN python3 -m pip install --break-system-packages jupyter webio_jupyter_extension jupyter-cache
RUN curl -fsSL "https://julialang-s3.julialang.org/bin/linux/$(if [ "$(uname -m)" = "x86_64" ]; then echo "x64"; else echo "aarch64"; fi)/${JULIA_MAJOR_VERSION}/julia-${JULIA_VERSION}-linux-$(if [ "$(uname -m)" = "x86_64" ]; then echo "x86_64"; else echo "aarch64"; fi).tar.gz" -o julia.tar.gz \
&& tar -xzf julia.tar.gz -C /tmp \

File diff suppressed because it is too large


@@ -1,20 +1,9 @@
[deps]
CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
Colors = "5ae59095-9a9b-59fe-a467-6f913c188581"
Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa"
GR_jll = "d2c73de3-f751-5644-a686-071e5b155ba9"
IJulia = "7073ff75-c697-5162-941a-fcdaad2a7d2a"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Measurements = "eff96d63-e80a-5855-80a2-b1b0885c5ab7"
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
PlotlyJS = "f0f68f2c-4968-5e81-91da-67840de0976a"
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
Pluto = "c3e4b0f8-55cb-11ea-2926-15256bba5781"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
SatelliteToolbox = "6ac157d9-b43d-51bb-8fab-48bf53814f4a"
Unitful = "1986cc42-f94f-5a68-af5c-568840ba703d"
[compat]
julia = "1.11"

File diff suppressed because one or more lines are too long (5 files)


@@ -0,0 +1,253 @@
const kProgressiveAttr = "data-src";
let categoriesLoaded = false;
window.quartoListingCategory = (category) => {
category = atob(category);
if (categoriesLoaded) {
activateCategory(category);
setCategoryHash(category);
}
};
window["quarto-listing-loaded"] = () => {
// Process any existing hash
const hash = getHash();
if (hash) {
// If there is a category, switch to that
if (hash.category) {
// category hash are URI encoded so we need to decode it before processing
// so that we can match it with the category element processed in JS
activateCategory(decodeURIComponent(hash.category));
}
// Paginate a specific listing
const listingIds = Object.keys(window["quarto-listings"]);
for (const listingId of listingIds) {
const page = hash[getListingPageKey(listingId)];
if (page) {
showPage(listingId, page);
}
}
}
const listingIds = Object.keys(window["quarto-listings"]);
for (const listingId of listingIds) {
// The actual list
const list = window["quarto-listings"][listingId];
// Update the handlers for pagination events
refreshPaginationHandlers(listingId);
// Render any visible items that need it
renderVisibleProgressiveImages(list);
// Whenever the list is updated, we also need to
// attach handlers to the new pagination elements
// and refresh any newly visible items.
list.on("updated", function () {
renderVisibleProgressiveImages(list);
setTimeout(() => refreshPaginationHandlers(listingId));
// Show or hide the no matching message
toggleNoMatchingMessage(list);
});
}
};
window.document.addEventListener("DOMContentLoaded", function (_event) {
// Attach click handlers to categories
const categoryEls = window.document.querySelectorAll(
".quarto-listing-category .category"
);
for (const categoryEl of categoryEls) {
// category needs to support non ASCII characters
const category = decodeURIComponent(
atob(categoryEl.getAttribute("data-category"))
);
categoryEl.onclick = () => {
activateCategory(category);
setCategoryHash(category);
};
}
// Attach a click handler to the category title
// (there should be only one, but since it is a class name, handle N)
const categoryTitleEls = window.document.querySelectorAll(
".quarto-listing-category-title"
);
for (const categoryTitleEl of categoryTitleEls) {
categoryTitleEl.onclick = () => {
activateCategory("");
setCategoryHash("");
};
}
categoriesLoaded = true;
});
function toggleNoMatchingMessage(list) {
const selector = `#${list.listContainer.id} .listing-no-matching`;
const noMatchingEl = window.document.querySelector(selector);
if (noMatchingEl) {
if (list.visibleItems.length === 0) {
noMatchingEl.classList.remove("d-none");
} else {
if (!noMatchingEl.classList.contains("d-none")) {
noMatchingEl.classList.add("d-none");
}
}
}
}
function setCategoryHash(category) {
setHash({ category });
}
function setPageHash(listingId, page) {
const currentHash = getHash() || {};
currentHash[getListingPageKey(listingId)] = page;
setHash(currentHash);
}
function getListingPageKey(listingId) {
return `${listingId}-page`;
}
function refreshPaginationHandlers(listingId) {
const listingEl = window.document.getElementById(listingId);
const paginationEls = listingEl.querySelectorAll(
".pagination li.page-item:not(.disabled) .page.page-link"
);
for (const paginationEl of paginationEls) {
paginationEl.onclick = (sender) => {
setPageHash(listingId, sender.target.getAttribute("data-i"));
showPage(listingId, sender.target.getAttribute("data-i"));
return false;
};
}
}
function renderVisibleProgressiveImages(list) {
// Run through the visible items and render any progressive images
for (const item of list.visibleItems) {
const itemEl = item.elm;
if (itemEl) {
const progressiveImgs = itemEl.querySelectorAll(
`img[${kProgressiveAttr}]`
);
for (const progressiveImg of progressiveImgs) {
const srcValue = progressiveImg.getAttribute(kProgressiveAttr);
if (srcValue) {
progressiveImg.setAttribute("src", srcValue);
}
progressiveImg.removeAttribute(kProgressiveAttr);
}
}
}
}
function getHash() {
// Hashes are of the form
// #name=value&name1=value1&name2=value2
const currentUrl = new URL(window.location);
const hashRaw = currentUrl.hash ? currentUrl.hash.slice(1) : undefined;
return parseHash(hashRaw);
}
const kAnd = "&";
const kEquals = "=";
function parseHash(hash) {
if (!hash) {
return undefined;
}
const hasValuesStrs = hash.split(kAnd);
const hashValues = hasValuesStrs
.map((hashValueStr) => {
const vals = hashValueStr.split(kEquals);
if (vals.length === 2) {
return { name: vals[0], value: vals[1] };
} else {
return undefined;
}
})
.filter((value) => {
return value !== undefined;
});
const hashObj = {};
hashValues.forEach((hashValue) => {
hashObj[hashValue.name] = decodeURIComponent(hashValue.value);
});
return hashObj;
}
function makeHash(obj) {
return Object.keys(obj)
.map((key) => {
return `${key}${kEquals}${obj[key]}`;
})
.join(kAnd);
}
function setHash(obj) {
const hash = makeHash(obj);
window.history.pushState(null, null, `#${hash}`);
}
function showPage(listingId, page) {
const list = window["quarto-listings"][listingId];
if (list) {
list.show((page - 1) * list.page + 1, list.page);
}
}
function activateCategory(category) {
// Deactivate existing categories
const activeEls = window.document.querySelectorAll(
".quarto-listing-category .category.active"
);
for (const activeEl of activeEls) {
activeEl.classList.remove("active");
}
// Activate this category
const categoryEl = window.document.querySelector(
`.quarto-listing-category .category[data-category='${btoa(
encodeURIComponent(category)
)}']`
);
if (categoryEl) {
categoryEl.classList.add("active");
}
// Filter the listings to this category
filterListingCategory(category);
}
function filterListingCategory(category) {
const listingIds = Object.keys(window["quarto-listings"]);
for (const listingId of listingIds) {
const list = window["quarto-listings"][listingId];
if (list) {
if (category === "") {
// resets the filter
list.filter();
} else {
// filter to this category
list.filter(function (item) {
const itemValues = item.values();
if (itemValues.categories !== null) {
const categories = decodeURIComponent(
atob(itemValues.categories)
).split(",");
return categories.includes(category);
} else {
return false;
}
});
}
}
}
}


@@ -13,3 +13,17 @@ publish:
- pages
rules:
- if: "$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH"
publish_update:
stage: deploy
image: rust:latest
variables:
UPDATE_EXISTING: "true"
script:
- cd ./ghost-upload
- cargo run
needs:
- pages
rules:
- if: "$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH"
when: manual

ghost-upload/Cargo.lock (generated, 1074 changed lines)

File diff suppressed because it is too large


@@ -1,3 +1,17 @@
# ghost-upload
This code uploads posts from https://projects.ansonbiggs.com to https://notes.ansonbiggs.com. I couldn't figure out how to update posts, and the kagi API doesn't make it clear how long it caches results for so for now only posts that don't exist on the ghost blog will be uploaded. If you want to update content you need to manually make edits to the code and delete posts on the blog.
This tool uploads posts from https://projects.ansonbiggs.com to https://notes.ansonbiggs.com.
What's new:
- Uses the Ghost Admin API to check for existing posts by slug instead of probing the public site.
- Optional update support: set `UPDATE_EXISTING=true` to update an existing post in-place (via `PUT /ghost/api/v3/admin/posts/{id}?source=html`).
- Safer slug handling (trims trailing `/` and falls back to the last path segment).
Env vars:
- `admin_api_key`: Ghost Admin API key in `key_id:secret` format (how this becomes a request token is sketched below the notes).
- `kagi_api_key`: Kagi Summarizer API key.
- `UPDATE_EXISTING` (optional): if `true`/`1`, update posts that already exist in Ghost.
Notes:
- Updates use optimistic concurrency by sending the current `updated_at` from Ghost. If someone edits a post in Ghost after we fetch it, the update will fail with a 409 and be reported in the console.
- Summaries are always regenerated when creating or updating; if you want to avoid re-summarizing on updates, leave `UPDATE_EXISTING` unset.
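As a rough illustration of how the `key_id:secret` value becomes a request token, here is a minimal sketch of the Ghost v3 Admin API JWT step (roughly what `main.rs` does before its `expect("JWT encoding failed")`). It assumes the `jsonwebtoken`, `serde`, and `hex` crates, and the helper name is illustrative rather than taken from the code:

```rust
use jsonwebtoken::{encode, Algorithm, EncodingKey, Header};
use serde::Serialize;
use std::time::{SystemTime, UNIX_EPOCH};

#[derive(Serialize)]
struct Claims {
    iat: u64,
    exp: u64,
    aud: String,
}

// Hypothetical helper: build the short-lived token Ghost expects in the
// `Authorization: Ghost <token>` header.
fn make_ghost_token(admin_api_key: &str) -> String {
    // The Admin API key is "key_id:secret"; the secret part is hex-encoded.
    let (key_id, secret_hex) = admin_api_key
        .split_once(':')
        .expect("admin_api_key must be in key_id:secret format");
    let secret = hex::decode(secret_hex).expect("secret must be valid hex");

    let iat = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs();
    let claims = Claims {
        iat,
        exp: iat + 5 * 60,             // Ghost rejects tokens valid for more than 5 minutes
        aud: "/v3/admin/".to_string(), // audience for the v3 Admin API
    };

    // The key id goes in the JWT header so Ghost knows which secret to verify against.
    let mut header = Header::new(Algorithm::HS256);
    header.kid = Some(key_id.to_string());

    encode(&header, &claims, &EncodingKey::from_secret(&secret)).expect("JWT encoding failed")
}
```

Because the token is only valid for a few minutes, it is enough to mint it once per CI run and reuse it for the lookup, create, and update requests.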


@@ -1,6 +1,5 @@
use feed_rs::model::Entry;
use feed_rs::parser;
use futures::future::join_all;
use jsonwebtoken::{encode, Algorithm, EncodingKey, Header};
use maud::html;
use reqwest::Client;
@@ -20,6 +19,29 @@ struct PostPayload {
posts: Vec<Post>,
}
#[derive(Serialize, Debug, Clone)]
struct UpdatePost {
id: String,
title: String,
slug: String,
html: String,
status: String,
published_at: String,
updated_at: String,
canonical_url: String,
tags: Vec<String>,
feature_image: Option<String>,
feature_image_alt: Option<String>,
feature_image_caption: Option<String>,
meta_description: Option<String>,
custom_excerpt: Option<String>,
}
#[derive(Serialize, Debug)]
struct UpdatePayload {
posts: Vec<UpdatePost>,
}
#[derive(Serialize, Debug, Clone)]
struct Post {
title: String,
@@ -121,24 +143,54 @@ impl Post {
meta_description,
custom_excerpt,
};
dbg!(&x);
x
}
}
fn get_slug(link: &str) -> String {
link.split_once("/posts/").unwrap().1.to_string()
// Prefer portion after "/posts/" if present, otherwise fall back to the last path segment
let raw = match link.split_once("/posts/") {
Some((_, rest)) => rest,
None => link.rsplit('/').next().unwrap_or(link),
};
raw.trim_end_matches('/')
.to_string()
}
async fn check_if_post_exists(entry: &Entry) -> bool {
let posts_url = "https://notes.ansonbiggs.com/";
let link = entry.links.first().unwrap().href.as_str();
let slug = get_slug(link);
match reqwest::get(format!("{}{}", posts_url, slug)).await {
Ok(response) => response.status().is_success(),
Err(_) => false,
#[derive(Deserialize, Debug)]
struct GhostPostSummary {
id: String,
slug: String,
updated_at: String,
}
#[derive(Deserialize, Debug)]
struct GhostPostsResponse<T> {
posts: Vec<T>,
}
async fn get_existing_post_by_slug(
client: &Client,
ghost_admin_base: &str,
token: &str,
slug: &str,
) -> Option<GhostPostSummary> {
// Use Ghost Admin API to search by slug
let url = format!(
"{}/posts/?filter=slug:{}&fields=id,slug,updated_at",
ghost_admin_base, slug
);
let resp = client
.get(url)
.header("Authorization", format!("Ghost {}", token))
.send()
.await
.ok()?;
if !resp.status().is_success() {
return None;
}
let json = resp.json::<GhostPostsResponse<GhostPostSummary>>().await.ok()?;
json.posts.into_iter().next()
}
async fn fetch_feed(url: &str) -> Vec<Entry> {
@@ -205,7 +257,8 @@ async fn summarize_url(url: &str) -> String {
}
#[tokio::main]
async fn main() {
let ghost_api_url = "https://notes.ansonbiggs.com/ghost/api/v3/admin/posts/?source=html";
let ghost_admin_base = "https://notes.ansonbiggs.com/ghost/api/v3/admin";
let ghost_posts_create_url = format!("{}/posts/?source=html", ghost_admin_base);
let ghost_admin_api_key = env::var("admin_api_key").unwrap();
let feed = "https://projects.ansonbiggs.com/index.xml";
@@ -238,51 +291,87 @@ async fn main() {
)
.expect("JWT encoding failed");
let client = Client::new();
// Prepare the post data
let entries = fetch_feed(feed).await;
let post_exists_futures = entries.into_iter().map(|entry| {
let entry_clone = entry.clone();
async move { (entry_clone, check_if_post_exists(&entry).await) }
});
// Control whether to update existing posts via env var
let update_existing = env::var("UPDATE_EXISTING").map(|v| v == "1" || v.eq_ignore_ascii_case("true")).unwrap_or(false);
let post_exists_results = join_all(post_exists_futures).await;
for entry in entries {
let link = entry.links.first().unwrap().href.as_str();
let slug = get_slug(link);
let filtered_entries: Vec<Entry> = post_exists_results
.into_iter()
.filter_map(|(entry, exists)| if !exists { Some(entry) } else { None })
.collect();
let existing = get_existing_post_by_slug(&client, ghost_admin_base, &token, &slug).await;
if filtered_entries.is_empty() {
println!("Nothing to post.");
return;
}
let post_futures = filtered_entries.into_iter().map(Post::new);
let client = Client::new();
for post in join_all(post_futures).await {
let post_payload = PostPayload {
posts: vec![post.clone()],
};
match existing {
None => {
// Create new post
let post = Post::new(entry.clone()).await;
let post_payload = PostPayload { posts: vec![post.clone()] };
let response = client
.post(ghost_api_url)
.post(&ghost_posts_create_url)
.header("Authorization", format!("Ghost {}", token))
.json(&post_payload)
.send()
.await
.expect("Request failed");
// Check the response
if response.status().is_success() {
println!("Post {} published successfully.", post.title);
} else {
println!(
"Failed to publish post {}.\n\tResp: {:?}",
&post.title, response
"Failed to publish post {}.\n\tStatus: {}",
&post.title,
response.status()
);
}
}
Some(summary) => {
if !update_existing {
println!("Post '{}' exists (slug: {}), skipping.", entry.title.unwrap().content, slug);
continue;
}
// Update existing post
let post = Post::new(entry.clone()).await;
let update = UpdatePost {
id: summary.id,
title: post.title,
slug: post.slug,
html: post.html,
status: post.status,
published_at: post.published_at,
updated_at: summary.updated_at,
canonical_url: post.canonical_url,
tags: post.tags,
feature_image: post.feature_image,
feature_image_alt: post.feature_image_alt,
feature_image_caption: post.feature_image_caption,
meta_description: post.meta_description,
custom_excerpt: post.custom_excerpt,
};
let update_url = format!("{}/posts/{}/?source=html", ghost_admin_base, update.id);
let response = client
.put(update_url)
.header("Authorization", format!("Ghost {}", token))
.json(&UpdatePayload { posts: vec![update] })
.send()
.await
.expect("Update request failed");
if response.status().is_success() {
println!("Post '{}' updated successfully.", entry.title.unwrap().content);
} else {
println!(
"Failed to update post '{}' (status: {}).",
entry.title.unwrap().content,
response.status()
);
}
}
}
}
}

posts/.DS_Store (vendored, binary)

Binary file not shown.


@@ -0,0 +1,155 @@
---
title: "Double Pendulum"
description: |
Let's create a double pendulum in Observable JS!
date: 2025-05-09
categories:
- Observable JS
- Code
- Math
draft: false
freeze: true
image: FeistyCompetentGarpike-mobile.mp4
image-alt: "My original Double Pendulum done in Python and Processing.js"
---
Quarto (which this blog is built on) recently added support for [Observable JS](https://observablehq.com/@observablehq/observable-javascript), which lets you make really cool interactive and animated visualizations. I have an odd fixation with finding new tools to visualize data, and while JS is far from the first tool I want to grab, I figure I should give OJS a shot. Web browsers have been the best way to distribute and share applications for a long time now, so I think it's time I invested in learning something better than a Plotly diagram or a Jupyter notebook saved as a PDF for sharing data.
![My original Double Pendulum done in Python and Processing.js](FeistyCompetentGarpike-mobile.mp4){fig-alt="My original Double Pendulum done in Python and Processing.js"}
Many years ago I hit the front page of [/r/Python](https://www.reddit.com/r/Python/comments/ci1cg4/double_pendulum_made_with_processingpy/) with a double pendulum I made after watching the wonderful [Daniel Shiffman](https://thecodingtrain.com/showcase/author/anson-biggs) of the Coding Train. The video was posted on Gfycat, which is now defunct, but the Internet Archive has saved it: [https://web.archive.org/web/20201108021323/https://gfycat.com/feistycompetentgarpike-daniel-shiffman-double-pendulum-coding-train](https://web.archive.org/web/20201108021323/https://gfycat.com/feistycompetentgarpike-daniel-shiffman-double-pendulum-coding-train)
I originally used Processing's Python bindings to make the animation, so a lot of the hard work was already done (mostly by Daniel), and since this animation seems to be a crowd-pleaser I went ahead and ported it over. I'm keeping the code hidden since it's not the focus here, but feel free to expand it and peruse.
```{ojs}
//| echo: false
// Interactive controls
viewof length1 = Inputs.range([50, 300], {step: 10, value: 200, label: "Length of pendulum 1"})
viewof length2 = Inputs.range([50, 300], {step: 10, value: 200, label: "Length of pendulum 2"})
viewof mass1 = Inputs.range([10, 100], {step: 5, value: 40, label: "Mass of pendulum 1"})
viewof mass2 = Inputs.range([10, 100], {step: 5, value: 40, label: "Mass of pendulum 2"})
```
```{ojs}
//| code-fold: true
//| column: page
pendulum = {
const width = 900;
const height = 600;
const canvas = DOM.canvas(width, height);
const ctx = canvas.getContext("2d");
const gravity = .1;
const traceCanvas = DOM.canvas(width, height);
const traceCtx = traceCanvas.getContext("2d");
traceCtx.fillStyle = "white";
traceCtx.fillRect(0, 0, width, height);
const centerX = width / 2;
const centerY = 200;
// State variables
let angle1 = Math.PI / 2;
let angle2 = Math.PI / 2;
let angularVelocity1 = 0;
let angularVelocity2 = 0;
let previousPosition2X = -1;
let previousPosition2Y = -1;
function animate() {
// Physics calculations (same equations as Python)
let numerator1Part1 = -gravity * (2 * mass1 + mass2) * Math.sin(angle1);
let numerator1Part2 = -mass2 * gravity * Math.sin(angle1 - 2 * angle2);
let numerator1Part3 = -2 * Math.sin(angle1 - angle2) * mass2;
let numerator1Part4 = angularVelocity2 * angularVelocity2 * length2 +
angularVelocity1 * angularVelocity1 * length1 * Math.cos(angle1 - angle2);
let denominator1 = length1 * (2 * mass1 + mass2 - mass2 * Math.cos(2 * angle1 - 2 * angle2));
let angularAcceleration1 = (numerator1Part1 + numerator1Part2 + numerator1Part3 * numerator1Part4) / denominator1;
let numerator2Part1 = 2 * Math.sin(angle1 - angle2);
let numerator2Part2 = angularVelocity1 * angularVelocity1 * length1 * (mass1 + mass2);
let numerator2Part3 = gravity * (mass1 + mass2) * Math.cos(angle1);
let numerator2Part4 = angularVelocity2 * angularVelocity2 * length2 * mass2 * Math.cos(angle1 - angle2);
let denominator2 = length2 * (2 * mass1 + mass2 - mass2 * Math.cos(2 * angle1 - 2 * angle2));
let angularAcceleration2 = (numerator2Part1 * (numerator2Part2 + numerator2Part3 + numerator2Part4)) / denominator2;
// Update velocities and angles
angularVelocity1 += angularAcceleration1;
angularVelocity2 += angularAcceleration2;
angle1 += angularVelocity1;
angle2 += angularVelocity2;
// Calculate positions
let position1X = length1 * Math.sin(angle1);
let position1Y = length1 * Math.cos(angle1);
let position2X = position1X + length2 * Math.sin(angle2);
let position2Y = position1Y + length2 * Math.cos(angle2);
// Clear and draw to canvas
ctx.fillStyle = "white";
ctx.fillRect(0, 0, width, height);
ctx.drawImage(traceCanvas, 0, 0);
// Draw pendulum
ctx.save();
ctx.translate(centerX, centerY);
// First arm and mass
ctx.beginPath();
ctx.moveTo(0, 0);
ctx.lineTo(position1X, position1Y);
ctx.strokeStyle = "black";
ctx.lineWidth = 2;
ctx.stroke();
ctx.beginPath();
ctx.arc(position1X, position1Y, mass1/2, 0, 2 * Math.PI);
ctx.fillStyle = "black";
ctx.fill();
// Second arm and mass
ctx.beginPath();
ctx.moveTo(position1X, position1Y);
ctx.lineTo(position2X, position2Y);
ctx.stroke();
ctx.beginPath();
ctx.arc(position2X, position2Y, mass2/2, 0, 2 * Math.PI);
ctx.fill();
ctx.restore();
// Draw trace line
if (previousPosition2X !== -1 && previousPosition2Y !== -1) {
traceCtx.save();
traceCtx.translate(centerX, centerY);
traceCtx.beginPath();
traceCtx.moveTo(previousPosition2X, previousPosition2Y);
traceCtx.lineTo(position2X, position2Y);
traceCtx.strokeStyle = "black";
traceCtx.stroke();
traceCtx.restore();
}
previousPosition2X = position2X;
previousPosition2Y = position2Y;
requestAnimationFrame(animate);
}
animate();
return canvas;
}
```
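For reference, the "physics calculations" section of the block above is a direct port of the standard double-pendulum angular accelerations, with $\theta_1, \theta_2$ the angles from the hanging position, $m_1, m_2$ the masses, $L_1, L_2$ the arm lengths, and $g$ the gravity constant; each frame applies a simple Euler-style update with an implicit timestep of one frame:

$$
\ddot{\theta}_1 = \frac{-g(2m_1 + m_2)\sin\theta_1 - m_2 g \sin(\theta_1 - 2\theta_2) - 2\sin(\theta_1 - \theta_2)\, m_2 \left(\dot{\theta}_2^{\,2} L_2 + \dot{\theta}_1^{\,2} L_1 \cos(\theta_1 - \theta_2)\right)}{L_1 \left(2m_1 + m_2 - m_2 \cos(2\theta_1 - 2\theta_2)\right)}
$$

$$
\ddot{\theta}_2 = \frac{2\sin(\theta_1 - \theta_2)\left(\dot{\theta}_1^{\,2} L_1 (m_1 + m_2) + g(m_1 + m_2)\cos\theta_1 + \dot{\theta}_2^{\,2} L_2 m_2 \cos(\theta_1 - \theta_2)\right)}{L_2 \left(2m_1 + m_2 - m_2 \cos(2\theta_1 - 2\theta_2)\right)}
$$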
## Conclusion
I think this is far from an idiomatic implementation, so I'll keep this brief. I don't think I used JS or Observable as well as I could have, so treat this as a beginner stabbing into the dark, because that's essentially what the code is.
This was quite a bit more work than the [original Python implementation](https://gitlab.com/MisterBiggs/double_pendulum/blob/master/double_pendulum.pyde), but running in real time, having beautiful defaults, and being interactive without a backend make this leagues better than anything offered by any other language. There is definitely a loss of energy in the system over time that I attribute to JavaScript being a mess, but I doubt I would ever move all of my analysis to JS anyway, so I don't think it matters. It's also very likely I'm doing something bad with my timesteps.