Compare commits
No commits in common. "main" and "feat/import-cost" have entirely different histories.
main
...
feat/impor
41
.github/workflows/aws.yaml
vendored
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
name: Deploy to AWS S3
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup PNPM
|
||||
uses: pnpm/action-setup@v2
|
||||
with:
|
||||
version: 8
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 18
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install
|
||||
|
||||
- name: Build site
|
||||
run: pnpm build
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-east-2
|
||||
|
||||
- name: Deploy static site to S3 bucket
|
||||
run: |
|
||||
chmod +x scripts/deploy.sh
|
||||
scripts/deploy.sh
|
||||
36
.github/workflows/ci.yaml
vendored
|
|
@ -1,18 +1,36 @@
|
|||
name: ci
|
||||
name: CI
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
ci:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: pnpm/action-setup@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup PNPM
|
||||
uses: pnpm/action-setup@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "pnpm"
|
||||
- run: pnpm install
|
||||
- run: pnpm prettier --check .
|
||||
- run: pnpm astro check
|
||||
- run: pnpm build
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install
|
||||
|
||||
- name: Prettier check
|
||||
run: pnpm prettier --check .
|
||||
|
||||
- name: Astro check
|
||||
run: pnpm astro check
|
||||
|
||||
- name: Build
|
||||
run: pnpm build
|
||||
|
|
|
|||
1
.gitignore
vendored
|
|
@ -19,4 +19,3 @@ pnpm-debug.log*
|
|||
.gitignore
|
||||
|
||||
.claude/
|
||||
.vercel/
|
||||
|
|
|
|||
|
|
@ -1,11 +1,41 @@
|
|||
import { defineConfig } from "astro/config";
|
||||
import mdx from "@astrojs/mdx";
|
||||
import vercel from "@astrojs/vercel";
|
||||
import rehypeExternalLinks from "rehype-external-links";
|
||||
import rehypeKatex from "rehype-katex";
|
||||
import remarkMath from "remark-math";
|
||||
import path from "path";
|
||||
|
||||
const midnight = {
|
||||
name: "midnight",
|
||||
type: "dark",
|
||||
colors: {
|
||||
"editor.background": "#121212",
|
||||
"editor.foreground": "#e0e0e0",
|
||||
},
|
||||
tokenColors: [
|
||||
{
|
||||
scope: [
|
||||
"storage.type",
|
||||
"storage.modifier",
|
||||
"keyword.control",
|
||||
"keyword.operator.new",
|
||||
],
|
||||
settings: { foreground: "#7aa2f7" },
|
||||
},
|
||||
{
|
||||
scope: [
|
||||
"string",
|
||||
"constant",
|
||||
"constant.numeric",
|
||||
"constant.language",
|
||||
"constant.character",
|
||||
"number",
|
||||
],
|
||||
settings: { foreground: "#98c379" },
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const daylight = {
|
||||
name: "daylight",
|
||||
type: "light",
|
||||
|
|
@ -38,8 +68,6 @@ const daylight = {
|
|||
};
|
||||
|
||||
export default defineConfig({
|
||||
output: "static",
|
||||
adapter: vercel(),
|
||||
build: {
|
||||
format: "file",
|
||||
},
|
||||
|
|
@ -69,6 +97,7 @@ export default defineConfig({
|
|||
shikiConfig: {
|
||||
themes: {
|
||||
light: daylight,
|
||||
dark: midnight,
|
||||
},
|
||||
langs: [],
|
||||
wrap: true,
|
||||
|
|
|
|||
15
package.json
|
|
@ -1,5 +1,5 @@
|
|||
{
|
||||
"name": "barrettruth.sh",
|
||||
"name": "barrettruth.com",
|
||||
"type": "module",
|
||||
"version": "0.0.1",
|
||||
"scripts": {
|
||||
|
|
@ -17,19 +17,18 @@
|
|||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@astrojs/mdx": "^4.3.13",
|
||||
"@astrojs/vercel": "^9.0.4",
|
||||
"astro": "^5.17.3",
|
||||
"@astrojs/mdx": "^4.3.6",
|
||||
"astro": "^5.14.1",
|
||||
"rehype-external-links": "^3.0.0",
|
||||
"rehype-katex": "^7.0.1",
|
||||
"remark-math": "^6.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@astrojs/check": "^0.9.6",
|
||||
"@typescript-eslint/parser": "^8.56.1",
|
||||
"@astrojs/check": "^0.9.5",
|
||||
"@typescript-eslint/parser": "^8.46.0",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^16.2.7",
|
||||
"prettier": "^3.8.1",
|
||||
"lint-staged": "^16.2.3",
|
||||
"prettier": "^3.6.2",
|
||||
"prettier-plugin-astro": "^0.14.1",
|
||||
"typescript": "^5.9.3"
|
||||
},
|
||||
|
|
|
|||
2724
pnpm-lock.yaml
generated
BIN
public/death/death.webp
Normal file
|
After Width: | Height: | Size: 294 KiB |
BIN
public/designing-this-website/website-design.webp
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
public/fonts/apercu-mono/ApercuMonoProBold.ttf
Normal file
BIN
public/fonts/apercu-mono/ApercuMonoProLight.ttf
Normal file
BIN
public/fonts/apercu-mono/ApercuMonoProMedium.ttf
Normal file
BIN
public/fonts/apercu-mono/ApercuMonoProRegular.ttf
Normal file
BIN
public/improving-my-developer-workflow/workflow.webm
Normal file
BIN
public/multithreading-a-gui/multi-threaded-implementation.webp
Normal file
|
After Width: | Height: | Size: 29 KiB |
BIN
public/multithreading-a-gui/single-threaded-design.webp
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
public/my-competitive-programming-setup/cp-setup.webp
Normal file
|
After Width: | Height: | Size: 36 KiB |
BIN
public/proofs/graph.webp
Normal file
|
After Width: | Height: | Size: 7.9 KiB |
BIN
public/refactoring-a-state-machine/state-encoding.webp
Normal file
|
After Width: | Height: | Size: 9.7 KiB |
283
public/scripts/index.js
Normal file
|
|
@ -0,0 +1,283 @@
|
|||
(() => {
|
||||
if (window.__BT_INDEX_INIT) return;
|
||||
window.__BT_INDEX_INIT = true;
|
||||
|
||||
const TERMINAL_PROMPT = "barrett@ruth:~$ ";
|
||||
let typing = false;
|
||||
|
||||
function promptEl() {
|
||||
return document.querySelector(".terminal-prompt");
|
||||
}
|
||||
function promptTail() {
|
||||
const el = promptEl();
|
||||
if (!el) return "";
|
||||
const s = el.textContent || "";
|
||||
return s.startsWith(TERMINAL_PROMPT) ? s.slice(TERMINAL_PROMPT.length) : s;
|
||||
}
|
||||
function setPromptTailImmediate(tail) {
|
||||
const el = promptEl();
|
||||
if (!el) return;
|
||||
el.textContent = TERMINAL_PROMPT + tail;
|
||||
}
|
||||
function persistPrompt() {
|
||||
const el = promptEl();
|
||||
if (el) sessionStorage.setItem("terminalPromptText", el.textContent);
|
||||
}
|
||||
(function restorePrompt() {
|
||||
const saved = sessionStorage.getItem("terminalPromptText");
|
||||
const el = promptEl();
|
||||
if (saved && el) el.textContent = saved;
|
||||
sessionStorage.removeItem("terminalPromptText");
|
||||
})();
|
||||
|
||||
function normalizeDisplayPath(pathname) {
|
||||
let p = pathname.replace(/\/index\.html$/, "/").replace(/\.html$/, "");
|
||||
p = p.replace(/\/{2,}/g, "/");
|
||||
if (p !== "/" && p.endsWith("/")) p = p.slice(0, -1);
|
||||
return p === "/" ? "" : p;
|
||||
}
|
||||
function displayPathFromHref(href) {
|
||||
const url = new URL(href, location.origin);
|
||||
return normalizeDisplayPath(url.pathname);
|
||||
}
|
||||
function currentDisplayPath() {
|
||||
return normalizeDisplayPath(location.pathname);
|
||||
}
|
||||
function animateToDisplayPath(displayPath, totalMs, done) {
|
||||
if (typing) return;
|
||||
typing = true;
|
||||
|
||||
const el = promptEl();
|
||||
if (!el) {
|
||||
typing = false;
|
||||
return;
|
||||
}
|
||||
|
||||
const targetTail = displayPath ? " " + displayPath : "";
|
||||
const curTail = promptTail();
|
||||
|
||||
let i = 0;
|
||||
const max = Math.min(curTail.length, targetTail.length);
|
||||
while (i < max && curTail.charAt(i) === targetTail.charAt(i)) i++;
|
||||
|
||||
const delSteps = curTail.length - i;
|
||||
const typeSteps = targetTail.length - i;
|
||||
const totalSteps = delSteps + typeSteps;
|
||||
|
||||
if (totalSteps === 0) {
|
||||
typing = false;
|
||||
done && done();
|
||||
return;
|
||||
}
|
||||
|
||||
const stepMs = totalMs / totalSteps;
|
||||
|
||||
let delCount = 0;
|
||||
function tickDelete() {
|
||||
if (delCount < delSteps) {
|
||||
setPromptTailImmediate(curTail.slice(0, curTail.length - delCount - 1));
|
||||
delCount++;
|
||||
setTimeout(tickDelete, stepMs);
|
||||
} else {
|
||||
let j = 0;
|
||||
function tickType() {
|
||||
if (j < typeSteps) {
|
||||
setPromptTailImmediate(
|
||||
curTail.slice(0, i) + targetTail.slice(i, i + j + 1),
|
||||
);
|
||||
j++;
|
||||
setTimeout(tickType, stepMs);
|
||||
} else {
|
||||
typing = false;
|
||||
done && done();
|
||||
}
|
||||
}
|
||||
tickType();
|
||||
}
|
||||
}
|
||||
tickDelete();
|
||||
}
|
||||
|
||||
function renderPosts(topic) {
|
||||
if (!window.postsByCategory) return;
|
||||
const posts = document.getElementById("posts");
|
||||
if (!posts) return;
|
||||
posts.innerHTML = "";
|
||||
const arr = window.postsByCategory[topic];
|
||||
if (!arr) return;
|
||||
for (const post of arr) {
|
||||
const div = document.createElement("div");
|
||||
div.className = "post";
|
||||
const a = document.createElement("a");
|
||||
const slug =
|
||||
post.slug ||
|
||||
post.id
|
||||
?.split("/")
|
||||
.pop()
|
||||
?.replace(/\.mdx?$/, "");
|
||||
a.href = `/${topic}/${slug}.html`;
|
||||
a.textContent = post.data.title;
|
||||
a.style.textDecoration = "underline";
|
||||
div.appendChild(a);
|
||||
posts.appendChild(div);
|
||||
}
|
||||
}
|
||||
|
||||
function handleDataTopicClick(e) {
|
||||
const link = e.target.closest("[data-topic]");
|
||||
if (!link) return;
|
||||
e.preventDefault();
|
||||
if (typing) return;
|
||||
|
||||
const path = window.location.pathname;
|
||||
const isHome = path === "/" || path === "/index.html";
|
||||
const topic = link.dataset.topic?.toLowerCase() || "";
|
||||
const href = link.getAttribute("href") || "/";
|
||||
const delay = 500;
|
||||
|
||||
const colorFn = window.getTopicColor || (() => "");
|
||||
const topicKey = topic.split("/")[0];
|
||||
const willNavigateAway = !(
|
||||
isHome &&
|
||||
topic &&
|
||||
window.postsByCategory &&
|
||||
window.postsByCategory[topic]
|
||||
);
|
||||
|
||||
if (willNavigateAway) {
|
||||
link.classList.add("active");
|
||||
const c = colorFn(topicKey);
|
||||
if (c) link.style.color = c;
|
||||
} else {
|
||||
document.querySelectorAll("[data-topic]").forEach((t) => {
|
||||
t.classList.remove("active");
|
||||
t.style.color = "";
|
||||
});
|
||||
link.classList.add("active");
|
||||
const c = colorFn(topicKey);
|
||||
if (c) link.style.color = c;
|
||||
}
|
||||
|
||||
const displayPath = isHome ? `/${topic}` : displayPathFromHref(href);
|
||||
|
||||
animateToDisplayPath(displayPath, delay, () => {
|
||||
if (
|
||||
isHome &&
|
||||
topic &&
|
||||
window.postsByCategory &&
|
||||
window.postsByCategory[topic]
|
||||
) {
|
||||
renderPosts(topic);
|
||||
return;
|
||||
}
|
||||
persistPrompt();
|
||||
|
||||
const isMail = href.startsWith("mailto:");
|
||||
if (isMail) {
|
||||
window.location.href = href;
|
||||
return;
|
||||
}
|
||||
if (link.target === "_blank") {
|
||||
window.open(href, "_blank");
|
||||
return;
|
||||
}
|
||||
window.location.href = href;
|
||||
});
|
||||
}
|
||||
|
||||
function handleHomeClick(e) {
|
||||
const home = e.target.closest(".home-link");
|
||||
if (!home) return;
|
||||
e.preventDefault();
|
||||
const isHome =
|
||||
window.location.pathname === "/" ||
|
||||
window.location.pathname === "/index.html";
|
||||
const delay = 500;
|
||||
if (isHome) {
|
||||
animateToDisplayPath("", delay, () => {
|
||||
const posts = document.getElementById("posts");
|
||||
if (posts) posts.innerHTML = "";
|
||||
document.querySelectorAll("[data-topic].active").forEach((t) => {
|
||||
t.classList.remove("active");
|
||||
t.style.color = "";
|
||||
});
|
||||
document.title = "";
|
||||
});
|
||||
} else {
|
||||
persistPrompt();
|
||||
animateToDisplayPath("", delay, () => {
|
||||
window.location.href = "/";
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
document.addEventListener("DOMContentLoaded", () => {
|
||||
const initial = currentDisplayPath();
|
||||
if (initial) setPromptTailImmediate(" " + initial);
|
||||
else setPromptTailImmediate("");
|
||||
|
||||
document.body.addEventListener("click", (e) => {
|
||||
if (e.target.closest(".home-link")) return handleHomeClick(e);
|
||||
if (e.target.closest(".topics [data-topic]"))
|
||||
return handleDataTopicClick(e);
|
||||
});
|
||||
|
||||
document.body.addEventListener(
|
||||
"mouseenter",
|
||||
(e) => {
|
||||
const link = e.target.closest(".topics [data-topic]");
|
||||
if (!link) return;
|
||||
const raw = link.dataset.topic || "";
|
||||
const key = raw.split("/")[0].toLowerCase();
|
||||
const color = (window.getTopicColor && window.getTopicColor(key)) || "";
|
||||
if (color) {
|
||||
link.style.color = color;
|
||||
link.style.textDecorationColor = color;
|
||||
}
|
||||
},
|
||||
true,
|
||||
);
|
||||
|
||||
document.body.addEventListener(
|
||||
"mouseleave",
|
||||
(e) => {
|
||||
const link = e.target.closest(".topics [data-topic]");
|
||||
if (!link) return;
|
||||
if (!link.classList.contains("active")) {
|
||||
link.style.color = "";
|
||||
link.style.textDecorationColor = "";
|
||||
}
|
||||
},
|
||||
true,
|
||||
);
|
||||
|
||||
const themeToggle = document.getElementById("theme-toggle");
|
||||
if (themeToggle) {
|
||||
function updateBearVisual() {
|
||||
const currentTheme =
|
||||
document.documentElement.getAttribute("data-theme");
|
||||
if (currentTheme === "dark") {
|
||||
themeToggle.textContent = "☾⊂ʕ•ᴥ•ʔ";
|
||||
} else {
|
||||
themeToggle.textContent = "☼⊂ʕ•ᴥ•ʔ";
|
||||
}
|
||||
}
|
||||
|
||||
updateBearVisual();
|
||||
|
||||
themeToggle.addEventListener("click", () => {
|
||||
const currentTheme =
|
||||
document.documentElement.getAttribute("data-theme");
|
||||
const newTheme = currentTheme === "dark" ? "light" : "dark";
|
||||
document.documentElement.setAttribute("data-theme", newTheme);
|
||||
localStorage.setItem("theme", newTheme);
|
||||
updateBearVisual();
|
||||
});
|
||||
}
|
||||
|
||||
window.addEventListener("beforeunload", () => {
|
||||
const el = promptEl();
|
||||
if (el) el.textContent = TERMINAL_PROMPT;
|
||||
});
|
||||
});
|
||||
})();
|
||||
666
public/scripts/models-of-production.js
Normal file
|
|
@ -0,0 +1,666 @@
|
|||
function urlToTopic() {
|
||||
const path = window.location.pathname;
|
||||
const pathParts = path.split("/");
|
||||
return pathParts[2];
|
||||
}
|
||||
|
||||
function getTextColor() {
|
||||
return getComputedStyle(document.documentElement)
|
||||
.getPropertyValue("--text")
|
||||
.trim();
|
||||
}
|
||||
|
||||
function setUpParameters(render, parameters, modelPrefix) {
|
||||
parameters.forEach((param) => {
|
||||
const slider = document.getElementById(`slider${modelPrefix}${param}`);
|
||||
slider.oninput = function () {
|
||||
slider.previousElementSibling.innerText = this.value;
|
||||
render();
|
||||
};
|
||||
});
|
||||
return parameters.map((param) => {
|
||||
return parseFloat(
|
||||
document.getElementById(`output${modelPrefix}${param}`).textContent,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function drawSolowGraph() {
|
||||
const L = 150,
|
||||
K_MAX = 500,
|
||||
margin = { top: 50, right: 55, bottom: 40, left: 75 };
|
||||
|
||||
const [A, d, s, alpha] = setUpParameters(
|
||||
drawSolowGraph,
|
||||
["A", "d", "s", "alpha"],
|
||||
"S",
|
||||
);
|
||||
const solowOutput = (K) => A * Math.pow(K, alpha) * Math.pow(L, 1 - alpha);
|
||||
const solowDepreciation = (K) => d * K;
|
||||
const solowInvestment = (Y) => s * Y;
|
||||
|
||||
const container = document.getElementById("solow-visualization");
|
||||
|
||||
const width = container.clientWidth - margin.left - margin.right;
|
||||
const height = container.clientHeight - margin.top - margin.bottom;
|
||||
|
||||
container.innerHTML = "";
|
||||
|
||||
const svg = d3
|
||||
.select("#solow-visualization")
|
||||
.append("svg")
|
||||
.attr("width", width + margin.left + margin.right)
|
||||
.attr("height", height + margin.top + margin.bottom)
|
||||
.append("g")
|
||||
.attr("transform", `translate(${margin.left}, ${margin.top})`);
|
||||
|
||||
const x = d3.scaleLinear().domain([0, K_MAX]).range([0, width]);
|
||||
const xAxis = svg
|
||||
.append("g")
|
||||
.attr("transform", `translate(0, ${height})`)
|
||||
.call(d3.axisBottom(x));
|
||||
xAxis.selectAll("text").style("font-size", "1.5em");
|
||||
xAxis
|
||||
.append("text")
|
||||
.attr("x", width + 10)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "end")
|
||||
.style("font-size", "2em")
|
||||
.text("K");
|
||||
|
||||
const Y_MAX = solowOutput(K_MAX) + K_MAX / 10;
|
||||
const y = d3.scaleLinear().domain([0, Y_MAX]).range([height, 0]);
|
||||
const yAxis = svg.append("g").call(d3.axisLeft(y));
|
||||
yAxis.selectAll("text").style("font-size", "1.5em");
|
||||
yAxis
|
||||
.append("text")
|
||||
.attr("x", 0)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "start")
|
||||
.style("font-size", "2em")
|
||||
.text("Y");
|
||||
|
||||
const outputData = Array.from({ length: K_MAX }, (_, k) => ({
|
||||
K: k,
|
||||
Y: solowOutput(k),
|
||||
}));
|
||||
svg
|
||||
.append("path")
|
||||
.datum(outputData)
|
||||
.attr("fill", "none")
|
||||
.attr("stroke", window.getTopicColor(urlToTopic()))
|
||||
.attr("stroke-width", 2)
|
||||
.attr(
|
||||
"d",
|
||||
d3
|
||||
.line()
|
||||
.x((d) => x(d.K))
|
||||
.y((d) => y(d.Y)),
|
||||
);
|
||||
svg
|
||||
.append("foreignObject")
|
||||
.attr("width", "2em")
|
||||
.attr("height", "2em")
|
||||
.attr("x", x(K_MAX))
|
||||
.attr("y", y(outputData[K_MAX - 1].Y))
|
||||
.append("xhtml:body")
|
||||
.style("font-size", "0.75em")
|
||||
.text("Y");
|
||||
|
||||
const depreciationData = Array.from({ length: K_MAX }, (_, k) => ({
|
||||
K: k,
|
||||
Y: solowDepreciation(k),
|
||||
}));
|
||||
svg
|
||||
.append("path")
|
||||
.datum(depreciationData)
|
||||
.attr("fill", "none")
|
||||
.attr("stroke", "red")
|
||||
.attr("stroke-width", 2)
|
||||
.attr(
|
||||
"d",
|
||||
d3
|
||||
.line()
|
||||
.x((d) => x(d.K))
|
||||
.y((d) => y(d.Y)),
|
||||
);
|
||||
|
||||
svg
|
||||
.append("foreignObject")
|
||||
.attr("width", "2em")
|
||||
.attr("height", "2em")
|
||||
.attr("x", x(K_MAX))
|
||||
.attr("y", y(depreciationData[K_MAX - 1].Y))
|
||||
.append("xhtml:body")
|
||||
.style("font-size", "0.75em")
|
||||
.append("xhtml:div")
|
||||
.text("dK");
|
||||
|
||||
const investmentData = outputData.map((d) => ({
|
||||
K: d.K,
|
||||
Y: solowInvestment(d.Y),
|
||||
}));
|
||||
svg
|
||||
.append("path")
|
||||
.datum(investmentData)
|
||||
.attr("fill", "none")
|
||||
.attr("stroke", "purple")
|
||||
.attr("stroke-width", 2)
|
||||
.attr(
|
||||
"d",
|
||||
d3
|
||||
.line()
|
||||
.x((d) => x(d.K))
|
||||
.y((d) => y(d.Y)),
|
||||
);
|
||||
|
||||
svg
|
||||
.append("text")
|
||||
.attr("x", x(K_MAX))
|
||||
.attr("y", y(investmentData[K_MAX - 1].Y))
|
||||
.style("font-size", "0.75em")
|
||||
.text("I");
|
||||
|
||||
const k_star = L * Math.pow((s * A) / d, 1 / (1 - alpha));
|
||||
svg
|
||||
.append("line")
|
||||
.attr("x1", x(k_star))
|
||||
.attr("y1", y((d * k_star) / s))
|
||||
.attr("x2", x(k_star))
|
||||
.attr("y2", y(0))
|
||||
.attr("stroke-width", 1)
|
||||
.attr("stroke-dasharray", "5,5");
|
||||
|
||||
const y_star = solowOutput(k_star);
|
||||
svg
|
||||
.append("text")
|
||||
.attr("x", x(k_star) - 40)
|
||||
.attr("y", y(y_star) - 40)
|
||||
.style("font-size", "0.75em")
|
||||
.text(`(${k_star.toFixed(0)}, ${y_star.toFixed(0)})`);
|
||||
}
|
||||
|
||||
const formatNumber = (num) => {
|
||||
return `~${num.toExponential(0)}`;
|
||||
};
|
||||
|
||||
const normalFont = `style="font-weight: normal"`;
|
||||
|
||||
const updateRomerTable = (romerData) => {
|
||||
const tableHeader = document.getElementById("romer-table-header");
|
||||
const rowA_t = document.getElementById("row-A_t");
|
||||
const rowY_t = document.getElementById("row-Y_t");
|
||||
|
||||
tableHeader.innerHTML = `<th ${normalFont}>t</th>`;
|
||||
rowA_t.innerHTML = `<td class="romer-table-at">A_t</td>`;
|
||||
rowY_t.innerHTML = `<td class="romer-table-yt">Y_t</td>`;
|
||||
|
||||
romerData.forEach((d) => {
|
||||
if (d.year % 20 === 0 || d.year === 1) {
|
||||
tableHeader.innerHTML += `<th ${normalFont}>${d.year}</th>`;
|
||||
rowA_t.innerHTML += `<td>${formatNumber(d.A)}</td>`;
|
||||
rowY_t.innerHTML += `<td>${formatNumber(d.Y)}</td>`;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
function drawRomerGraph() {
|
||||
const T_MAX = 100,
|
||||
margin = { top: 50, right: 110, bottom: 40, left: 75 };
|
||||
|
||||
const [z, L, l, A0] = setUpParameters(
|
||||
drawRomerGraph,
|
||||
["z", "L", "l", "A0"],
|
||||
"R",
|
||||
);
|
||||
|
||||
const container = document.getElementById("romer-visualization");
|
||||
const width = container.clientWidth - margin.left - margin.right;
|
||||
const height = container.clientHeight - margin.top - margin.bottom;
|
||||
|
||||
container.innerHTML = "";
|
||||
|
||||
const svg = d3
|
||||
.select("#romer-visualization")
|
||||
.append("svg")
|
||||
.attr("width", width + margin.left + margin.right)
|
||||
.attr("height", height + margin.top + margin.bottom)
|
||||
.append("g")
|
||||
.attr("transform", `translate(${margin.left}, ${margin.top})`);
|
||||
|
||||
let A = A0;
|
||||
const romerData = [];
|
||||
|
||||
for (let t = 1; t <= T_MAX; ++t) {
|
||||
const A_t = A * (1 + z * l * L);
|
||||
const Y_t = A_t * (1 - l) * L;
|
||||
romerData.push({ year: t, A: A_t, Y: Math.log10(Y_t) });
|
||||
A = A_t;
|
||||
}
|
||||
|
||||
const x = d3.scaleLinear().domain([1, T_MAX]).range([0, width]);
|
||||
const xAxis = svg
|
||||
.append("g")
|
||||
.attr("transform", `translate(0, ${height})`)
|
||||
.call(d3.axisBottom(x));
|
||||
xAxis.selectAll("text").style("font-size", "1.5em");
|
||||
xAxis
|
||||
.append("text")
|
||||
.attr("x", width + 10)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "end")
|
||||
.style("font-size", "2em")
|
||||
.text("t");
|
||||
|
||||
const y = d3
|
||||
.scaleLinear()
|
||||
.domain([0, romerData[romerData.length - 1].Y])
|
||||
.range([height, 0]);
|
||||
const yAxis = svg
|
||||
.append("g")
|
||||
.call(d3.axisLeft(y).ticks(10, d3.format(".1s")));
|
||||
yAxis.selectAll("text").style("font-size", "1.5em");
|
||||
yAxis
|
||||
.append("text")
|
||||
.attr("x", 0)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "start")
|
||||
.style("font-size", "2em")
|
||||
.text("log(Y)");
|
||||
|
||||
svg
|
||||
.append("path")
|
||||
.datum(romerData)
|
||||
.attr("fill", "none")
|
||||
.attr("stroke", window.getTopicColor(urlToTopic()))
|
||||
.attr("stroke-width", 2)
|
||||
.attr(
|
||||
"d",
|
||||
d3
|
||||
.line()
|
||||
.x((d) => x(d.year))
|
||||
.y((d) => y(d.Y)),
|
||||
);
|
||||
|
||||
svg
|
||||
.append("text")
|
||||
.attr("x", x(T_MAX))
|
||||
.attr("y", y(romerData[T_MAX - 1].Y))
|
||||
.style("font-size", "0.75em")
|
||||
.text("log10(Y)");
|
||||
|
||||
updateRomerTable(romerData);
|
||||
}
|
||||
|
||||
function drawRomerlGraph() {
|
||||
const T_MAX = 100,
|
||||
z = 0.01,
|
||||
L = 50,
|
||||
A0 = 50,
|
||||
margin = { top: 50, right: 110, bottom: 40, left: 75 };
|
||||
|
||||
const [l, t0] = setUpParameters(drawRomerlGraph, ["lChange", "t0"], "");
|
||||
|
||||
const container = document.getElementById("romer-lchange-visualization");
|
||||
const width = container.clientWidth - margin.left - margin.right;
|
||||
const height = container.clientHeight - margin.top - margin.bottom;
|
||||
|
||||
container.innerHTML = "";
|
||||
|
||||
const svg = d3
|
||||
.select("#romer-lchange-visualization")
|
||||
.append("svg")
|
||||
.attr("width", width + margin.left + margin.right)
|
||||
.attr("height", height + margin.top + margin.bottom)
|
||||
.append("g")
|
||||
.attr("transform", `translate(${margin.left}, ${margin.top})`);
|
||||
|
||||
let A = A0,
|
||||
l_ = 0.1;
|
||||
const romerData = [];
|
||||
|
||||
for (let t = 1; t <= t0; ++t) {
|
||||
const A_t = A * (1 + z * l_ * L);
|
||||
const Y_t = A_t * (1 - l_) * L;
|
||||
romerData.push({ year: t, A: A_t, Y: Math.log10(Y_t) });
|
||||
A = A_t;
|
||||
}
|
||||
|
||||
for (let t = t0 + 1; t <= T_MAX; ++t) {
|
||||
const A_t = A * (1 + z * l * L);
|
||||
const Y_t = A_t * (1 - l) * L;
|
||||
romerData.push({ year: t, A: A_t, Y: Math.log10(Y_t) });
|
||||
A = A_t;
|
||||
}
|
||||
|
||||
const x = d3.scaleLinear().domain([1, T_MAX]).range([0, width]);
|
||||
const xAxis = svg
|
||||
.append("g")
|
||||
.attr("transform", `translate(0, ${height})`)
|
||||
.call(d3.axisBottom(x));
|
||||
xAxis.selectAll("text").style("font-size", "1.5em");
|
||||
xAxis
|
||||
.append("text")
|
||||
.attr("x", width + 10)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "end")
|
||||
.style("font-size", "2em")
|
||||
.text("t");
|
||||
|
||||
const y = d3
|
||||
.scaleLinear()
|
||||
.domain([0, romerData[romerData.length - 1].Y])
|
||||
.range([height, 0]);
|
||||
const yAxis = svg
|
||||
.append("g")
|
||||
.call(d3.axisLeft(y).ticks(10, d3.format(".1s")));
|
||||
yAxis.selectAll("text").style("font-size", "1.5em");
|
||||
yAxis
|
||||
.append("text")
|
||||
.attr("x", 0)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "start")
|
||||
.style("font-size", "2em")
|
||||
.text("log(Y)");
|
||||
|
||||
svg
|
||||
.append("path")
|
||||
.datum(romerData)
|
||||
.attr("fill", "none")
|
||||
.attr("stroke", window.getTopicColor(urlToTopic()))
|
||||
.attr("stroke-width", 2)
|
||||
.attr(
|
||||
"d",
|
||||
d3
|
||||
.line()
|
||||
.x((d) => x(d.year))
|
||||
.y((d) => y(d.Y)),
|
||||
);
|
||||
|
||||
svg
|
||||
.append("line")
|
||||
.attr("x1", x(t0))
|
||||
.attr("y1", y(romerData[T_MAX - 1].Y))
|
||||
.attr("x2", x(t0))
|
||||
.attr("y2", height)
|
||||
.attr("stroke-width", 1)
|
||||
.attr("stroke-dasharray", "4");
|
||||
|
||||
svg
|
||||
.append("text")
|
||||
.attr("x", x(0) + 15)
|
||||
.attr("y", y(romerData[0].Y))
|
||||
.style("font-size", "0.6em")
|
||||
.text(`l0=${l_}`);
|
||||
|
||||
svg
|
||||
.append("text")
|
||||
.attr("x", x(t0) + 15)
|
||||
.attr("y", y(romerData[t0].Y))
|
||||
.style("font-size", "0.6em")
|
||||
.text(`l1=${l}`);
|
||||
|
||||
svg
|
||||
.append("text")
|
||||
.attr("x", x(T_MAX))
|
||||
.attr("y", y(romerData[T_MAX - 1].Y))
|
||||
.style("font-size", "0.75em")
|
||||
.text("log10(Y)");
|
||||
}
|
||||
|
||||
function calculateRomerSolowData(
|
||||
T_MAX,
|
||||
L,
|
||||
l,
|
||||
A0,
|
||||
alpha,
|
||||
s,
|
||||
d,
|
||||
z,
|
||||
t0 = Infinity,
|
||||
L0,
|
||||
l0,
|
||||
alpha0,
|
||||
z0,
|
||||
) {
|
||||
let A = A0,
|
||||
K_t = 1,
|
||||
romerSolowData = [];
|
||||
|
||||
for (let t = 1; t <= T_MAX; ++t) {
|
||||
if (t > t0) {
|
||||
alpha = alpha0;
|
||||
z = z0;
|
||||
l = l0;
|
||||
L = L0;
|
||||
}
|
||||
|
||||
const Y_t = A * Math.pow(K_t, alpha) * Math.pow((1 - l) * L, 1 - alpha);
|
||||
const A_t = A * (1 + z * l * L);
|
||||
K_t = K_t + s * Y_t - d * K_t;
|
||||
romerSolowData.push({ year: t, A: A_t, K: K_t, Y: Math.log10(Y_t) });
|
||||
A = A_t;
|
||||
}
|
||||
|
||||
return romerSolowData;
|
||||
}
|
||||
|
||||
function drawRomerSolowGraph() {
|
||||
const T_MAX = 100,
|
||||
margin = { top: 50, right: 110, bottom: 40, left: 75 };
|
||||
|
||||
const [z, l, L, A0, s, d, alpha] = setUpParameters(
|
||||
drawRomerSolowGraph,
|
||||
["z", "l", "L", "A0", "s", "d", "alpha"],
|
||||
"RS",
|
||||
);
|
||||
|
||||
const container = document.getElementById("romer-solow-visualization");
|
||||
const width = container.clientWidth - margin.left - margin.right;
|
||||
const height = container.clientHeight - margin.top - margin.bottom;
|
||||
|
||||
container.innerHTML = "";
|
||||
|
||||
const svg = d3
|
||||
.select("#romer-solow-visualization")
|
||||
.append("svg")
|
||||
.attr("width", width + margin.left + margin.right)
|
||||
.attr("height", height + margin.top + margin.bottom)
|
||||
.append("g")
|
||||
.attr("transform", `translate(${margin.left}, ${margin.top})`);
|
||||
|
||||
const romerSolowData = calculateRomerSolowData(
|
||||
T_MAX,
|
||||
L,
|
||||
l,
|
||||
A0,
|
||||
alpha,
|
||||
s,
|
||||
d,
|
||||
z,
|
||||
);
|
||||
|
||||
const x = d3.scaleLinear().domain([1, T_MAX]).range([0, width]);
|
||||
const xAxis = svg
|
||||
.append("g")
|
||||
.attr("transform", `translate(0, ${height})`)
|
||||
.call(d3.axisBottom(x));
|
||||
xAxis.selectAll("text").style("font-size", "1.5em");
|
||||
xAxis
|
||||
.append("text")
|
||||
.attr("x", width + 10)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "end")
|
||||
.style("font-size", "2em")
|
||||
.text("t");
|
||||
|
||||
const y = d3
|
||||
.scaleLinear()
|
||||
.domain([0, romerSolowData[romerSolowData.length - 1].Y])
|
||||
.range([height, 0]);
|
||||
const yAxis = svg
|
||||
.append("g")
|
||||
.call(d3.axisLeft(y).ticks(10, d3.format(".1s")));
|
||||
yAxis.selectAll("text").style("font-size", "1.5em");
|
||||
yAxis
|
||||
.append("text")
|
||||
.attr("x", 0)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "start")
|
||||
.style("font-size", "2em")
|
||||
.text("log(Y)");
|
||||
|
||||
svg
|
||||
.append("path")
|
||||
.datum(romerSolowData)
|
||||
.attr("fill", "none")
|
||||
.attr("stroke", window.getTopicColor(urlToTopic()))
|
||||
.attr("stroke-width", 2)
|
||||
.attr(
|
||||
"d",
|
||||
d3
|
||||
.line()
|
||||
.x((d) => x(d.year))
|
||||
.y((d) => y(d.Y)),
|
||||
);
|
||||
|
||||
svg
|
||||
.append("text")
|
||||
.attr("x", x(T_MAX))
|
||||
.attr("y", y(romerSolowData[T_MAX - 1].Y))
|
||||
.style("font-size", "0.75em")
|
||||
.text("log10(Y)");
|
||||
}
|
||||
|
||||
function drawRomerSolowChangeGraph() {
|
||||
const T_MAX = 100,
|
||||
margin = { top: 20, right: 100, bottom: 20, left: 50 },
|
||||
s = 0.2,
|
||||
d = 0.2,
|
||||
A0 = 50,
|
||||
alpha = 0.33,
|
||||
l = 0.5,
|
||||
L = 100,
|
||||
z = 0.5;
|
||||
|
||||
const [z0, l0, L0, alpha0, t0] = setUpParameters(
|
||||
drawRomerSolowChangeGraph,
|
||||
["z0", "l0", "L0", "alpha0", "t0"],
|
||||
"RSC",
|
||||
);
|
||||
|
||||
const container = document.getElementById("romer-solow-change-visualization");
|
||||
const width = container.clientWidth - margin.left - margin.right;
|
||||
const height = container.clientHeight - margin.top - margin.bottom;
|
||||
|
||||
container.innerHTML = "";
|
||||
|
||||
const svg = d3
|
||||
.select("#romer-solow-change-visualization")
|
||||
.append("svg")
|
||||
.attr("width", width + margin.left + margin.right)
|
||||
.attr("height", height + margin.top + margin.bottom)
|
||||
.append("g")
|
||||
.attr("transform", `translate(${margin.left}, ${margin.top})`);
|
||||
|
||||
const romerSolowData = calculateRomerSolowData(
|
||||
T_MAX,
|
||||
L,
|
||||
l,
|
||||
A0,
|
||||
alpha,
|
||||
s,
|
||||
d,
|
||||
z,
|
||||
t0,
|
||||
L0,
|
||||
l0,
|
||||
alpha0,
|
||||
z0,
|
||||
);
|
||||
|
||||
const x = d3.scaleLinear().domain([1, T_MAX]).range([0, width]);
|
||||
const xAxis = svg
|
||||
.append("g")
|
||||
.attr("transform", `translate(0, ${height})`)
|
||||
.call(d3.axisBottom(x));
|
||||
xAxis.selectAll("text").style("font-size", "1.5em");
|
||||
xAxis
|
||||
.append("text")
|
||||
.attr("x", width + 10)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "end")
|
||||
.style("font-size", "2em")
|
||||
.text("t");
|
||||
|
||||
const y = d3
|
||||
.scaleLinear()
|
||||
.domain([0, romerSolowData[romerSolowData.length - 1].Y])
|
||||
.range([height, 0]);
|
||||
const yAxis = svg
|
||||
.append("g")
|
||||
.call(d3.axisLeft(y).ticks(10, d3.format(".1s")));
|
||||
yAxis.selectAll("text").style("font-size", "1.5em");
|
||||
yAxis
|
||||
.append("text")
|
||||
.attr("x", 0)
|
||||
.attr("y", -10)
|
||||
.style("text-anchor", "start")
|
||||
.style("font-size", "2em")
|
||||
.text("log(Y)");
|
||||
|
||||
svg
|
||||
.append("path")
|
||||
.datum(romerSolowData)
|
||||
.attr("fill", "none")
|
||||
.attr("stroke", window.getTopicColor(urlToTopic()))
|
||||
.attr("stroke-width", 2)
|
||||
.attr(
|
||||
"d",
|
||||
d3
|
||||
.line()
|
||||
.x((d) => x(d.year))
|
||||
.y((d) => y(d.Y)),
|
||||
);
|
||||
|
||||
svg
|
||||
.append("line")
|
||||
.attr("x1", x(t0))
|
||||
.attr("y1", y(romerSolowData[T_MAX - 1].Y))
|
||||
.attr("x2", x(t0))
|
||||
.attr("y2", height)
|
||||
.attr("stroke-width", 1)
|
||||
.attr("stroke-dasharray", "4");
|
||||
|
||||
svg
|
||||
.append("text")
|
||||
.attr("x", x(T_MAX))
|
||||
.attr("y", y(romerSolowData[T_MAX - 1].Y))
|
||||
.style("font-size", "0.75em")
|
||||
.text("log10(Y)");
|
||||
}
|
||||
|
||||
drawSolowGraph();
|
||||
drawRomerGraph();
|
||||
drawRomerlGraph();
|
||||
drawRomerSolowGraph();
|
||||
drawRomerSolowChangeGraph();
|
||||
|
||||
window.onresize = () => {
|
||||
drawSolowGraph();
|
||||
drawRomerGraph();
|
||||
drawRomerlGraph();
|
||||
drawRomerSolowGraph();
|
||||
drawRomerSolowChangeGraph();
|
||||
};
|
||||
|
||||
new MutationObserver(() => {
|
||||
drawSolowGraph();
|
||||
drawRomerGraph();
|
||||
drawRomerlGraph();
|
||||
drawRomerSolowGraph();
|
||||
drawRomerSolowChangeGraph();
|
||||
}).observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ["data-theme"],
|
||||
});
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
.container {
|
||||
height: 100vh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
text-align: center;
|
||||
font-weight: 300;
|
||||
font-size: 2em;
|
||||
}
|
||||
|
|
@ -111,36 +111,69 @@
|
|||
}
|
||||
|
||||
@font-face {
|
||||
font-family: "Berkeley Mono";
|
||||
src: url("/fonts/berkeley-mono/BerkeleyMono-Regular.ttf") format("truetype");
|
||||
font-family: "Apercu Mono";
|
||||
src: url("/fonts/apercu-mono/ApercuMonoProLight.ttf") format("truetype");
|
||||
font-weight: 300;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: "Apercu Mono";
|
||||
src: url("/fonts/apercu-mono/ApercuMonoProRegular.ttf") format("truetype");
|
||||
font-weight: 400;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: "Berkeley Mono";
|
||||
src: url("/fonts/berkeley-mono/BerkeleyMono-Bold.ttf") format("truetype");
|
||||
font-weight: 700;
|
||||
font-family: "Apercu Mono";
|
||||
src: url("/fonts/apercu-mono/ApercuMonoProMedium.ttf") format("truetype");
|
||||
font-weight: 500;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: "Berkeley Mono";
|
||||
src: url("/fonts/berkeley-mono/BerkeleyMono-Italic.ttf") format("truetype");
|
||||
font-weight: 400;
|
||||
font-style: italic;
|
||||
font-family: "Apercu Mono";
|
||||
src: url("/fonts/apercu-mono/ApercuMonoProBold.ttf") format("truetype");
|
||||
font-weight: 700;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: "Berkeley Mono";
|
||||
src: url("/fonts/berkeley-mono/BerkeleyMono-BoldItalic.ttf")
|
||||
format("truetype");
|
||||
font-weight: 700;
|
||||
font-style: italic;
|
||||
font-display: swap;
|
||||
:root {
|
||||
--bg: #fff;
|
||||
--text: #000;
|
||||
--code-bg: #f4f4f4;
|
||||
--border: #e1e1e1;
|
||||
--grid-color: rgba(200, 200, 200, 0.4);
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
--bg: #000;
|
||||
--text: #fff;
|
||||
--code-bg: #121212;
|
||||
--border: #2d2d2d;
|
||||
--grid-color: rgba(55, 55, 55, 0.4);
|
||||
}
|
||||
}
|
||||
|
||||
[data-theme="light"] {
|
||||
--bg: #fff;
|
||||
--text: #000;
|
||||
--code-bg: #f4f4f4;
|
||||
--border: #e1e1e1;
|
||||
--grid-color: rgba(200, 200, 200, 0.4);
|
||||
}
|
||||
|
||||
[data-theme="dark"] {
|
||||
--bg: #000;
|
||||
--text: #fff;
|
||||
--code-bg: #121212;
|
||||
--border: #2d2d2d;
|
||||
--grid-color: rgba(55, 55, 55, 0.4);
|
||||
}
|
||||
|
||||
pre,
|
||||
|
|
@ -148,32 +181,107 @@ code,
|
|||
pre code,
|
||||
.astro-code,
|
||||
code[class*="language-"] {
|
||||
font-family: "Berkeley Mono", monospace !important;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
html,
|
||||
body {
|
||||
background: #1a1a1a;
|
||||
color: #f5f5f5;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
html,
|
||||
body {
|
||||
background: #121212;
|
||||
color: #e0e0e0;
|
||||
}
|
||||
font-family: "Apercu Mono", monospace !important;
|
||||
font-size: 0.95em;
|
||||
line-height: 1.5;
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
html,
|
||||
body {
|
||||
font-family: "Signifier", serif;
|
||||
overflow-x: hidden;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
min-height: 100vh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
background: var(--bg);
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
html,
|
||||
body {
|
||||
font-family: "Signifier", serif;
|
||||
header,
|
||||
footer {
|
||||
padding: 30px 20px;
|
||||
font-size: 1.5em;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
#theme-toggle {
|
||||
cursor: pointer;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.main {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
flex: 1;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
a {
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
li {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.graph-background {
|
||||
background-image:
|
||||
linear-gradient(to right, var(--grid-color) 1px, transparent 1px),
|
||||
linear-gradient(to bottom, var(--grid-color) 1px, transparent 1px);
|
||||
background-size: 3vw 3vw;
|
||||
}
|
||||
|
||||
html:has(body.graph-background) {
|
||||
background-image:
|
||||
linear-gradient(to right, var(--grid-color) 1px, transparent 1px),
|
||||
linear-gradient(to bottom, var(--grid-color) 1px, transparent 1px);
|
||||
background-size: 3vw 3vw;
|
||||
background-color: var(--bg);
|
||||
}
|
||||
|
||||
.terminal-cursor {
|
||||
display: inline-block;
|
||||
width: 10px;
|
||||
height: 1em;
|
||||
background-color: var(--bg);
|
||||
border: 1px solid var(--text);
|
||||
vertical-align: text-top;
|
||||
animation: blink 1s step-start infinite;
|
||||
}
|
||||
|
||||
@keyframes blink {
|
||||
50% {
|
||||
background-color: transparent;
|
||||
border-color: transparent;
|
||||
}
|
||||
}
|
||||
|
||||
.terminal-container {
|
||||
font-family: "Courier New", monospace;
|
||||
}
|
||||
|
||||
pre {
|
||||
font-feature-settings:
|
||||
"liga" 0,
|
||||
"calt" 0;
|
||||
}
|
||||
|
||||
@media (max-width: 1024px) {
|
||||
header,
|
||||
footer {
|
||||
font-size: 1.2em;
|
||||
padding: 20px 15px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 640px) {
|
||||
header,
|
||||
footer {
|
||||
font-size: 1em;
|
||||
padding: 15px 10px;
|
||||
}
|
||||
}
|
||||
23
public/styles/git.css
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
.clone-line {
|
||||
font-family: "Apercu Mono", monospace;
|
||||
font-size: 1.2em;
|
||||
margin: 0 1em 0 0;
|
||||
text-align: left;
|
||||
user-select: all;
|
||||
}
|
||||
|
||||
.clone-line code {
|
||||
background: transparent !important;
|
||||
all: unset;
|
||||
font-family: "Apercu Mono", monospace;
|
||||
white-space: normal;
|
||||
word-break: break-word;
|
||||
overflow-wrap: anywhere;
|
||||
line-height: 1.5;
|
||||
cursor: text;
|
||||
}
|
||||
|
||||
.clone-line .prompt {
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
}
|
||||
103
public/styles/graph.css
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
.graph {
|
||||
height: 50vh;
|
||||
width: 100%;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
margin: 50px 0;
|
||||
}
|
||||
|
||||
.graph div {
|
||||
height: 100%;
|
||||
width: 60%;
|
||||
}
|
||||
|
||||
.graph svg line,
|
||||
.graph svg path.domain,
|
||||
.graph svg .tick line {
|
||||
stroke: var(--text);
|
||||
}
|
||||
|
||||
.graph svg text {
|
||||
fill: var(--text);
|
||||
}
|
||||
|
||||
.slider {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.slider label {
|
||||
margin-right: 10px;
|
||||
}
|
||||
|
||||
.slider input {
|
||||
margin-left: 20px;
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
width: 100%;
|
||||
height: 2px;
|
||||
background: var(--text);
|
||||
cursor: pointer;
|
||||
outline: none;
|
||||
transform: translateY(-50%);
|
||||
width: 150px;
|
||||
}
|
||||
|
||||
.slider input::-webkit-slider-thumb {
|
||||
-webkit-appearance: none;
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
border-radius: 50%;
|
||||
background: var(--text);
|
||||
cursor: pointer;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.slider input::-moz-range-thumb {
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
border-radius: 50%;
|
||||
background: var(--text);
|
||||
cursor: pointer;
|
||||
position: relative;
|
||||
border: none;
|
||||
}
|
||||
|
||||
.slider input::-webkit-slider-runnable-track,
|
||||
.slider input::-moz-range-track {
|
||||
width: 100%;
|
||||
height: 2px;
|
||||
background: var(--text);
|
||||
border: none;
|
||||
}
|
||||
|
||||
.sliders {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.sliders li {
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.romer-table-container {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
margin: 20px 0;
|
||||
}
|
||||
#romer-table {
|
||||
text-align: center;
|
||||
margin-top: 20px;
|
||||
margin: 0;
|
||||
font-size: 0.8em;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
#romer-table th,
|
||||
#romer-table td {
|
||||
border: 1px solid var(--text);
|
||||
background: var(--bg);
|
||||
text-align: center;
|
||||
padding: 5px;
|
||||
}
|
||||
|
|
@ -1,60 +1,164 @@
|
|||
.container {
|
||||
/* List pages only - no scroll */
|
||||
body:has(.content) {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100vh;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
body:has(.content) > header {
|
||||
flex: 0 0 auto;
|
||||
}
|
||||
|
||||
body:has(.content) > .main {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
body:has(.content) > footer {
|
||||
flex: 0 0 auto;
|
||||
}
|
||||
|
||||
.content {
|
||||
width: 100%;
|
||||
max-width: 55%;
|
||||
margin: 0 auto;
|
||||
padding: 0 clamp(20px, 5vw, 60px);
|
||||
min-height: 100vh;
|
||||
height: 100%;
|
||||
padding: 40px;
|
||||
box-sizing: border-box;
|
||||
display: flex;
|
||||
gap: 80px;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.topics {
|
||||
font-size: clamp(1.5em, 4vw, 3em);
|
||||
max-height: 80vh;
|
||||
column-width: 300px;
|
||||
column-gap: clamp(20px, 4vw, 60px);
|
||||
column-fill: auto;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
list-style: none;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
.topics li:first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.topics li:last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.posts {
|
||||
font-size: clamp(1.2em, 3vw, 2.2em);
|
||||
text-align: right;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
header {
|
||||
padding: clamp(30px, 5vw, 50px) 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
flex-wrap: nowrap;
|
||||
gap: 20px;
|
||||
.posts:empty {
|
||||
display: none;
|
||||
}
|
||||
|
||||
h1,
|
||||
.delta {
|
||||
font-size: clamp(1.5rem, 8vw, 4rem);
|
||||
font-weight: 300;
|
||||
flex-shrink: 1;
|
||||
a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.delta {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
section {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
||||
gap: clamp(20px, 3vw, 40px);
|
||||
padding-bottom: clamp(80px, 15vw, 150px);
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-weight: 300;
|
||||
font-style: italic;
|
||||
font-size: clamp(1.25rem, 4vw, 2rem);
|
||||
margin-bottom: clamp(15px, 3vw, 25px);
|
||||
.post {
|
||||
margin-bottom: 25px;
|
||||
}
|
||||
|
||||
ul {
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
li {
|
||||
margin-bottom: 10px;
|
||||
font-size: clamp(0.9rem, 2.5vw, 1.1rem);
|
||||
line-height: 1.1;
|
||||
}
|
||||
|
||||
a {
|
||||
.topic a {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
color: inherit;
|
||||
border-bottom: 1px solid transparent;
|
||||
text-decoration: none;
|
||||
margin-bottom: 25px;
|
||||
transition: color 0.5s ease;
|
||||
}
|
||||
|
||||
.topic a::after {
|
||||
content: "";
|
||||
height: 2px;
|
||||
display: block;
|
||||
margin-top: 4px;
|
||||
background: currentColor;
|
||||
width: 0;
|
||||
right: 100%;
|
||||
position: absolute;
|
||||
transition:
|
||||
width 0.5s ease,
|
||||
right 0.5s ease;
|
||||
}
|
||||
|
||||
.topic a:hover::after,
|
||||
.topic a.active::after {
|
||||
width: 100%;
|
||||
right: 0;
|
||||
}
|
||||
|
||||
body:has(#repo-list) .content,
|
||||
body:has(.topics:only-child) .content {
|
||||
justify-content: flex-start;
|
||||
display: block;
|
||||
}
|
||||
|
||||
body:has(#repo-list) .topics,
|
||||
body:has(.topics:only-child) .topics {
|
||||
column-width: 350px;
|
||||
column-fill: auto;
|
||||
height: calc(100vh - 200px);
|
||||
overflow-y: hidden;
|
||||
overflow-x: auto;
|
||||
max-height: calc(100vh - 200px);
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
body:has(#repo-list) .posts {
|
||||
display: none;
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
body:has(.content) {
|
||||
height: auto;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
body:has(.content) > .main {
|
||||
display: block;
|
||||
min-height: calc(100vh - 200px);
|
||||
}
|
||||
|
||||
.content {
|
||||
display: block;
|
||||
padding: 20px;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.topics {
|
||||
font-size: 1.8em;
|
||||
column-count: 1;
|
||||
max-height: none;
|
||||
}
|
||||
|
||||
.posts {
|
||||
font-size: 1.4em;
|
||||
margin-top: 30px;
|
||||
}
|
||||
|
||||
.topic a {
|
||||
display: block;
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
192
public/styles/posts.css
Normal file
|
|
@ -0,0 +1,192 @@
|
|||
li {
|
||||
margin: 5px 0;
|
||||
}
|
||||
|
||||
.post-wrapper {
|
||||
width: 100%;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.toc-column {
|
||||
position: absolute;
|
||||
left: calc((100vw - min(65vw, 800px)) / 4 - 100px);
|
||||
top: 0;
|
||||
width: 200px;
|
||||
padding-top: 190px;
|
||||
}
|
||||
|
||||
.post-header {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.post-container {
|
||||
width: 65%;
|
||||
max-width: 800px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
.post-title {
|
||||
font-weight: normal;
|
||||
font-size: 2.2em;
|
||||
margin: 50px 0;
|
||||
width: 100%;
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.post-title::before {
|
||||
content: "";
|
||||
position: absolute;
|
||||
background-color: var(--topic-color, var(--text));
|
||||
height: 30px;
|
||||
width: 2px;
|
||||
bottom: -10px;
|
||||
left: -20px;
|
||||
}
|
||||
|
||||
.post-title::after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
background-color: var(--topic-color, var(--text));
|
||||
width: 200px;
|
||||
height: 2px;
|
||||
bottom: -10px;
|
||||
left: -20px;
|
||||
}
|
||||
|
||||
.post-meta {
|
||||
font-size: 1.6em;
|
||||
color: var(--text);
|
||||
margin-left: 100px;
|
||||
}
|
||||
|
||||
.post-article {
|
||||
font-size: 1.5em;
|
||||
line-height: 1.3;
|
||||
padding-bottom: 50px;
|
||||
}
|
||||
|
||||
.post-article a {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
article h1,
|
||||
article h2,
|
||||
article h3,
|
||||
.post-article h1,
|
||||
.post-article h2,
|
||||
.post-article h3 {
|
||||
font-weight: normal;
|
||||
position: relative;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
article h1,
|
||||
.post-article h1 {
|
||||
font-size: 1.8em;
|
||||
padding-left: 1.5em;
|
||||
}
|
||||
|
||||
article h2,
|
||||
.post-article h2 {
|
||||
font-size: 1.2em;
|
||||
padding-left: 2em;
|
||||
}
|
||||
|
||||
article h3,
|
||||
.post-article h3 {
|
||||
padding-left: 2.5em;
|
||||
}
|
||||
|
||||
article h1::before,
|
||||
.post-article h1::before {
|
||||
content: "#";
|
||||
}
|
||||
|
||||
article h2::before,
|
||||
.post-article h2::before {
|
||||
content: "##";
|
||||
}
|
||||
|
||||
article h3::before,
|
||||
.post-article h3::before {
|
||||
content: "###";
|
||||
}
|
||||
|
||||
article h1::before,
|
||||
article h2::before,
|
||||
article h3::before,
|
||||
.post-article h1::before,
|
||||
.post-article h2::before,
|
||||
.post-article h3::before {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
color: var(--topic-color, var(--text));
|
||||
margin-right: 0.5em;
|
||||
}
|
||||
|
||||
.date {
|
||||
font-size: 0.67em;
|
||||
}
|
||||
|
||||
article img {
|
||||
display: block;
|
||||
margin: 2rem auto;
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
article pre {
|
||||
padding: 1rem;
|
||||
overflow-x: auto;
|
||||
border-radius: 4px;
|
||||
border: 1px solid var(--border);
|
||||
}
|
||||
|
||||
[data-theme="dark"] .astro-code,
|
||||
[data-theme="dark"] .astro-code span {
|
||||
color: var(--shiki-dark) !important;
|
||||
background-color: var(--shiki-dark-bg) !important;
|
||||
}
|
||||
|
||||
:not(pre) > code {
|
||||
font-family: "Courier New", Courier, monospace;
|
||||
padding: 4px;
|
||||
margin: 0 5px;
|
||||
white-space: nowrap;
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 4px;
|
||||
background: var(--code-bg);
|
||||
}
|
||||
|
||||
.astro-code {
|
||||
font-size: 0.8em !important;
|
||||
}
|
||||
|
||||
@media (max-width: 1200px) {
|
||||
.toc-column {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.post-container {
|
||||
width: 85%;
|
||||
}
|
||||
|
||||
.post-title {
|
||||
font-size: 1.8em;
|
||||
margin: 30px 0;
|
||||
}
|
||||
|
||||
.post-meta {
|
||||
font-size: 1.2em;
|
||||
margin-left: 50px;
|
||||
}
|
||||
|
||||
.post-article {
|
||||
font-size: 1.1em;
|
||||
}
|
||||
}
|
||||
3
scripts/deploy.sh
Executable file
|
|
@ -0,0 +1,3 @@
|
|||
#!/bin/sh
|
||||
|
||||
aws s3 sync ./dist/ s3://barrettruth.com --delete
|
||||
56
src/components/Footer.astro
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
<footer>
|
||||
<div class="footer-links-left">
|
||||
<a href="/resume.pdf" data-topic="resume" target="_blank">resume</a>
|
||||
<a href="/transcript.pdf" data-topic="transcript" target="_blank"
|
||||
>transcript</a
|
||||
>
|
||||
<a href="/about.html" data-topic="about">about</a>
|
||||
</div>
|
||||
<div class="footer-links-right">
|
||||
<a href="/gist.html" data-topic="gist">gist</a>
|
||||
<a href="/git.html" data-topic="git">git</a>
|
||||
<a
|
||||
href="https://www.linkedin.com/in/barrett-ruth/"
|
||||
data-topic="linkedin"
|
||||
target="_blank">linkedin</a
|
||||
>
|
||||
<a href="mailto:br@barrettruth.com" data-topic="mail">email</a>
|
||||
</div>
|
||||
</footer>
|
||||
|
||||
<style>
|
||||
.footer-links-left,
|
||||
.footer-links-right {
|
||||
display: flex;
|
||||
gap: 25px;
|
||||
}
|
||||
.footer-links-left a,
|
||||
.footer-links-right a {
|
||||
text-decoration: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
@media (max-width: 1024px) {
|
||||
footer {
|
||||
flex-direction: row;
|
||||
gap: 15px;
|
||||
}
|
||||
.footer-links-left,
|
||||
.footer-links-right {
|
||||
gap: 15px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 640px) {
|
||||
footer {
|
||||
flex-direction: row;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
.footer-links-left,
|
||||
.footer-links-right {
|
||||
flex-direction: row;
|
||||
gap: 15px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
<script src="/scripts/index.js" is:inline></script>
|
||||
68
src/components/Header.astro
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
---
|
||||
const path = Astro.url.pathname;
|
||||
const is404 = path === "/404.html" || path === "/404";
|
||||
|
||||
function deriveTopic() {
|
||||
if (is404) return "/not-found";
|
||||
if (path.startsWith("/about")) return "/about";
|
||||
if (path === "/gist" || path.startsWith("/gist/")) return "/gist";
|
||||
if (path === "/git" || path.startsWith("/git/")) return "/git";
|
||||
return "";
|
||||
}
|
||||
|
||||
const topic = deriveTopic();
|
||||
const promptText = topic ? `barrett@ruth:~$ ${topic}` : "barrett@ruth:~$";
|
||||
---
|
||||
|
||||
<header>
|
||||
<a
|
||||
href="/"
|
||||
id="home-link"
|
||||
class="home-link"
|
||||
style="text-decoration: none; color: inherit"
|
||||
>
|
||||
<div class="terminal-container">
|
||||
<span class="terminal-prompt">{promptText}</span>
|
||||
<span class="terminal-cursor"></span>
|
||||
</div>
|
||||
</a>
|
||||
<span id="theme-toggle">☼⊂ʕ•ᴥ•ʔ</span>
|
||||
</header>
|
||||
|
||||
<style>
|
||||
#theme-toggle {
|
||||
cursor: pointer;
|
||||
user-select: none;
|
||||
}
|
||||
@media (max-width: 640px) {
|
||||
header {
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
gap: 10px;
|
||||
}
|
||||
.terminal-container {
|
||||
flex: 1 1 auto;
|
||||
min-width: 0;
|
||||
}
|
||||
.terminal-prompt {
|
||||
font-size: 0.9em;
|
||||
}
|
||||
#theme-toggle {
|
||||
flex: 0 0 auto;
|
||||
font-size: 1.2em;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
<script is:inline>
|
||||
const homeLink = document.getElementById("home-link");
|
||||
|
||||
homeLink.addEventListener("click", (e) => {
|
||||
if (e.ctrlKey || e.metaKey) {
|
||||
e.preventDefault();
|
||||
window.open("/", "_blank", "noopener");
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<script src="/scripts/index.js" is:inline></script>
|
||||
80
src/components/TableOfContents.astro
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
---
|
||||
// Auto-generated TOC from MDX headings - Left sidebar
|
||||
interface Props {
|
||||
headings: Array<{
|
||||
depth: number;
|
||||
slug: string;
|
||||
text: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
const { headings } = Astro.props;
|
||||
|
||||
// Filter to only show h1 and h2
|
||||
const tocHeadings = headings.filter((h) => h.depth <= 2);
|
||||
---
|
||||
|
||||
{
|
||||
tocHeadings.length > 0 && (
|
||||
<aside class="toc-sidebar">
|
||||
<div class="toc-title">contents</div>
|
||||
<nav class="toc-nav">
|
||||
{tocHeadings.map((heading) => (
|
||||
<a
|
||||
href={`#${heading.slug}`}
|
||||
class={heading.depth === 1 ? "h1-link" : "h2-link"}
|
||||
>
|
||||
{heading.text}
|
||||
</a>
|
||||
))}
|
||||
</nav>
|
||||
</aside>
|
||||
)
|
||||
}
|
||||
|
||||
<style>
|
||||
.toc-sidebar {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.toc-title {
|
||||
font-weight: normal;
|
||||
margin-bottom: 1rem;
|
||||
text-align: left;
|
||||
font-size: 1.5em;
|
||||
}
|
||||
|
||||
.toc-nav {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 0.3rem;
|
||||
align-items: flex-start;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.toc-nav a {
|
||||
color: var(--text);
|
||||
text-decoration: none;
|
||||
line-height: 1.4;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.toc-nav a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.toc-nav a.h1-link {
|
||||
padding-left: 0;
|
||||
}
|
||||
|
||||
.toc-nav a.h2-link {
|
||||
padding-left: 1.5rem;
|
||||
}
|
||||
|
||||
@media (max-width: 1200px) {
|
||||
.toc-sidebar {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
226
src/content/algorithms/extrema-circular-buffer.mdx
Normal file
|
|
@ -0,0 +1,226 @@
|
|||
---
|
||||
title: "extrema circular buffer"
|
||||
date: "30/07/2024"
|
||||
useKatex: true
|
||||
---
|
||||
|
||||
import { Code } from "astro:components";
|
||||
|
||||
# context
|
||||
|
||||
While working for [TRB Capital Management](https://trbcap.com/), certain strategies necessitated finding the minimum and maximum of a moving window of prices.
|
||||
|
||||
## problem statement
|
||||
|
||||
Design a data structure supporting the following operations:
|
||||
|
||||
- `build(size_t capacity)` : initialize the data structure with capacity/window size `capacity`
|
||||
The data structure must always hold $\leq$ `capacity` prices.
|
||||
- `void push_back(double value)`
|
||||
- `void pop_front()` : remove the price from the front of the window
|
||||
- `size_t size()` : return the number of prices in the data structure
|
||||
- `double get()` : return the extrema (min or max)
|
||||
|
||||
# solution
|
||||
|
||||
Try to solve it yourself first. The point of this exercise it to create the most theoretically optimal solution you can, not brute-force and move on.
|
||||
|
||||
## naïve solution
|
||||
|
||||
One can design a data structure meeting these requirements through simulating the operations directly with a [`std::deque<double>`](https://en.cppreference.com/w/cpp/container/deque).
|
||||
|
||||
On the upside, this approach is simple to understand. Further, operations are all $O(1)$ time—that is, nearly all operations. The minimum/maximum element must be found via a linear scan in $O(n)$ time, certainly far from optimal.
|
||||
|
||||
```cpp
|
||||
#include <algorithm>
|
||||
#include <deque>
|
||||
#include <stdexcept>
|
||||
|
||||
class ExtremaCircularBuffer {
|
||||
public:
|
||||
ExtremaCircularBuffer(size_t capacity) : capacity(capacity) {}
|
||||
|
||||
void push_back(double value) {
|
||||
if (prices.size() == capacity) {
|
||||
prices.pop_front();
|
||||
}
|
||||
|
||||
prices.push_back(value);
|
||||
}
|
||||
|
||||
void pop_front() {
|
||||
if (prices.empty()) {
|
||||
throw std::out_of_range("Cannot pop_front() from empty buffer");
|
||||
}
|
||||
|
||||
prices.pop_front();
|
||||
}
|
||||
|
||||
size_t size() const { return prices.size(); }
|
||||
|
||||
double get() const {
|
||||
if (prices.empty()) {
|
||||
throw std::out_of_range("Cannot find max() of empty buffer");
|
||||
}
|
||||
|
||||
return *std::max_element(prices.begin(), prices.end());
|
||||
}
|
||||
|
||||
private:
|
||||
std::deque<double> prices;
|
||||
size_t capacity;
|
||||
};
|
||||
```
|
||||
|
||||
# optimizing the approach
|
||||
|
||||
Rather than bear the brunt of the work finding extrema in calls to `get()`, we can distribute it across the data structure as it is built.
|
||||
|
||||
Maintaining the prices in a sorted order seems to suffice, and gives access to both max _and_ min in $O(1)$ time. However, all of the problem constraints have not been addressed. Adhering to the interface of a circular buffer is another challenge.
|
||||
|
||||
Fortunately, pairing each element with a count allows intelligent removal/insertion of elements—if an element has a count of $0$, remove it from the list of sorted prices. A [std::map](https://en.cppreference.com/w/cpp/container/map) allows us to do all of this.
|
||||
|
||||
Now, we can access extrema instantly. Insertion and deletion take $O(log(n))$ time thanks to the map—but we can do better.
|
||||
|
||||
```cpp
|
||||
#include <deque>
|
||||
#include <map>
|
||||
#include <stdexcept>
|
||||
|
||||
class ExtremaCircularBuffer {
|
||||
public:
|
||||
void push_back(double value) {
|
||||
if (prices.size() == capacity) {
|
||||
double front = prices.front();
|
||||
|
||||
if (--sorted_prices[front] == 0)
|
||||
sorted_prices.erase(front);
|
||||
prices.pop_front();
|
||||
}
|
||||
|
||||
prices.push_back(value);
|
||||
++sorted_prices[value];
|
||||
}
|
||||
|
||||
void pop_front() {
|
||||
if (prices.empty()) {
|
||||
throw std::out_of_range("Cannot pop_front() from empty buffer");
|
||||
}
|
||||
|
||||
double front = prices.front();
|
||||
|
||||
if (--sorted_prices[front] == 0)
|
||||
sorted_prices.erase(front);
|
||||
prices.pop_front();
|
||||
}
|
||||
|
||||
size_t size() const { return prices.size(); }
|
||||
|
||||
double get_max() const {
|
||||
if (prices.empty()) {
|
||||
throw std::out_of_range("Cannot find max() of empty buffer");
|
||||
}
|
||||
|
||||
return sorted_prices.rbegin()->first;
|
||||
}
|
||||
|
||||
double get_min() const {
|
||||
if (prices.empty()) {
|
||||
throw std::out_of_range("Cannot find min() of empty buffer");
|
||||
}
|
||||
|
||||
return sorted_prices.begin()->first;
|
||||
}
|
||||
|
||||
/* methods & fields omitted for brevity */
|
||||
};
|
||||
```
|
||||
|
||||
## monotonic ~~queues~~ deques
|
||||
|
||||
Thinking a bit deeper about the problem constraints, it is clear that:
|
||||
|
||||
- If an extrema is pushed onto the data structure, all previously pushed elements are irrelevant to any further operations.
|
||||
|
||||
Elements are processed in FIFO order, enabling this observation to be exploited. This is the foundational idea of the [monotone priority queue](https://www.wikiwand.com/en/Monotone_priority_queue) data structure. So, for maintaining a minimum/maximum, the data structure will store a monotonically increasing/decreasing double-ended queue.
|
||||
|
||||
This solution does not satisfy a circular buffer inherently. If an arbitrary number of elements are removed from the data structure when an extrema is added, it is certainly not possible to maintain a window of fixed size.
|
||||
|
||||
Thus, we make one more observation to meet this criterion:
|
||||
|
||||
- If each price (extrema) on the monotonic double-ended queue also maintains a count of _previously popped elements_, we can deduce the proper action to take when the data structure reaches capacity.
|
||||
|
||||
1. If elements were previously popped before this extrema was added to the data structure, decrement the price's count of popped elements and do nothing.
|
||||
2. Otherwise, either no elements were pushed before this extrema or they've all been popped. Remove (pop) this element from the deque.
|
||||
|
||||
This approach supports all operations in amortized $O(1)$ time (with a monotonic sequence, elements are added or removed at least once; across a sequence of $n$ operations, $n$ total $O(1)$ operations will be executed).
|
||||
|
||||
```cpp
|
||||
#include <deque>
|
||||
#include <stdexcept>
|
||||
#include <utility>
|
||||
|
||||
class ExtremaCircularBuffer {
|
||||
public:
|
||||
void push_back(double value) {
|
||||
if (prices.size() == capacity) {
|
||||
double front_value = prices.front();
|
||||
pop_max(front_value);
|
||||
prices.pop_front();
|
||||
}
|
||||
|
||||
prices.push_back(value);
|
||||
push_max(value);
|
||||
}
|
||||
|
||||
void pop_front() {
|
||||
if (prices.empty()) {
|
||||
throw std::out_of_range("Cannot pop_front() from empty buffer");
|
||||
}
|
||||
|
||||
double front_value = prices.front();
|
||||
pop_max(front_value);
|
||||
prices.pop_front();
|
||||
}
|
||||
|
||||
double get_max() const {
|
||||
if (prices.empty()) {
|
||||
throw std::out_of_range("Cannot find max() of empty buffer");
|
||||
}
|
||||
|
||||
return maxs.front().first;
|
||||
}
|
||||
|
||||
private:
|
||||
void push_max(double value) {
|
||||
size_t popped = 0;
|
||||
|
||||
while (!maxs.empty() && maxs.back().first < value) {
|
||||
popped += maxs.back().second + 1;
|
||||
maxs.pop_back();
|
||||
}
|
||||
|
||||
maxs.emplace_back(value, popped);
|
||||
}
|
||||
|
||||
void pop_max(double value) {
|
||||
size_t popped = maxs.front().second;
|
||||
|
||||
if (popped == 0) {
|
||||
maxs.pop_front();
|
||||
} else {
|
||||
--maxs.front().second;
|
||||
}
|
||||
}
|
||||
|
||||
/* methods & fields omitted for brevity */
|
||||
};
|
||||
```
|
||||
|
||||
## further improvements
|
||||
|
||||
The final implementation utilized in the TRB includes the following features:
|
||||
|
||||
1. A ringbuffer using a statically-allocated `std::array`, as any fix-sized queue can be supplanted with one
|
||||
2. A templatized value type and comparator for flexibility
|
||||
3. C++ specific optimizations (rule of 5, smart pointers, and an STL-compliant API)
|
||||
351
src/content/algorithms/leetcode-daily.mdx
Normal file
|
|
@ -0,0 +1,351 @@
|
|||
---
|
||||
title: "leetcode daily"
|
||||
date: "11/9/2024"
|
||||
useKatex: true
|
||||
showToc: true
|
||||
---
|
||||
|
||||
# [count good numbers](https://leetcode.com/problems/count-good-numbers) <span class="date">13/12/2024</span>
|
||||
|
||||
## understanding the problem
|
||||
|
||||
This is a combinatoric problem at heart. You have some slots for evens and some for primes, with a limited number of choices for each. Leverage the multiplication rule, which states that if you have $n$ slots with $x$ choices, you get $x^n$ possible outcomes.
|
||||
|
||||
## doing it
|
||||
|
||||
So, what's the answer? If we know which slots we have and the number of choices for them, we're done. Since this is leetcode, they don't let you think—they just give you the answer. You have 2 types of slots (even and odd indices) with 5 ($\{0,2,4,6,8\}$) and 4 ($\{2,3,5,7\}$) choices respectively. Therefore, the answer is: $5^\text{\# even slots}\cdot 4^\text{\# odd slots}$. By counting or with small cases, we have $\lceil\frac{n}{2}\rceil$ even slots and $\lfloor\frac{n}{2}\rfloor$ odd slots. Let's submit it!
|
||||
|
||||
And.... TLE. Check _everything_ before you submit your code—in this case, the constraint $n\leq 10^{16}$ informs us of something suspect. In the worst case, we would perform $\frac{n}{2}\approx 10^{15}$ multiplications. This is far too many, so we can leverage binary exponentiation instead (and probably should've been the whole time!). Don't forget the mod.
|
||||
|
||||
```cpp
|
||||
// Count "good" digit strings of length n: even indices hold even digits
// (5 choices each), odd indices hold prime digits (4 choices each);
// the answer is taken modulo 1e9 + 7.
class Solution {
public:
    static constexpr long long MOD = 1e9 + 7;

    // Binary exponentiation: a^b (mod `mod`) in O(lg(b)) multiplications.
    // Bug fix: the original ignored its `mod` parameter and always reduced
    // by the global MOD; reduce by the supplied modulus instead.
    long long mpow(long long a, long long b, long long mod=MOD) {
        long long ans = 1;
        a %= mod;  // keep intermediate products within 63 bits
        while (b > 0) {
            if (b & 1) {
                ans = (ans * a) % mod;
            }
            a = (a * a) % mod;
            b >>= 1;
        }
        return ans;
    }

    // ceil(n/2) even-indexed slots with 5 choices, floor(n/2) odd-indexed
    // slots with 4 choices; combine via the multiplication rule.
    int countGoodNumbers(long long n) {
        long long even_slots = (n + 1) / 2, odd_slots = n / 2;
        return (mpow(5, even_slots) * mpow(4, odd_slots)) % MOD;
    }
};
|
||||
```
|
||||
|
||||
# [minimum number of operations to make array distinct](https://leetcode.com/problems/minimum-number-of-operations-to-make-elements-in-array-distinct) <span class="date">04/10/2024</span>
|
||||
|
||||
## understanding the problem
|
||||
|
||||
You can remove elements in groups of 3 _solely_ from the beginning of the array. Perform this operation until there are no more duplicates left, returning the number of times you had to perform the operation.
|
||||
|
||||
## solution: rephrase the question
|
||||
|
||||
Definitionally, you remove the _last_ duplicate. If such duplicate is at 0-indexed `i`, it belongs to the $\lceil \frac{i + 1}{3}\rceil$th chunk of 3 (i.e. operation). Find the last duplicate by leveraging a frequency map and iterating backwards through the input.
|
||||
|
||||
## asymptotic complexity
|
||||
|
||||
The solution is optimal, considering the least amount of elements possible in:
|
||||
|
||||
- <u>Time Complexity</u>: $O(n)$
|
||||
- <u>Space Complexity</u>: $\Theta(1)$
|
||||
|
||||
# [count the number of fair pairs](https://leetcode.com/problems/count-the-number-of-fair-pairs/) <span class="date">10/12/2024</span>
|
||||
|
||||
## problem statement
|
||||
|
||||
Given an array `nums` of integers and upper/lower integer bounds `upper`/`lower` respectively, return the number of unique valid index pairs such that:
|
||||
|
||||
$$i\neq j,lower\leq nums[i]+nums[j]\leq upper$$
|
||||
|
||||
## understanding the problem
|
||||
|
||||
This is another sleeper daily in which a bit of thinking in the beginning pays dividends. Intuitively, I think it makes sense to reduce the “dimensionality” of the problem. Choosing both `i` and `j` concurrently seems tricky, so let's assume we've found a valid `i`. What must be true? Well:
|
||||
|
||||
$$i\neq j,lower-nums[i]\leq nums[j]\leq upper-nums[i]$$
|
||||
|
||||
It doesn't seem like we've made much progress. If nums is a sequence of random integers, _there's truly no way to find all `j` satisfying this condition efficiently_.
|
||||
|
||||
The following question naturally arises: can we modify our input to find such `j` efficiently? Recall our goal: find the smallest/largest j to fit within our altered bounds—in other words, find the smallest $x$ less/greater than or equal to a number. If binary search bells aren't clanging in your head right now, I'm not sure what to say besides keep practicing.
|
||||
|
||||
So, it would be nice to sort `nums` to find such `j` relatively quickly. However: _are we actually allowed to do this?_ This is the core question I think everyone skips over. Maybe it is trivial but it is important to emphasize:
|
||||
|
||||
- _Yes, we are allowed to sort the input_. Re-frame the problem: what we are actually doing is choosing distinct `i`, `j` to satisfy some condition. The order of `nums` does not matter—rather, its contents do. Any input to this algorithm with `nums` with the same contents will yield the same result. If we were to modify `nums` instead of rearrange it, this would be invalid because we could be introducing/taking away valid index combinations.
|
||||
|
||||
Let's consider our solution a bit more before implementing it:
|
||||
|
||||
- Is the approach feasible? We're sorting `nums` then binary searching over it considering all `i`, which will take around $O(nlg(n))$ time. `len(nums)`$\leq10^5$, so this is fine.
|
||||
- How do we avoid double-counting? The logic so far makes no effort. If we consider making all pairs with indices _less than_ `i` for all `i` left-to-right, we'll be considering all valid pairs with no overlap. This is a common pattern—take a moment to justify it to yourself.
|
||||
- _Exactly_ how many elements do we count? Okay, we're considering some rightmost index `i` and we've found lower and upper prefix bounds `k` and `j` respectively. We can pair `nums[i]` with every element from `nums[k]` up to and including `nums[j - 1]`. There are exactly $j-k$ of these. If the indexing confuses you, draw it out and prove it to yourself.\* How do we get our final answer? Accumulate all `j-k` for all `i`.
|
||||
|
||||
## carrying out the plan
|
||||
|
||||
The following approach implements our logic quite elegantly and directly. The third and fourth arguments to the `bisect` calls specify `lo` (inclusive) and `hi` (exclusive) bounds for our search space, mirroring the criteria that we search across all indices $\lt i$.
|
||||
|
||||
```python
|
||||
def countFairPairs(self, nums, lower, upper):
    """Count index pairs (i, j), i != j, with lower <= nums[i] + nums[j] <= upper.

    Sort, then for each right index i binary search the sorted prefix
    nums[:i] for the window of partners keeping the sum in bounds.
    O(n lg n) time; O(1) extra space (in-place sort).
    """
    nums.sort()
    ans = 0

    for i, num in enumerate(nums):
        # First prefix index whose value is >= lower - num ...
        lo = bisect_left(nums, lower - num, 0, i)
        # ... and one past the last index whose value is <= upper - num.
        hi = bisect_right(nums, upper - num, 0, i)

        # Bug fix: the valid partners number hi - lo; the original
        # accumulated the reverse difference, a non-positive count.
        ans += hi - lo

    return ans
|
||||
```
|
||||
|
||||
## optimizing the approach
|
||||
|
||||
If we interpret the criteria this way, the above approach is relatively efficient. To improve this approach, we'll need to reinterpret the constraints. Forget about the indexing and consider the constraint in aggregate. We want to find all $i,j$ with $x=nums[i]+nums[j]$ such that $i\neq j,lower\leq x\leq upper$.
|
||||
|
||||
We _still_ need to reduce the “dimensionality” of the problem—there are just too many moving parts to consider at once. This seems challenging. Let's simplify the problem to identify helpful ideas: pretend `lower` does not exist (and, of course, that `nums` is sorted).
|
||||
|
||||
We're looking for all index pairs with sum $\leq upper$. And behold: (almost) two sum in the wild. This can be accomplished with a two-pointers approach—this post is getting quite long so we'll skip over why this is the case—but the main win here is that we can solve this simplified version of our problem in $O(n)$.
|
||||
|
||||
Are we any closer to actually solving the problem? Now, we have the count of index pairs $\leq upper$. Is this our answer? No—some may be too small, namely, with sum $\lt lower$. Let's exclude those by running our two-pointer approach with an upper bound of $lower-1$ (we want to include $lower$). Now, our count reflects the total number of index pairs with a sum in our interval bound.
|
||||
|
||||
Note that this really is just running a prefix sum/using the “inclusion-exclusion” principle/however you want to phrase it.
|
||||
|
||||
```python
|
||||
def countFairPairs(self, nums, lower, upper):
    """Count index pairs (i, j), i != j, with lower <= nums[i] + nums[j] <= upper.

    Inclusion-exclusion on a sorted array: (# pairs with sum <= upper)
    minus (# pairs with sum <= lower - 1). Each count is a linear
    two-pointer sweep, so the sort dominates at O(n lg n).
    """
    # Fix: dropped the unused local `ans` the original declared.
    nums.sort()

    def pairs_leq(x: int) -> int:
        """Number of index pairs in sorted `nums` whose sum is <= x."""
        pairs = 0
        l, r = 0, len(nums) - 1
        while l < r:
            if nums[l] + nums[r] <= x:
                # nums[l] pairs with every element in nums[l+1..r].
                pairs += r - l
                l += 1
            else:
                r -= 1
        return pairs

    return pairs_leq(upper) - pairs_leq(lower - 1)
|
||||
```
|
||||
|
||||
## some more considerations
|
||||
|
||||
The second approach is _asymptotically_ equivalent. However, it's still worth considering for two reasons:
|
||||
|
||||
1. If an interviewer says “assume `nums` is sorted” or “how can we do better?”—you're cooked.
|
||||
2. (Much) more importantly, it's extremely valuable to be able to _reconceptualize_ a problem and look at it from different angles. Not being locked in on a solution shows perseverance, curiosity, and strong problem-solving abilities.
|
||||
|
||||
## asymptotic complexity
|
||||
|
||||
- <u>Time Complexity</u>: $O(nlg(n))$ for both—$O(n)$ if `nums` is sorted with
|
||||
respect to the second approach.
|
||||
- <u>Space Complexity</u>: $\Theta(1)$ for both.
|
||||
|
||||
# [most beautiful item for each query](https://leetcode.com/problems/most-beautiful-item-for-each-query/description/) <span class="date">09/12/2024</span>
|
||||
|
||||
## problem statement
|
||||
|
||||
Given an array `items` of $(price, beauty)$ tuples, answer each integer query of $queries$. The answer to some `query[i]` is the maximum beauty of an item with $price\leq$`items[i][0]`.
|
||||
|
||||
## understanding the problem
|
||||
|
||||
Focus on one aspect of the problem at a time. To answer a query, we need to have considered:
|
||||
|
||||
1. Items with a non-greater price
|
||||
2. The beauty of all such items
|
||||
|
||||
Given some query, how can we _efficiently_ identify the “last” item with an acceptable price? Leverage the most common pre-processing algorithm: sorting. Subsequently, we can binary search `items` (keyed by price, of course) to identify all considerable items in $O(lg(n))$.
|
||||
|
||||
Great. Now we need to find the item with the largest beauty. Naïvely considering all the elements is a _correct_ approach—but is it efficient? Considering our binary search $O(lg(n))$ and beauty search $O(n)$ across $\Theta(n)$ queries with `len(items)<=len(queries)`$\leq10^5$, an $O(n^2lg(n))$ approach is certainly unacceptable.
|
||||
|
||||
Consider alternative approaches to responding to our queries. It is clear that answering them in-order yields no benefit (i.e. we have to consider each item all over again, per query)—could we answer them in another order to save computations?
|
||||
|
||||
Visualizing our items from left-to-right, we're interested in both increasing beauty and prices. If we can scan our items left to right, we can certainly “accumulate” a running maximal beauty. We can leverage sorting once again to answer our queries from left to right, then re-order them appropriately before returning a final answer. Sorting both `queries` and `items` with a linear scan will take $O(nlg(n))$ time, meeting the constraints.
|
||||
|
||||
## carrying out the plan
|
||||
|
||||
A few specifics need to be understood before coding up the approach:
|
||||
|
||||
- Re-ordering the queries: couple `query[i]` with `i`, then sort. When responding to queries in sorted order, we know where to place them in an output container—index `i`.
|
||||
- The linear scan: accumulate a running maximal beauty, starting at index `0`. For some query `query`, we want to consider all items with price less than or equal to `query`. Therefore, loop until this condition is _violated_— the previous index will represent the last considered item.
|
||||
- Edge cases: it's perfectly possible the last considered item is invalid (consider a query cheaper than the cheapest item). Return `0` as specified by the problem constraints.
|
||||
|
||||
```cpp
|
||||
// For each query q, report the maximum beauty among items priced <= q
// (0 when no item qualifies). Sort items by price and queries by value,
// then sweep both once with a running maximum. O(n lg n + m lg m).
std::vector<int> maximumBeauty(std::vector<std::vector<int>>& items, std::vector<int>& queries) {
    std::sort(items.begin(), items.end());

    // Pair each query with its original position so answers can be
    // written back in the caller's order after sorting.
    std::vector<std::pair<int, int>> sorted_queries;
    sorted_queries.reserve(queries.size());
    for (size_t i = 0; i < queries.size(); ++i) {
        sorted_queries.emplace_back(queries[i], i);
    }
    std::sort(sorted_queries.begin(), sorted_queries.end());

    // Fix: seed the running maximum with 0 instead of items[0][1] — the
    // old seed read items[0] before knowing any item was affordable (UB
    // on empty input) and was only ever masked by the redundant
    // items[i - 1][0] <= query re-check, which is now unnecessary.
    int beauty = 0;
    size_t i = 0;
    std::vector<int> ans(queries.size());

    for (const auto& [query, index] : sorted_queries) {
        // Consume every item this (and any later) query can afford.
        while (i < items.size() && items[i][0] <= query) {
            beauty = std::max(beauty, items[i][1]);
            ++i;
        }
        ans[index] = beauty;
    }

    return ans;
}
|
||||
```
|
||||
|
||||
## asymptotic complexity
|
||||
|
||||
Let `n=len(items)` and `m=len(queries)`. There may be more items than queries, or vice versa. Note that a “looser” upper bound can be found by analyzing the runtime in terms of $max\{n,m\}$.
|
||||
|
||||
- <u>Time Complexity</u>: $O(nlg(n)+mlg(m)+m)\in O(nlg(n)+mlg(m))$. An argument
|
||||
can be made that because `queries[i],items[i][{(0, 1)}]`$\leq10^9$, radix sort
|
||||
can be leveraged to achieve a time complexity of $O(d \cdot (n + k + m +
|
||||
k))\in O(9\cdot (n + m))\in O(n+m)$.
|
||||
- <u>Space Complexity</u>: $\Theta(1)$, considering that $O(m)$ space must be
|
||||
allocated. If `queries`/`items` cannot be modified in-place, increase the
|
||||
space complexity by $m$/$n$ respectively.
|
||||
|
||||
# [shortest subarray with or at least k ii](https://leetcode.com/problems/shortest-subarray-with-or-at-least-k-ii/description/) <span class="date">09/11/2024</span>
|
||||
|
||||
## problem statement
|
||||
|
||||
Given an array of non-negative integers $num$ and some $k$, find the length of the shortest non-empty subarray of nums such that its element-wise bitwise OR is greater than or equal to $k$—return -1 if no such array exists.
|
||||
|
||||
## developing an approach
|
||||
|
||||
Another convoluted, uninspired bitwise-oriented daily.
|
||||
|
||||
Anyway, we're looking for a subarray that satisfies a condition. Considering all subarrays with `len(nums)`$\leq2\times10^5$ is impractical according to the common rule of $\approx10^8$ computations per second on modern CPUs.
|
||||
|
||||
Say we're building some array `xs`. Adding another element `x` to this sequence can only increase our element-wise bitwise OR. Of course, it makes sense to do this. However, consider `xs` after—it is certainly possible that including `x` finally got us to at least `k`. However, not all of the elements in the array are useful now; we should remove some.
|
||||
|
||||
Which do we remove? Certainly not any from the middle—we'd no longer be considering a subarray. We can only remove from the beginning.
|
||||
|
||||
Now, how many times do we remove? While the element-wise bitwise OR of `xs` is $\geq k$, we can naïvely remove from the start of `xs` to find the smallest subarray. Lastly, what's the state of `xs` after these removals? Now, we (may) have an answer and the element-wise bitwise OR of `xs` is guaranteed to be $\lt k$. Inductively, expand the array to search for a better answer.
|
||||
|
||||
This approach is generally called a variable-sized “sliding window”. Every element of `nums` is only added (considered in the element-wise bitwise OR) or removed (discard) one time, yielding an asymptotically linear time complexity. In other words, this is a realistic approach for our constraints.
|
||||
|
||||
## carrying out the plan
|
||||
|
||||
Plugging in our algorithm to my sliding window framework:
|
||||
|
||||
```python
|
||||
def minimumSubarrayLength(self, nums, k):
    """Length of the shortest subarray whose bitwise OR is >= k, or -1.

    Naive sliding window: the OR of the current window is recomputed
    from scratch on every shrink step, so this runs in O(n^2) overall.
    """
    best = sys.maxsize  # sentinel meaning "no qualifying window seen"
    window = deque()
    left = 0

    for right, value in enumerate(nums):
        # Grow the window rightward by one element.
        window.append(value)

        # While the window's OR meets the target, record its length and
        # shrink from the left in search of something shorter.
        while left <= right and reduce(operator.or_, window) >= k:
            best = min(best, right - left + 1)
            window.popleft()
            left += 1

    return best if best != sys.maxsize else -1
|
||||
```
|
||||
|
||||
Done, right? No. TLE.
|
||||
|
||||
If you thought this solution would work, you move too fast. Consider _every_ aspect of an algorithm before implementing it. In this case, we (I) overlooked one core question:
|
||||
|
||||
> How do we maintain our element-wise bitwise OR?
|
||||
|
||||
Calculating it by directly maintaining a window of length $n$ takes $n$ time—with a maximum window size of $n$, this solution is $O(n^2)$.
|
||||
|
||||
Let's try again. Adding an element is simple—OR it to some cumulative value. Removing an element, not so much. Considering some $x$ to remove, we only unset one of its bits from our aggregated OR if it's the “last” one of these bits set across all numbers contributing to our aggregated value.
|
||||
|
||||
Thus, to maintain our aggregate OR, we want to map bit “indices” to counts. A hashmap (dictionary) or static array will do just fine. Adding/removing some $x$ will increment/decrement the counter's bit count at its respective position. I like to be uselessly specific sometimes—choosing the latter approach, how big should our array be? As many bits as represented by the largest of $nums$—(or $k$ itself):
|
||||
|
||||
$$\lfloor \lg({max\{nums,k \})}\rfloor+1$$
|
||||
|
||||
Note that:
|
||||
|
||||
1. Below we use the [change of base formula for logarithms](https://artofproblemsolving.com/wiki/index.php/Change_of_base_formula) because $log_2(x)$ is not available in python.
|
||||
2. It's certainly possible that $max\{nums, k\}=0$. To avoid the invalid calculation $log(0)$, take the larger of $1$ and this calculation. The number of digits will then (correctly) be $1$ in this special case.
|
||||
|
||||
## asymptotic complexity
|
||||
|
||||
Note that the size of the frequency map is bounded by $lg_{2}({10^9})\approx30$.
|
||||
|
||||
- <u>Space Complexity</u>: Thus, the window uses $O(1)$ space.
|
||||
- <u>Time Complexity</u>: $\Theta($`len(nums)`$)$ —every element of `nums` is
|
||||
considered at least once and takes $O(1)$ work each to find the element-wise
|
||||
bitwise OR.
|
||||
|
||||
# [minimum array end](https://leetcode.com/problems/minimum-array-end/) <span class="date">09/10/2024</span>
|
||||
|
||||
## problem statement
|
||||
|
||||
Given some $x$ and $n$, construct a strictly increasing array (say `nums` ) of length $n$ such that `nums[0] & nums[1] ... & nums[n - 1] == x` , where `&` denotes the bitwise AND operator.
|
||||
|
||||
Finally, return the minimum possible value of `nums[n - 1]`.
|
||||
|
||||
## understanding the problem
|
||||
|
||||
The main difficulty in this problem lies in understanding what is being asked (intentionally or not, the phrasing is terrible). Some initial notes:
|
||||
|
||||
- The final array need not be constructed
|
||||
- The element-wise bitwise AND of an array equals `x` if and only if each element has `x`'s bits set—and no other bit is set by all elements
|
||||
- It makes sense to set `nums[0] == x` to ensure `nums[n - 1]` is minimal
|
||||
|
||||
## developing an approach
|
||||
|
||||
An inductive approach is helpful. Consider the natural question: “If I had correctly generated `nums[:i]`”, how could I find `nums[i]`? In other words, _how can I find the next smallest number such that `nums` 's element-wise bitwise AND is still $x$?_
|
||||
|
||||
Hmm... this is tricky. Let's think of a similar problem to glean some insight: “Given some $x$, how can I find the next smallest number?”. The answer is, of course, add one (bear with me here).
|
||||
|
||||
We also know that all of `nums[i]` must have at least $x$'s bits set. Therefore, we need to alter the unset bits of `nums[i]`.
|
||||
|
||||
The key insight of this problem is combining these two ideas to answer our question: _Just “add one” to `nums[i - 1]`'s unset bits_. Repeat this to find `nums[n - 1]`.
|
||||
|
||||
One last piece is missing—how do we know the element-wise bitwise AND is _exactly_ $x$? Because `nums[i > 0]` only sets $x$'s unset bits, every number in `nums` will have at least $x$'s bits set. Further, no other bits will be set because $x$ has them unset.
|
||||
|
||||
## carrying out the plan
|
||||
|
||||
Let's flesh out the remaining parts of the algorithm:
|
||||
|
||||
- `len(nums) == n` and we initialize `nums[0] == x`. So, we need to “add one” `n - 1` times
|
||||
- How do we carry out the additions? We could iterate $n - 1$ times and simulate them. However, we already know how we want to alter the unset bits of `nums[0]` inductively—(add one) _and_ how many times we want to do this ($n - 1$). Because we're adding one $n-1$ times to $x$'s unset bits (right to left, of course), we simply set its unset bits to those of $n - 1$. The implementation is relatively straightforward. Traverse $x$ from least-to-most significant bit, setting its $i$th unset bit to $n - 1$'s $i$th bit. Use a bitwise mask `mask` to traverse $x$.
|
||||
|
||||
```cpp
|
||||
// Smallest possible last element of a strictly increasing length-n array
// whose element-wise AND equals x: copy the bits of n - 1 into the zero
// bits of x, least significant position first.
long long minEnd(int n, long long x) {
    int remaining = n - 1;  // bits of n - 1 still to place

    for (long long probe = 1; remaining > 0; probe <<= 1) {
        if ((x & probe) != 0) {
            continue;  // this position of x is already fixed to 1
        }
        if (remaining & 1) {
            x |= probe;  // write the next bit of n - 1 into this gap
        }
        remaining >>= 1;
    }

    return x;
}
|
||||
```
|
||||
|
||||
## asymptotic complexity
|
||||
|
||||
- <u>Space Complexity</u>: $\Theta(1)$—a constant amount of numeric variables
|
||||
are allocated regardless of $n$ and $x$.
|
||||
- <u>Time Complexity</u>: in the worst case, may need to traverse the entirety
|
||||
of $x$ to distribute every bit of $n - 1$ to $x$. This occurs if and only if
|
||||
$x$ is all ones ($\exists k\gt 0 : 2^k-1=x$)). $x$ and $n$ have $lg(x)$ and
|
||||
$lg(n)$ bits respectively, so the solution is $O(lg(x) + lg(n))\in
|
||||
O(log(xn))$. $1\leq x,n\leq 1e8$, so this runtime is bounded by
|
||||
$O(log(1e8^2))\in O(1)$.
|
||||
661
src/content/algorithms/models-of-production.mdx
Normal file
|
|
@ -0,0 +1,661 @@
|
|||
---
|
||||
title: "models of production"
|
||||
date: "22/06/2024"
|
||||
useKatex: true
|
||||
useD3: true
|
||||
scripts: ["/scripts/models-of-production.js"]
|
||||
showToc: true
|
||||
---
|
||||
|
||||
This post offers a basic introduction to the Solow, Romer, and Romer-Solow economic models, as taught by [Vladimir Smirnyagin](https://www.vladimirsmirnyagin.com/) and assisted by [Donghyun Suh](https://www.donghyunsuh.com/) in Intermediate Macroeconomics (ECON 3020) during the Spring semester of 2024 at the University of Virginia.
|
||||
|
||||
# solow
|
||||
|
||||
## introduction
|
||||
|
||||
The Solow Model is an economic model of production that incorporates the idea of capital accumulation. Based on the [Cobb-Douglas production function](https://en.wikipedia.org/wiki/Cobb%E2%80%93Douglas_production_function), the Solow Model describes production as follows:
|
||||
|
||||
$$
|
||||
Y_t=F(K_t,L_t)=\bar{A}K_t^\alpha L_t^{1-\alpha}
|
||||
$$
|
||||
|
||||
With:
|
||||
|
||||
- $\bar{A}$: total factor productivity (TFP)
|
||||
- $\alpha$: capital's share of output—usually $1/3$ based on [empirical data](https://arxiv.org/pdf/1105.2123)
|
||||
|
||||
In this simple model, the following statements describe the economy:
|
||||
|
||||
1. Output is either saved or consumed; in other words, savings equals investment
|
||||
2. Capital accumulates according to investment $I_t$ and depreciation $\bar{d}$, beginning with $K_0$ (often called the <u>Law of Capital Motion</u>)
|
||||
3. Labor $L_t$ is time-independent
|
||||
4. A savings rate $\bar{s}$ describes the invested portion of total output
|
||||
|
||||
Including the production function, these four ideas encapsulate the Solow Model:
|
||||
|
||||
1. $C_t + I_t = Y_t$
|
||||
2. $\Delta K_{t+1} = I_t - \bar{d} K_t$
|
||||
3. $L_t = \bar{L}$
|
||||
4. $I_t = \bar{s} Y_t$
|
||||
|
||||
## solving the model
|
||||
|
||||
Visualizing the model, namely output as a function of capital, provides helpful intuition before solving it.
|
||||
|
||||
Letting $(L_t,\alpha)=(\bar{L}, \frac{1}{3})$, it follows that:
|
||||
|
||||
$$
|
||||
Y_t=F(K_t,L_t)=\bar{A}K_t^{\frac{1}{3}} \bar{L}^{\frac{2}{3}}
|
||||
$$
|
||||
|
||||
Utilizing this simplification and its graphical representation below, output is clearly characterized by the cube root of capital:
|
||||
|
||||
<div class="graph">
|
||||
<div id="solow-visualization"></div>
|
||||
</div>
|
||||
<div class="sliders">
|
||||
<div style="padding-right: 20px">
|
||||
<ul>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderSA">$\bar{A}:$</label>
|
||||
<span id="outputSA">1.00</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderSA"
|
||||
min="0.1"
|
||||
max="2"
|
||||
step="0.01"
|
||||
value="1"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderSd">$\bar{d}:$</label>
|
||||
<span id="outputSd">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderSd"
|
||||
min="0.01"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div style="padding-left: 20px">
|
||||
<ul start="3">
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderSs">$\bar{s}:$</label>
|
||||
<span id="outputSs">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderSs"
|
||||
min="0.01"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderSalpha">$\alpha:$</label>
|
||||
<span id="outputSalpha">0.33</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderSalpha"
|
||||
min="0.01"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.33"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
When investment is completely disincentivized by depreciation (in other words, $sY_t=\bar{d}K_t$), the economy equilibrates at a so-called "steady-state" with equilibrium $(K_t,Y_t)=(K_t^*,Y_t^*)$.
|
||||
|
||||
Using this equilibrium condition, it follows that:
|
||||
|
||||
$$
|
||||
\begin{align*}
|
||||
Y_t^* &= \bar{A}{K_t^*}^\alpha\bar{L}^{1-\alpha} \\
|
||||
\bar{d}K_t^* &= \bar{s}\bar{A}{K_t^*}^\alpha\bar{L}^{1-\alpha} \\
|
||||
K^* &= \bar{L}(\frac{\bar{s}\bar{A}}{\bar{d}})^\frac{1}{1-\alpha} \\
|
||||
Y^* &= \bar{A}^\frac{1}{1-\alpha}(\frac{\bar{s}}{\bar{d}})^\frac{\alpha}{1-\alpha}\bar{L}
|
||||
\end{align*}
|
||||
$$
|
||||
|
||||
Thus, the equilibrium intensive form (output per worker) of both capital and output are summarized as follows:
|
||||
|
||||
$$
|
||||
(k^*,y^*)=(\frac{K^*}{\bar{L}},\frac{Y^*}{\bar{L}}) =((\frac{\bar{s}\bar{A}}{\bar{d}})^\frac{1}{1-\alpha}, \bar{A}^\frac{1}{1-\alpha}(\frac{\bar{s}}{\bar{d}})^\frac{\alpha}{1-\alpha})
|
||||
$$
|
||||
|
||||
## analysis
|
||||
|
||||
Using both mathematical intuition and manipulating the visualization above, we find that:
|
||||
|
||||
- $\bar{A}$ has a positive relationship with steady-state output
|
||||
- Capital is influenced by workforce size, TFP, and savings rate
|
||||
- Capital output share's $\alpha$ impact on output is twofold:
|
||||
1. Directly through capital quantity
|
||||
2. Indirectly through TFP
|
||||
|
||||
- Large deviations in capital from steady-state $K^*$ induce net investments of larger magnitude, leading to an accelerated reversion to the steady-state
|
||||
- Economies stagnate at the steady-state $(K^*,Y^*)$—this model provides no avenues for long-run growth.
|
||||
|
||||
Lastly (and perhaps most importantly), exogenous parameters $\bar{s}, \bar{d}$, and $\bar{A}$ all have immense ramifications on economic status. For example, comparing the difference in country $C_1$'s output versus $C_2$'s using the Solow Model, we find that a difference in economic performance can only be explained by these factors:
|
||||
|
||||
$$
|
||||
\frac{Y_1}{Y_2}=\frac{\bar{A_1}}{\bar{A_2}}(\frac{\bar{s_1}}{\bar{s_2}})^\frac{\alpha}{1-\alpha}
|
||||
$$
|
||||
|
||||
We see that TFP is more important in explaining the differences in per-capita output ($\frac{1}{1-\alpha}>\frac{\alpha}{1-\alpha},\alpha\in[0,1)$). Notably, the Solow Model does not give any insights into how to alter the most important predictor of output, TFP.
|
||||
|
||||
# romer
|
||||
|
||||
## introduction
|
||||
|
||||
How, then, can we address these shortcomings?
|
||||
|
||||
The Romer Model provides an answer by both modeling ideas $A_t$ (analogous to TFP in the Solow Model) endogenously and utilizing them to provide a justification for sustained long-run growth.
|
||||
|
||||
The Model divides the world into two parts:
|
||||
|
||||
- <u>Objects</u>: finite resources, like capital and labor in the Solow Model
|
||||
- <u>Ideas</u>: infinite,
|
||||
[non-rivalrous](https://en.wikipedia.org/wiki/Rivalry_(economics)) items
|
||||
leveraged in production (note that ideas may be excludable, though)
|
||||
|
||||
The Romer Models' production function can be modelled as:
|
||||
|
||||
$$
|
||||
Y_t=F(A_t,L_{yt})=A_tL_{yt}
|
||||
$$
|
||||
|
||||
With:
|
||||
|
||||
- $A_t$: the amount of ideas $A$ in period $t$
|
||||
- $L_{yt}$: the population working on production-facing (output-driving) tasks
|
||||
|
||||
Assuming $L_t=\bar{L}$ people work in the economy, a proportion $\bar{l}$ of the population focuses on making ideas: $L_{at}=\bar{l}\bar{L}\rightarrow L_{yt}=(1-\bar{l})\bar{L}$.
|
||||
|
||||
Further, this economy garners ideas with time at rate $\bar{z}$: the "speed of ideas". Now, we can describe the quantity of ideas tomorrow as function of those of today: <u>the Law of Ideal Motion</u> (I made that up).
|
||||
|
||||
$$
|
||||
A_{t+1}=A_t+\bar{z}A_tL_{at}\leftrightarrow\Delta A_{t+1}=\bar{z}A_tL_{at}
|
||||
$$
|
||||
|
||||
Analogously to capital in the Solow Model, ideas begin in the economy with some $\bar{A}_0\gt0$ and grow at an _exponential_ rate. At its core, this is because ideas are non-rivalrous; more ideas bring about more ideas.
|
||||
|
||||
Finally, we have a model:
|
||||
|
||||
1. $Y^*_t=A_tL_{yt}$
|
||||
2. $\Delta A_{t+1} = \bar{z}A_tL_{at}$
|
||||
3. $L_{yt}+L_{at}=\bar{L}$
|
||||
4. $L_{at}=\bar{l}\bar{L}$
|
||||
|
||||
A visualization of the Romer Model shows that the economy grows exponentially—production knows no bounds ([_ceteris paribus_](https://en.wikipedia.org/wiki/Ceteris_paribus), of course). A graph of $log_{10}(Y_t)$ can be seen below:
|
||||
|
||||
<div class="graph">
|
||||
<div id="romer-visualization"></div>
|
||||
</div>
|
||||
<div class="sliders">
|
||||
<div style="padding-right: 20px">
|
||||
<ul>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRz">$\bar{z}:$</label>
|
||||
<span id="outputRz">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRz"
|
||||
min="0.1"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRL">$\bar{L}:$</label>
|
||||
<span id="outputRL">505</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRL"
|
||||
min="10"
|
||||
max="1000"
|
||||
step="19"
|
||||
value="505"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div style="padding-left: 20px">
|
||||
<ul start="3">
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRl">$\bar{l}:$</label>
|
||||
<span id="outputRl">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRl"
|
||||
min="0.01"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRA0">$\bar{A}_0:$</label>
|
||||
<span id="outputRA0">500</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRA0"
|
||||
min="0"
|
||||
max="1000"
|
||||
step="100"
|
||||
value="500"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Playing with the sliders, this graph may seem underwhelming in comparison to the Solow Model. However, on a logarithmic scale, small changes in the parameters lead to massive changes in the growth rate of ideas and economies:
|
||||
|
||||
<div class="romer-table-container">
|
||||
<table id="romer-table">
|
||||
<thead>
|
||||
<tr id="romer-table-header"></tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr id="row-A_t"></tr>
|
||||
<tr id="row-Y_t"></tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
## solving the model
|
||||
|
||||
To find the output in terms of exogenous parameters, first note that
|
||||
|
||||
$$
|
||||
L_t=\bar{L}\rightarrow L_{yt}=(1-\bar{l})\bar{L}
|
||||
$$
|
||||
|
||||
Now, all that remains is to find ideas $A_t$. It is assumed that ideas grow at some rate $g_A$:
|
||||
|
||||
$$
|
||||
A_t=A_0(1+g_A)^t
|
||||
$$
|
||||
|
||||
Using the growth rate formula, we find:
|
||||
|
||||
$$
|
||||
g_A=\frac{A_{t+1}-A_t}{A_t}=\frac{A_t+\bar{z}A_tL_{at}-A_t}{A_t}=\bar{z}L_{at}=\bar{z}\bar{l}\bar{L}
|
||||
$$
|
||||
|
||||
Thus, ideas $A_t=A_0(1+\bar{z}\bar{l}\bar{L})^t$. Finally, output can be solved via the production function:
|
||||
|
||||
$$
|
||||
Y_t=A_t L_{yt}=A_0(1+\bar{z}\bar{l}\bar{L})^t(1-\bar{l})\bar{L}
|
||||
$$
|
||||
|
||||
## analysis
|
||||
|
||||
We see the Romer model exhibits long-run growth because ideas have non-diminishing returns due to their nonrival nature. In this model, capital and income eventually slow but ideas continue to yield increasing, unrestricted returns.
|
||||
|
||||
Further, all economies continually and perpetually grow along a constant "Balanced Growth Path" as previously defined by $Y_t$ as a function of the exogenous parameters. This directly contrasts the Solow model, in which an economy converges to a steady-state via transition dynamics.
|
||||
|
||||
Changes in the growth rate of ideas, then, alter the growth rate of output itself—in this case, parameters $\bar{l}, \bar{z}$, and $\bar{L}$. This is best exemplified by comparing the growth rate before and after a parameter changes. In the below example, a larger $\bar{l}$ initially drops output due to fewer workers being allocated to production. Soon after, though, output recovers along a "higher" Balanced Growth Path.
|
||||
|
||||
<div class="graph">
|
||||
<div id="romer-lchange-visualization"></div>
|
||||
</div>
|
||||
<div class="sliders">
|
||||
<div style="padding-right: 20px">
|
||||
<ul>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderlChange">$\bar{l}_1:$</label>
|
||||
<span id="outputlChange">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderlChange"
|
||||
min="0.1"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div style="padding-left: 20px">
|
||||
<ul start="3">
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="slidert0">$t_0:$</label>
|
||||
<span id="outputt0">50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="slidert0"
|
||||
min="1"
|
||||
max="99"
|
||||
step="1"
|
||||
value="50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Notably, while both the Romer and Solow Models help to analyze growth across countries, they both are unable to resolve one question: why can and do investment rates and TFP differ across countries? This is a more fundamental economic question involving culture, institutions, and social dynamics—one day I hope we'll have an answer.
|
||||
|
||||
# romer-solow
|
||||
|
||||
## introduction
|
||||
|
||||
While the Romer Model provides an avenue for long-run economic growth, it is anything but realistic—surely economies do not grow at an ever-increasing blistering rate into perpetuity. A model in which:
|
||||
|
||||
- Economies grow _faster_ the further _below_ they are from their balanced growth path
|
||||
- Economies grow _slower_ the further _above_ they are from their balanced growth path
|
||||
|
||||
would certainly be more pragmatic. The Solow Model's capital dynamics do, in some sense, mirror part of this behavior with respect to the steady-state (output converges quicker/slower to the steady state the further/closer it is from equilibrium).
|
||||
|
||||
Combining the dynamics of the Romer Model's ideas and the Solow Model's capital stock could yield the desired result. Intuitively, incorporating capital into output via the Solow Model's production function, as well as including the Law of Capital Motion seems like one way to legitimately create this so-called "Romer-Solow" model:
|
||||
|
||||
<div style="display: flex; justify-content: center">
|
||||
<div style="padding-right: 50px">
|
||||
<ol>
|
||||
<li>$Y_t=A_t K_t^\alpha L_{yt}^{1-\alpha}$</li>
|
||||
<li>$\Delta K_{t+1}=\bar{s}Y_t-\bar{d}K_t$</li>
|
||||
<li>$\Delta A_{t+1} = \bar{z}A_tL_{at}$</li>
|
||||
</ol>
|
||||
</div>
|
||||
<div style="padding-left: 50px">
|
||||
<ol start="4">
|
||||
<li>$L_{yt}+L_{at}=\bar{L}$</li>
|
||||
<li>$L_{at}=\bar{l}\bar{L}$</li>
|
||||
</ol>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
## solving the model
|
||||
|
||||
Based on the motivations for creating this model, it is more useful to first analyze the growth rates of equilibrium long run output $Y_t^*$.
|
||||
|
||||
According to the production function, $g_Y=g_A+\alpha g_K+(1-\alpha)g_{L_y}$
|
||||
|
||||
From previous analysis it was found that $g_A=\bar{z}\bar{l}\bar{L}$.
|
||||
|
||||
Based on the Law of Capital Motion,
|
||||
|
||||
$$
|
||||
g_K=\frac{\Delta K_{t+1}}{K_t}=\bar{s}\frac{Y_t}{K_t}-\bar{d}
|
||||
$$
|
||||
|
||||
Because growth rates are constant on the Balanced Growth Path, $g_K$ must be constant as well. Thus, so is $\bar{s}\frac{Y_t}{K_t}-\bar{d}$; it must be that $g_K^*=g_Y^*$.
|
||||
|
||||
The model assumes population is constant, so $g_{\bar{L}}=0\rightarrow g_{L_{yt}}=0$ as well.
|
||||
|
||||
Combining these terms, we find:
|
||||
|
||||
$$
|
||||
\begin{align*}
|
||||
g_Y^* &= \bar{z}\bar{l}\bar{L}+\alpha g_Y^*+(1-\alpha)\cdot 0 \\
|
||||
g_Y^* &= \frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}
|
||||
\end{align*}
|
||||
$$
|
||||
|
||||
Solving for $Y_t^*$ is trivial after discovering $g_K=g_Y$ must hold on a balanced growth path.
|
||||
|
||||
Invoking the <u>Law of Capital Motion</u> with magic chants,
|
||||
|
||||
$$
|
||||
g_K^*=\bar{s}\frac{Y_t^*}{K_t^*}-\bar{d}=g_Y^*\rightarrow K_t^*=\frac{\bar{s}Y_t^*}{g_Y^*+\bar{d}}
|
||||
$$
|
||||
|
||||
Isolating $Y_t^*$,
|
||||
|
||||
$$
|
||||
\begin{align*}
|
||||
Y_t^* &= A_t^* (\frac{\bar{s}Y_t^*}{g_Y^*+\bar{d}})^\alpha ({(1-\bar{l})\bar{L}})^{1-\alpha} \\
|
||||
{Y_t^*}^{1-\alpha} &= A_t^*(\frac{\bar{s}}{g_Y^*+\bar{d}})^\alpha({(1-\bar{l})\bar{L}})^{1-\alpha}
|
||||
\end{align*}
|
||||
$$
|
||||
|
||||
Plugging in the known expressions for $A_t^*$ and $g_Y^*$, a final expression for the Balanced Growth Path output as a function of the exogenous parameters and time is obtained:
|
||||
|
||||
$$
|
||||
Y_t^*={(A_0(1+\bar{z}\bar{l}\bar{L})^t})^\frac{1}{1-\alpha}(\frac{\bar{s}}{\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}+\bar{d}})^\frac{\alpha}{1-\alpha}(1-\bar{l})\bar{L}
|
||||
$$
|
||||
|
||||
## analysis
|
||||
|
||||
First looking at the growth rate of output, $g_Y^*=\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}$, idea-driving factors and an increased allocation of labor to output increase the equilibrium Balanced Growth Path—the _level_ of long-run growth. Thus, this model captures the influences of both capital and ideas on economic growth.
|
||||
|
||||
Looking at $Y_t^*$, ideas have both a direct and indirect effect on output. Firstly, ideas raise output because they increase productivity (directly); second, with the introduction of capital stock, ideas also increase capital, in turn increasing output further (indirectly). Mathematically, this is evident in both instances of $g_A^*$ in the formula for output $Y_t^*$—note that $\frac{1}{1-\alpha},\frac{\alpha}{1-\alpha}>0$ for any $\alpha\in(0,1)$, so $\frac{d}{dg_A^*}Y_t^*>0$.
|
||||
|
||||
Expectedly, output has a positive relationship with the savings rate and a negative relationship with the depreciation rate.
|
||||
|
||||
Using the visualization below, we see a growth pattern similar to that of the Romer Model. However, the Romer-Solow economy indeed grows at a faster rate than the Romer model—I had to cap $\bar{L}$ at $400$ and $\alpha$ at $0.4$ because output would be _too large_ for JavaScript to contain in a number (the graph would disappear).
|
||||
|
||||
<div class="graph">
|
||||
<div id="romer-solow-visualization"></div>
|
||||
</div>
|
||||
<div class="sliders">
|
||||
<div style="padding-right: 20px">
|
||||
<ul>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSz">$\bar{z}:$</label>
|
||||
<span id="outputRSz">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSz"
|
||||
min="0.1"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSA0">$A_0:$</label>
|
||||
<span id="outputRSA0">500</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSA0"
|
||||
min="0"
|
||||
max="1000"
|
||||
step="10"
|
||||
value="500"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSd">$\bar{d}:$</label>
|
||||
<span id="outputRSd">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSd"
|
||||
min="0.01"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSs">$\bar{s}:$</label>
|
||||
<span id="outputRSs">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSs"
|
||||
min="0.01"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div style="padding-left: 20px">
|
||||
<ul start="3">
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSalpha">$\alpha:$</label>
|
||||
<span id="outputRSalpha">0.33</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSalpha"
|
||||
min="0.01"
|
||||
max="0.40"
|
||||
step="0.01"
|
||||
value="0.33"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSl">$\bar{l}:$</label>
|
||||
<span id="outputRSl">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSl"
|
||||
min="0.01"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSL">$\bar{L}:$</label>
|
||||
<span id="outputRSL">200</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSL"
|
||||
min="0"
|
||||
max="400"
|
||||
step="10"
|
||||
value="200"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Playing with the parameters, the previous mathematical findings are validated. For example, because $g_Y^*=\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}$, only changes in parameters $\alpha,\bar{z},\bar{l}$, and $\bar{L}$ affect the growth rate of output, manifesting as the y-axis scaling up/down on a ratio scale.
|
||||
|
||||
However, do economies grow _faster_/_slower_ the further _below_/_above_ they are from their Balanced Growth Path, as initially desired? While this can be mathematically proven (of course), sometimes a visualization helps.
|
||||
|
||||
The graph below illustrates the transition dynamics of Romer-Solow Model. Namely, $(\bar{z}, \bar{l}, \bar{L}, \alpha)=(0.5, 0.5, 100, 0.33)\forall t<t_0$, then update to the slider values when $t>t_0$.
|
||||
|
||||
<div class="graph">
|
||||
<div id="romer-solow-change-visualization"></div>
|
||||
</div>
|
||||
<div class="sliders">
|
||||
<div style="padding-right: 20px">
|
||||
<ul>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSCz0">$\bar{z}_0:$</label>
|
||||
<span id="outputRSCz0">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSCz0"
|
||||
min="0.1"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSCalpha0">$\alpha_0:$</label>
|
||||
<span id="outputRSCalpha0">0.33</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSCalpha0"
|
||||
min="0.01"
|
||||
max="0.54"
|
||||
step="0.01"
|
||||
value="0.33"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSCL0">$\bar{L}_0:$</label>
|
||||
<span id="outputRSCL0">100</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSCL0"
|
||||
min="0"
|
||||
max="200"
|
||||
step="10"
|
||||
value="100"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div style="padding-left: 20px">
|
||||
<ul start="3">
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSCl0">$\bar{l}_0:$</label>
|
||||
<span id="outputRSCl0">0.50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSCl0"
|
||||
min="0.01"
|
||||
max="0.99"
|
||||
step="0.01"
|
||||
value="0.50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="slider">
|
||||
<label for="sliderRSCt0">$t_0:$</label>
|
||||
<span id="outputRSCt0">50</span>
|
||||
<input
|
||||
type="range"
|
||||
id="sliderRSCt0"
|
||||
min="0"
|
||||
max="100"
|
||||
step="1"
|
||||
value="50"
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Finally, it is clear that economies converge to their Balanced Growth Path as desired—something slightly more convoluted to prove from the complex expression for $Y^*$ derived earlier. For example, with an increase in $\alpha_0$, output grows at an increasing rate after the change, then increases at a decreasing rate as it converges to the new higher Balanced Growth Path. Increasing parameters $\bar{z},\bar{l},\bar{L}$ yield similar results, although the changes are visually less obvious.
|
||||
351
src/content/algorithms/proofs.mdx
Normal file
|
|
@ -0,0 +1,351 @@
|
|||
---
|
||||
title: "proofs"
|
||||
date: "13/06/2025"
|
||||
useKatex: true
|
||||
useD3: true
|
||||
---
|
||||
|
||||
import Pseudocode from "@components/Pseudocode.astro";
|
||||
|
||||
A computer science student attempting to learn proofs.
|
||||
|
||||
# 1032
|
||||
|
||||
## E
|
||||
|
||||
Minimize-Digit-Diff($l, r$):
|
||||
|
||||
1. Initialize $ans=18$, the largest possible value of $f(l, x) + f(x, r)$ with $l \leq r\leq 10^9$
|
||||
2. For $i:=1$ to $200$:
|
||||
|
||||
- Let $x$ be a random sample from the closed interval $[l, r]$
|
||||
- Let $cost:=f(l, x) + f(x, r)$, computed in $O(1)$ time
|
||||
- $ans:=min(ans,cost)$
|
||||
|
||||
3. Return $ans$
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
We are interested in minimizing the expected probability of failure over $t\leq10^4$ tests. Firstly, consider a single test (i.e. a single $(l,r)$ input):
|
||||
|
||||
An $n$ digit candidate $x$ has a $\frac{1}{10}\cdot 2=\frac{1}{5}$ possibility of colliding with the corresponding digit from $l$ or $r$[^1].
|
||||
Thus, a digit has approximately a $1-\frac{1}{5}=\frac{4}{5}$ probability of minimizing the score for a digit.
|
||||
It follows that the candidate then has roughly a $(\frac{4}{5})^9\approx13\%$ chance of minimizing the entire score $f(l,x)+f(x,r)$.
|
||||
So, one random sample $x$ has a $100-13\approx87\%$ chance of being a suboptimal candidate.
|
||||
|
||||
Consider sampling $n$ times--the probability of all random samples being suboptimal is $0.87^n$. Let's only settle for a probability of failure near $1/10^{10}$ and solve for our number of trials:
|
||||
|
||||
$$
|
||||
0.87^n=1/10^{10}\rightarrow n\ln(0.87)=\ln(1/10^{10})\rightarrow n=\frac{\ln(1/10^{10})}{\ln(0.87)}\rightarrow n\approx165
|
||||
$$
|
||||
|
||||
The probability all tests succeed is $(1-1/10^{10})^{10^4}$, so the probability that at least one test fails is $1-((1-1/10^{10})^{10^4})\approx 9.99\cdot10^{-7}$.
|
||||
|
||||
$\blacksquare$
|
||||
|
||||
---
|
||||
|
||||
## G
|
||||
|
||||
Maximize-F(s):
|
||||
|
||||
1. Compute the prefix sum of zeroes $Z$ and ones $O$ in $s$
|
||||
2. Concatenate and sort $Z$ and $O$ into array $S$
|
||||
3. Let $first:=\sum_{1\leq i\leq n}i\cdot(n-i+1)$, computed in $O(|s|)$ time
|
||||
4. Let $second:=\sum_{1\leq i\leq n}S_i\cdot (2\cdot i-n)$, computed in $O(|s|)$ time
|
||||
5. Return $\frac{first+second}{2}$
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
Let $X$ and $Y$ be the number of occurrences of zeroes and ones in the substring $p$ of $s$. Then, $f(p)=max(X, Y)=\frac{X+Y+|X-Y|}{2}$. The goal is to compute:
|
||||
|
||||
$$
|
||||
\begin{align*}
|
||||
\sum_{1\leq r\leq n}\sum_{1\leq l<r} f(s[l:r+1]) &= \sum_{1\leq r\leq n}\sum_{1\leq l<r} \frac{X+Y+|X-Y|}{2} \\
|
||||
&= \frac{1}{2}(\sum_{1\leq r\leq n}\sum_{1\leq l<r} X+Y+\sum_{1\leq r\leq n}\sum_{1\leq l<r} |X-Y|)
|
||||
\end{align*}
|
||||
$$
|
||||
|
||||
Consider the first term, which counts the number of occurrences of ones and zeroes across all one-indexed bounds $l,r$.
|
||||
Rather than enumerate all $n^2$ subarrays, consider how much each index $1\leq i\leq n$ contributes to the sum.
|
||||
There are $i$ possible start positions at or before index $i$ and $n-i+1$ possible end positions at or after it, and each such pairing forms a substring containing the character.
|
||||
Thus, the character contributes $i\cdot(n-i+1)$ to the term.
|
||||
So, the first term aggregates this quantity across all indices and can be computed in linear time: $\sum_{1\leq i\leq n}i\cdot(n-i+1)$.
|
||||
|
||||
For the second term, consider the sum more abstractly--it computes $|X-Y|$, the absolute difference of the number of zeroes and ones in the substring across all index pairs.
|
||||
Because $|a-b|=|b-a|$, we are free to rearrange $X$ and $Y$ as we see fit.
|
||||
Sort all $X$ and $Y$ values into some array $S$ and compute the difference across all pairs.
|
||||
As $S_i<S_{j>i}$, $|S_{j>i}-S_i|=S_{j>i}-S_i$--the sum is unchanged. Rewriting the second term:
|
||||
|
||||
$$
|
||||
\begin{align*}
|
||||
\sum_{1\leq r\leq n}\sum_{1\leq l<r} |X-Y| &= \sum_{1\leq r\leq n}\sum_{1\leq l<r}S_r-\sum_{1\leq r\leq n}\sum_{1\leq l<r}S_l \\
|
||||
&= \sum_{1\leq r\leq n}r\cdot S_r-\sum_{1\leq l<r\leq n} S_l \\
|
||||
&= \sum_{1\leq r\leq n}r\cdot S_r-\sum_{1\leq r\leq n} (n-r)\cdot S_r
|
||||
\end{align*}
|
||||
$$
|
||||
|
||||
which can be computed in linear time.
|
||||
|
||||
$\blacksquare$
|
||||
|
||||
# 993
|
||||
|
||||
## A
|
||||
|
||||
Count-Pairs($n$):
|
||||
|
||||
1. Return $n-1$
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
Suppose $(a,b)\in\mathbb{N}^2$. Because $a=n-b$ and $a\geq1$, it follows that $1\leq b\leq n-1$. Each choice of $b$ yields a unique $a=n-b$, so there are $n-1$ unique solutions.
|
||||
|
||||
$\blacksquare$
|
||||
|
||||
## B
|
||||
|
||||
Mirror-String($s$):
|
||||
|
||||
1. Reverse $s$
|
||||
2. For each character $c$ in $s$:
|
||||
|
||||
- If $c$ is "w": Print($c$)
|
||||
- If $c$ is "p": Print($q$)
|
||||
- If $c$ is "q": Print($p$)
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
The string appears flipped on the y-axis from within the screen due to the perspective
|
||||
shifting. Structurally, it is read right-to-left. "p"/"q"/"w" appear as "q"/"p"/"w" when flipped on its y-axis.
|
||||
|
||||
$\blacksquare$
|
||||
|
||||
## C
|
||||
|
||||
Seat-Monkeys($a$, $b$, $c$, $m$):
|
||||
|
||||
1. Return $min(m, a)+min(m, b)+min(c, 2\cdot m-(min(m, a) + min(m, b)))$
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
Consider an assignment of monkeys $S$ that sits the $a$ and $b$ monkeys in the first and second row and then fills the remaining seats with the $c$ monkeys.
|
||||
|
||||
Assume there exists a more optimal assignment of monkeys $S^{'}$. WLOG, assume $S^{'}$ sits $a$ and $b$ monkeys first in their respective rows.
|
||||
|
||||
$S^{'}$ can only differ from $S$ as follows:
|
||||
|
||||
1. Seats a $c$ monkey in row 1 instead of an $a$ monkey
|
||||
|
||||
- $S^{'}$ leaves an $a$ monkey unseated. $S$ seats this monkey instead--the same number of monkeys are seated in $S$.
|
||||
|
||||
2. Seats a $c$ monkey in row 2 instead of a $b$ monkey
|
||||
|
||||
- $S^{'}$ leaves a $b$ monkey unseated. $S$ seats this monkey instead--the same number of monkeys are seated in $S$.
|
||||
|
||||
3. Does not seat a monkey where $S$ has
|
||||
|
||||
- $S$ seats more than $S^{'}$.
|
||||
|
||||
In all cases, $S^{'}$ is no better than S, therefore $S$ is optimal.
|
||||
|
||||
$\blacksquare$
|
||||
|
||||
## D
|
||||
|
||||
Construct-B($a$):
|
||||
|
||||
1. Let $b$ be an array of size $n=\#a$ and $X$ be the set of numbers in $a$.
|
||||
2. For each element $x$ of $a$ at index $i$:
|
||||
|
||||
- If $x\in X$:
|
||||
- $b[i]:=x$
|
||||
- $X:=X \backslash \{x\}$
|
||||
|
||||
3. Let $Y=\{1,2,\cdots,n\}\backslash\{a_i:1\leq i\leq n\}$
|
||||
4. For each element $x$ of $b$ at index $i$:
|
||||
|
||||
- If $b[i]$ is NIL:
|
||||
- $b[i]:=\text{first-element}(Y)$
|
||||
- $Y:=Y\backslash\{\text{first-element}{(Y)}\}$
|
||||
|
||||
5. Return $b$
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
Consider the array $b$ from Construct-B.
|
||||
|
||||
If there are $x$ unique elements in $a$, the algorithm assigns each of those to the first $x$ positions in $b$. There are $n-x$ duplicate elements in $a$ and $n-x$ remaining positions in $b$.
|
||||
|
||||
Since $\forall a\in A:1\leq a\leq n$, there are $n-x$ unused unique elements in $\{1,2,\cdots,n\}$, each of which is assigned a unique position in $b$.
|
||||
|
||||
Therefore, all elements of $b$ are unique and thus each is a mode. As every unique element in $a$ is assigned to an index no later in $b$, every $a_i$ is a mode in $b[:i+1]$.
|
||||
|
||||
$\blacksquare$
|
||||
|
||||
## E
|
||||
|
||||
Count-Pairs($l_1$, $l_2$, $r_1$, $r_2$, $k$):
|
||||
|
||||
1. Let $A:=\lfloor log_k(r_2/l_1)\rfloor$
|
||||
2. Let $B:=\lfloor max(0, log_k(l_2/r_1))\rfloor$
|
||||
3. Let $\text{total}:=0$
|
||||
4. For each $B\leq n\leq A$:
|
||||
|
||||
- Let $r=\lfloor r_2/ k^n\rfloor$
|
||||
- Let $l=\lceil l_2/k^n\rceil$
|
||||
- $\text{total} := \text{total} + max(0, r - l + 1)$
|
||||
|
||||
5. Return $\text{total}$
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
Each value of $n$ corresponds to a line with slope $k^n$ because $y/x=k^n\leftrightarrow y=x\cdot k^n$. The problem can be visualized as follows:
|
||||
|
||||

|
||||
|
||||
It is sufficient to count the number of ordered $(x,y)$ pairs for all valid $n$. Because $y=x\cdot k^n\leftrightarrow n=log_k(y/x)$, $n\in [log_k(l_2/r_1), log_k(r_2/l_1)]$.
|
||||
|
||||
For each $n_0$ in this range, the smallest $x$ satisfying $y=x\cdot k^n_0$ is $\lceil l_2/k^n_0\rceil$ and the largest $\lfloor r_2/k^n_0\rfloor$, so $n_0$ contributes $max(0, \lfloor r_2/k^n_0\rfloor - \lceil l_2/k^n_0\rceil + 1)$ ordered pairs.
|
||||
|
||||
## F
|
||||
|
||||
1. Let $A=\sum a$ and $B=\sum b$.
|
||||
2. For each query with requested beauty $q$:
|
||||
|
||||
- If $\exists (i,j)\in(\{1,2,\cdots,n\},\{1,2,\cdots,m\}):(A-a[i])\cdot(B-b[j])=q$, print "YES"
|
||||
- Otherwise, print "NO"
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
The beauty of the grid equals $B=\sum_i \sum_j M_{i,j}=\sum_i\sum_j a_i\cdot b_j=\sum_i(a_i\cdot \sum_j b_j)=(\sum_i a_i)\cdot (\sum_j b_j)$.
|
||||
|
||||
Formulating the operation of setting row $i$ and column $j$ to zero, the new beauty is:
|
||||
|
||||
$$
|
||||
\begin{align*}
|
||||
q&=B-(b_j\cdot(\sum a)+a_i\cdot(\sum b)-a_i\cdot b_j) \\
|
||||
&=((\sum a)-a_i)\cdot((\sum b)-b_j)
|
||||
\end{align*}
|
||||
$$
|
||||
|
||||
If such $a_i$ and $b_j$ exist, the operation can be performed.
|
||||
|
||||
## G1
|
||||
|
||||
1. Let $G$ be the input graph
|
||||
2. Let $\text{ans}=0$
|
||||
3. For each component $C$ in $G$:
|
||||
|
||||
- Let $\text{cycle}$ be the set of nodes in the cycle of $C$
|
||||
- Let $d=max_{u\in \text{cycle}}\text{Distance-To-Cycle(C, u)}$
|
||||
- $\text{ans}:=max(\text{ans}, d)$
|
||||
|
||||
4. Return $\text{ans}$
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
Consider some $v_i\in V$. There must exist an edge $e=(v_i,v_j),i\neq j$. Following this path from $v_i$, each edge must map to a previously seen node, forming a cycle, or a new node. Because the graph is finite, the path starting at any $v_i$ must contain a cycle. Thus, $G$ is a graph of components with one cycle each.
|
||||
|
||||
Next, consider some component $C\in G$. For every spider $v_i$ in $C$:
|
||||
|
||||
- If $v_i$ is in the cycle:
|
||||
- The cycle itself will always be stable. Every spider has a plushie and each spider will give and receive a plushie.
|
||||
- Otherwise, $v_i$ is not in the cycle.
|
||||
- Let $v_j$ be the furthest spider with a plushie on the path containing $v_i$ to the cycle. When $v_j$ gives a plushie to its child, the graph is unstable because $v_j$ transitions from state $0$ to $1$. However, the path containing $v_j$ and its ancestors become stable the next year because they never receive or give a plushie again. The path takes $d_j$ years to become stable, the distance from $v_j$ to the cycle.
|
||||
|
||||
Therefore, $C$ becomes stable in $d_C:=max_{v_j\in C}(d_j)$ time.
|
||||
|
||||
The entire graph becomes stable when each component becomes stable, which is the longest time any component takes to become stable. Thus, the graph becomes stable in $max_{C\in G}d_C$ years.
|
||||
|
||||
$\blacksquare$
|
||||
|
||||
## G2
|
||||
|
||||
1. Let $G$ be the input graph
|
||||
2. Let $\text{ans}=0$
|
||||
3. For each component $C$ in $G$:
|
||||
|
||||
- Let $U$ be the set of all nodes not in the cycle of $C$
|
||||
- Let $\text{count}$ be the number of plushies each spider has
|
||||
- Let $\text{par}$ be the set of parents for each spider
|
||||
- Run a multi-source BFS simulating the state transitions on $U$. For each iteration at year $y$:
|
||||
- $\text{count[u]}:=\text{count[u]}+\sum_{p\in \text{par[u]}}\text{count[p]}$
|
||||
- $\text{ans}:=max(\text{ans}, \text{count[u]})$
|
||||
- $y:=y+1$
|
||||
|
||||
4. Return $\text{ans}$
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
Consider some $v_i\in V$. There must exist an edge $e=(v_i,v_j),i\neq j$. Following this path from $v_i$, each edge must map to a previously seen node, forming a cycle, or a new node. Because the graph is finite, the path starting at any $v_i$ must contain a cycle. Thus, $G$ is a graph of components with one cycle each.
|
||||
|
||||
Next, consider some component $C\in G$. For every spider $v_i$ in $C$:
|
||||
|
||||
- If $v_i$ is in the cycle:
|
||||
- The cycle itself will always be stable. If a spider has $x$ plushies in year $y$, it will give and receive one plushie and have $x$ in the next year as well.
|
||||
- Otherwise, $v_i$ is not in the cycle.
|
||||
- Let $v_j$ be the furthest spider on a path containing $v_i$ to the cycle. When $v_j$ gives a plushie to its child, the graph is unstable because $v_j$ transitions from state $0$ to $1$. However, the path containing $v_j$ and its ancestors become stable the next year because they never receive or give a plushie again.
|
||||
- However, the child may have more than one plushie next year. If the child had $x_0$ plushies on year $y$ and received $x_1$ plushies the next, it must ultimately give $x_0+x_1$ plushies, taking $x_0+x_1$ years. The path becomes stable in the maximum time it takes any spider on the path to give its plushies, $d_i$. The algorithm gathers the $x_1$ term by considering all parents of the child and propagates the plushie counts by simulation.
|
||||
|
||||
Therefore, $C$ becomes stable in $d_C:=max_{v_i\in C}(d_i)$ time.
|
||||
|
||||
The entire graph becomes stable when each component becomes stable, which is the longest time any component takes to become stable. Thus, the graph becomes stable in $max_{C\in G}d_C$ years.
|
||||
|
||||
$\blacksquare$
|
||||
|
||||
## H
|
||||
|
||||
- Let $\text{Prefix}$ be the 2D-prefix sum matrix of $A$
|
||||
- Let $\text{Colwise}$ be the column-wise 2D-prefix coefficient sum matrix of $A$
|
||||
- Let $\text{Rowwise}$ be the row-wise 2D-prefix coefficient sum matrix of $A$
|
||||
- Let $\text{Submatrix-Sum}(x_1,y_1,x_2,y_2)$ compute the sum of the submatrix of $M$ bounded by $(x_1,y_1)$ and $(x_2,y_2)$
|
||||
|
||||
1. For each query $x_1,x_2,y_1,y_2$:
|
||||
|
||||
- Let $w=y_2-y_1+1$
|
||||
- Let $prefix=\text{Submatrix-Sum}(\text{Prefix},x_1,y_1,x_2,y_2)$
|
||||
- Let $rowsum=\text{Submatrix-Sum}(\text{Rowwise},x_1,y_1,x_2,y_2)$
|
||||
- Let $colsum=\text{Submatrix-Sum}(\text{Colwise},x_1,y_1,x_2,y_2)$
|
||||
- Return $w\cdot rowsum+colsum-(x_1+y_1-1)\cdot \text{prefix}$
|
||||
|
||||
---
|
||||
|
||||
_Proof._
|
||||
|
||||
Mathematically formulated:
|
||||
|
||||
$$
|
||||
\begin{align*}
|
||||
\sum_{i}i\cdot A_i
|
||||
&= \sum_{i=x_1}^{x_2}\sum_{j=y_1}^{y_2}(i+(y_2-y_1+1)\cdot j)\cdot M_{i,j} \\
|
||||
&= \sum_{i=x_1}^{x_2}\sum_{j=y_1}^{y_2}M_{i,j}\cdot i+(y_2-y_1+1)\sum_{i=x_1}^{x_2}\sum_{j=y_1}^{y_2} M_{i,j}\cdot j \\
|
||||
&= \sum_{i=1}^{x_2} \sum_{j=1}^{y_2} i\cdot M_{i,j} + (y_2-y_1+1)\cdot\sum_{i=1}^{x_2} \sum_{j=1}^{y_2}j\cdot M_{i,j} - ( x_1 + y_1 - 1 ) \sum_{i=x_1}^{x_2} \sum_{j=y_1}^{y_2} M_{i,j}
|
||||
\end{align*}
|
||||
$$
|
||||
|
||||
Where the first term is $\text{Colwise}$ and the second $\text{Rowwise}$. Because the query matrix is offset by $x_1$ rows and $y_1$ cols, the algorithm avoids double-counting by subtracting $x_1+y_1-1$ times the $\text{prefix}$ matrix sum.
|
||||
|
||||
$\blacksquare$
|
||||
|
||||
[^1]: This is a massive and inaccurate simplification made for pedagogical purposes. In reality, there are far fewer candidates, and thus optimal solutions, than all permutations of 10 digits between some $l$ and $r$. However, because the computation is so lightweight, one may increase the number of trials to 1000 or even 10000. While not rigorous, given that only $n=165$ candidates can provide a probability of failure of $10^{-7}$, running an order of magnitude greater number of trials is nearly certain to pass all test cases.
|
||||
182
src/content/autonomous-racing/multithreading-a-gui.mdx
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
---
|
||||
title: "multithreading a gui"
|
||||
date: "28/05/2025"
|
||||
---
|
||||
|
||||
# the problem
|
||||
|
||||
On the [Cavalier Autonomous Racing](https://autonomousracing.dev) team at my school, we have a pretty ~~useless~~ cool basestation acting as a real-time telemetry visualization tool. We want to expand this GUI significantly, including tabulated windows for each sub-team.
|
||||
|
||||
Leveraging concurrency is vital for keeping things efficient and data up-to-date but this is easier said than done. Consider the original design:
|
||||
|
||||
# original architecture
|
||||
|
||||
Originally, the GUI followed the traditional QtC++ ROS single-threaded pattern. The GUI event loop ran on one thread, the main one:
|
||||
|
||||
```cpp
|
||||
|
||||
void spin_node(basestation::Gui* gui, ...) {
|
||||
...
|
||||
|
||||
rclcpp::executors::SingleThreadedExecutor executor;
|
||||
rclcpp::Node::SharedPtr node = std::make_shared<basestation::GuiNode>(init_gui);
|
||||
|
||||
executor.add_node(node);
|
||||
while (init_gui->is_running) {
|
||||
executor.spin_once(timeout);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
All of 30+ topic callbacks were registered on this thread:
|
||||
|
||||
```cpp
|
||||
GuiNode::GuiNode(basestation::Gui* init_gui) : Node("uva_gui") {
|
||||
this->gui = init_gui;
|
||||
|
||||
rclcpp::QoS best_effort_qos = rclcpp::QoS(rclcpp::QoSInitialization(RMW_QOS_POLICY_HISTORY_KEEP_LAST, 1));
|
||||
best_effort_qos.best_effort();
|
||||
|
||||
this->pubDesVel_ = create_publisher<std_msgs::msg::Float32>("vehicle/set_desired_velocity", 1);
|
||||
...
|
||||
this->subAutonomy_ = create_subscription<deep_orange_msgs::msg::Autonomy>("/telemetry/autonomy", 1, std::bind(&GuiNode::receiveAutonomy, this, std::placeholders::_1));
|
||||
}
|
||||
```
|
||||
|
||||
The Qt Framework is quite complex and beyond the scope of this post. Big picture, the GUI controls a bunch of data-independent visualizations that are handled on this GUI thread.
|
||||
|
||||
How do we optimize this? It is necessary to take a step back and observe the structure of the entire application:
|
||||
|
||||

|
||||
|
||||
Many flaws are now clear:
|
||||
|
||||
- Single, high-frequency topics can flood the GUI, causing stale data for other Widgets
|
||||
- Data races in which multiple callbacks modify shared data are especially difficult to handle
|
||||
- Poor separation of responsibility: the `MainWindow` is responsible for too much—it should not need first-hand knowledge of every Widget's API
|
||||
|
||||
Luckily, we're far from the first (and the last) to encounter a problem such as this. Enter ROS's exhaustive suite of concurrency tools.
|
||||
|
||||
# multi-threaded architecture
|
||||
|
||||
First, let's pin down what exactly can be parallelized.
|
||||
|
||||
1. Subscriptions: group subscription callbacks[^1] according to the displayed widgets. For the CAR, this was three `Vehicle`, `Trajectory`, and `Perception` groups. Now, widgets can be updated as dependent data is received.
|
||||
2. Visualizations: now that data is handled independently, GUI widgets can be updated (with care) as well.
|
||||
|
||||
> Just kidding. Because of ROS, all GUI widgets can only be updated on the main GUI thread.
|
||||
|
||||
This led me to the following structure:
|
||||
|
||||

|
||||
|
||||
- Three callback groups are triggered at differing intervals according to their urgency on the GUI node
|
||||
- A thread-safe queue[^2] processes all ingested data for each callback group
|
||||
- Every 10ms, the GUI is updated, highest to lowest urgency messages first
|
||||
- The `MainWindow` houses the visualization widgets as before—however, the GUI thread actually performs the update logic
|
||||
- GUI Widgets were re-implemented to be thread-safe with basic locking, a small amount of overhead for safe memory access
|
||||
|
||||
## \*actual\* multi-threaded implementation
|
||||
|
||||
Sounds good, right? Well, I should've done my research first. The Qt framework has _already internalized_ the logic for this entire paradigm of multithreaded code. Turns out all I need are:
|
||||
|
||||
- [Signals/slots](https://doc.qt.io/qt-6/signalsandslots.html) and a `Qt::QueuedConnection`
|
||||
- Running the GUI with `MultithreadedExecutor`
|
||||
|
||||
As it turns out, signals and slots _automatically_ leverage ROS's internal thread-safe message queue, ensuring deserialization one at a time.
|
||||
|
||||
The following (final) design employs two threads:
|
||||
|
||||
1. Main GUI Thread (Qt Event Loop): handles UI rendering + forward signals/slots to executor
|
||||
2. Executor Thread: runs callbacks and publishes messages
|
||||
|
||||
The executor is simply spawned in the main thread:
|
||||
|
||||
```cpp
|
||||
std::thread ros_thread([this]() {
|
||||
executor.spin();
|
||||
});
|
||||
```
|
||||
|
||||
Data flows from a called subscription → queued signal → signal connected to a slot → slot runs the GUI widget when scheduled.
|
||||
|
||||
```cpp
|
||||
// 1. Subscribe to a topic
|
||||
this->subAutonomy_ = create_subscription<...>("/telemetry/autonomy", 1, std::bind(&GuiNode::receiveAutonomy, ...));
|
||||
|
||||
// 2. Queue a signal to the emitter
|
||||
void GuiNode::receiveAutonomy(deep_orange_msgs::msg::Autonomy::SharedPtr msg) {
|
||||
this->des_vel = msg->desired_velocity_readout;
|
||||
signal_emitter->autonomyReceived(msg);
|
||||
}
|
||||
|
||||
// 3. Signal connects to slot (registered in initialization)
|
||||
QObject::connect(signal_emitter, &GuiSignalEmitter::autonomyReceived,
|
||||
window, &MainWindow::receiveAutonomy, Qt::QueuedConnection);
|
||||
|
||||
// 4. Slot-performed logic when ROS runs thread
|
||||
void MainWindow::receiveAutonomy(
|
||||
const deep_orange_msgs::msg::Autonomy::SharedPtr msg) {
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
Elegantly, registering a signal/slot with `Qt::QueuedConnection` does all of the hard work:
|
||||
|
||||
- Queueing messages in the target thread's loop
|
||||
- Actual slot execution happens in the GUI thread
|
||||
- Prevents cross-thread memory access/critical sections
|
||||
- Qt event-level synchronization
|
||||
|
||||
# retrospective
|
||||
|
||||
Looking back, this GUI should've been implemented with a modern web framework such as [React](https://react.dev/) with [react-ros](https://github.com/flynneva/react-ros?tab=readme-ov-file). CAR needs high-speed, reactive data, and a QtC++ front-end is simply not meant for this level of complexity. I made it a lot harder than it needed to be with my lack of due diligence, but the single-threaded GUI event loop in ROS is more harm than help.
|
||||
|
||||
[^1]: See [the ROS documentation](https://docs.ros.org/en/foxy/How-To-Guides/Using-callback-groups.onhtml) to learn more. The CAR publishes various topic-related data at set rates, so I'm looking to run various groups of mutually exclusive callbacks at a set interval (i.e. `MutuallyExclusive`)
|
||||
|
||||
[^2]: The simplest implementation did the job:
|
||||
|
||||
```cpp
|
||||
...
|
||||
template <typename T>
|
||||
class ThreadSafeQueue {
|
||||
public:
|
||||
void push(const T& item) {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
queue_.push(item);
|
||||
condition_.notify_one();
|
||||
}
|
||||
|
||||
bool pop(T& item, std::chrono::milliseconds timeout = std::chrono::milliseconds(0)) {
|
||||
std::unique_lock<std::mutex> lock(mutex_);
|
||||
if (timeout.count() > 0) {
|
||||
if (!condition_.wait_for(lock, timeout, [this] { return !queue_.empty(); })) {
|
||||
return false;
|
||||
}
|
||||
} else if (queue_.empty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
item = queue_.front();
|
||||
queue_.pop();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool empty() const {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
return queue_.empty();
|
||||
}
|
||||
|
||||
size_t size() const {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
return queue_.size();
|
||||
}
|
||||
|
||||
void clear() {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
std::queue<T> empty;
|
||||
std::swap(queue_, empty);
|
||||
}
|
||||
...
|
||||
};
|
||||
```
|
||||
191
src/content/autonomous-racing/refactoring-a-state-machine.mdx
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
---
|
||||
title: "refactoring a state machine"
|
||||
description: "Improving the design and performance of a complex state machine used in autonomous racing systems"
|
||||
date: "05/06/2025"
|
||||
useKatex: true
|
||||
---
|
||||
|
||||
Given the recent switch to the [Marelli flag system](https://www.racecar-engineering.com/articles/wrc-electronic-yellow-flag-system/) in autonomous races, the [CAR](https://autonomousracing.dev/) flag state machine needed to be expanded and refactored.
|
||||
|
||||
# the problem
|
||||
|
||||
Our original state machine implementation had grown organically over several racing seasons. What started as a simple finite state machine under a more trivial flag system grew vastly more complex as it maladapted to the new flag system. Specifically, the original flag system only had one flag for the entire track and a few car-specific ones (i.e. request to retire the car after a rules violation). The Marelli flag system has both track _and_ vehicle flags, each of which enforces different requirements.
|
||||
|
||||
The state machine worked "fine" for its initial use case:
|
||||
|
||||
1. States were read in through a declarative `state_machine.yaml` in which states had a few entry/exit conditions.
|
||||
|
||||
```yaml
|
||||
State_Machine:
|
||||
init_state: "Red Track"
|
||||
states:
|
||||
red_track:
|
||||
id: "Red Track"
|
||||
entry_condition: "track_flag == 3"
|
||||
state_actions:
|
||||
desired_velocity: "velocity=0"
|
||||
next_states: ["Overtake", "Switch to Pits", ...]
|
||||
```
|
||||
|
||||
2. Subsequently, the code was read in and parsed with a [cpp yaml library](https://github.com/jbeder/yaml-cpp).
|
||||
|
||||
3. Since all of this was done at runtime, `state_actions`, `entry_condition`, and `next_states` were _manually parsed_, hard-coded in a sort of custom language encoded in a string. Opening up our codebase, I found this set of foundational structs that the data was converted into:
|
||||
|
||||
```cpp
|
||||
struct Action {
|
||||
std::string func;
|
||||
std::vector<std::string> param_types;
|
||||
std::vector<std::variant<int, double, std::string>> params;
|
||||
};
|
||||
|
||||
struct Expression {
|
||||
std::string var, func, value;
|
||||
};
|
||||
|
||||
struct Condition {
|
||||
int left, right;
|
||||
std::string logic_op = "";
|
||||
};
|
||||
```
|
||||
|
||||
A state action could be anything from a variable assignment to a function call, all of which had variable syntax and parsing logic by extension. One method had the following signature:
|
||||
|
||||
```cpp
|
||||
std::unordered_map<std::string, std::vector<std::variant<int, double>>> StateMachine::check_state_change;
|
||||
```
|
||||
|
||||
It was time for a change.
|
||||
|
||||
# the refactor
|
||||
|
||||
## testing
|
||||
|
||||
As I was unaware of the internals of every possible state transition in the machine, I simply coded up an exhaustive test across all transitions looking something like below. Invalid/impossible state transitions according to the Marelli flag system are omitted for simplicity.
|
||||
|
||||
```cpp
|
||||
state_machine_->override_state(current_state);
|
||||
|
||||
for (int tf : track_flags) {
|
||||
for (int vf : vehicle_flags) {
|
||||
for (int pl : pit_locations.at(current_state)) {
|
||||
for (double vs : vehicle_speeds.at(current_state)) {
|
||||
if (state_machine_->get_current_state() != expected_state) {
|
||||
FAIL() << "incorrect state change\n";
|
||||
...
|
||||
```
|
||||
|
||||
This was integrated into our CI via `CMake` + `colcon test`, giving (near) certainty that any state machine passing the test was exactly what we needed.
|
||||
|
||||
## design considerations
|
||||
|
||||
I knew state machines were ubiquitous in software so I explored a few previous approaches:
|
||||
|
||||
1. One big switch: nested conditions (i.e. all combinations of vehicle + track flags) seemed a bit nightmarish to look at as we continue to build out the state machine
|
||||
|
||||
2. [Boost MSM](https://www.boost.org/library/latest/msm/): this and other existing libraries looked enticing but three things stopped me:
|
||||
1. Integrating dependencies into our production code is simple but takes time for other members of the team to vet it
|
||||
2. It also bloats our deployment latency and final size because:
|
||||
3. It is overkill and writing things from scratch is more fun
|
||||
|
||||
3. A minimalist and expressive approach:
|
||||
|
||||
## final design: templates and bitmasks
|
||||
|
||||
Considering each element of the state machine, I simplified it into the following components:
|
||||
|
||||
- State: uniquely encode vehicle and track flags
|
||||
- Transitions: perform arbitrary side effects depending on state
|
||||
- Conditions: limit state transitions according to entry and exit states
|
||||
|
||||
### struct design
|
||||
|
||||
Since we only have $N\ll 32$ vehicle flags and $M\ll 32$ track flags, all of this information (all of the state flags) can be encoded in a `uint32_t`:
|
||||
|
||||

|
||||
|
||||
The state must also store a few other relevant details:
|
||||
|
||||
```cpp
|
||||
using VehicleTrackFlag = uint32_t;
|
||||
|
||||
struct StateInfo {
|
||||
VehicleTrackFlag flags;
|
||||
uint8_t pit_location;
|
||||
bool pit_stop;
|
||||
double speed;
|
||||
...
|
||||
};
|
||||
|
||||
enum class StateID { State1, State2, ..., Last };
|
||||
```
|
||||
|
||||
A transition has entry and exit conditions which are compositions of state (i.e. just our bitmask) and an arbitrary action (i.e. a lambda). The entry/exit bits must be set/unset in order to enter/exit the state.
|
||||
|
||||
```cpp
|
||||
struct Transition {
|
||||
    VehicleTrackFlag entry;
|
||||
    VehicleTrackFlag exit;
|
||||
std::function<void(State&)> action{};
|
||||
StateID to;
|
||||
};
|
||||
```
|
||||
|
||||
### the event loop
|
||||
|
||||
Now that some of the low-level details are resolved, I considered the design top-down. I wanted the main loop to simply iterate through all connected states and check if they could be entered:
|
||||
|
||||
```cpp
|
||||
void tick(State& state, StateID& state_id) {
|
||||
auto mask = state.flags;
|
||||
for (auto&& prospective_state : state_table[current]) {
|
||||
bool enter = (mask & prospective_state.entry) == prospective_state.entry;
|
||||
bool exit = (mask & prospective_state.exit ) == 0;
|
||||
if (enter && exit) {
|
||||
prospective_state.action(state);
|
||||
state_id = prospective_state.to;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The `state_table` would need to be a "master lookup" mapping a state id (enum index) to several `Transition`s[^1]:
|
||||
|
||||
```cpp
|
||||
constexpr std::array<std::span<const Transition>,
|
||||
static_cast<std::size_t>(StateID::Last)>
|
||||
state_table {
|
||||
red_track_flags_transitions,
|
||||
switch_to_pits_transitions,
|
||||
...
|
||||
};
|
||||
```
|
||||
|
||||
Finally, the collection of transitions from some state to another are declaratively defined. I inlined each `Transition` with [anonymous struct syntax](https://en.cppreference.com/w/c/language/struct_initialization.html) for the purposes of brevity:
|
||||
|
||||
```cpp
|
||||
constexpr Transition red_track_flag_transitions[] = {
|
||||
// entry exit action destination
|
||||
...
|
||||
// example: stay in the same state
|
||||
    { make_vtf(TrackFlag::Red), 0, null_action, StateID::RedTrack }
|
||||
};
|
||||
```
|
||||
|
||||
## final considerations
|
||||
|
||||
The final state machine is not bad:
|
||||
|
||||
- Expressive and (relatively) succinct considering the massive amounts of logic encoded
|
||||
- Easily extendible
|
||||
- Memory efficient (no heap allocations, compressed state)
|
||||
- Quick: the [tight event loop](https://www.wikiwand.com/en/dictionary/tight_loop) is fast, especially given the outdegree of any state machine node is $\leq5$
|
||||
|
||||
but it of course has some downsides:
|
||||
|
||||
- Inflexible for new logic: entry/exit conditions are mere bits. More comprehensive conditions (e.g. "perform at least 3 laps before entering this state") would require a refactor with lambdas
|
||||
- Harder to read than the YAML custom language (we're not going back to that)
|
||||
|
||||
A large improvement if you ask me.
|
||||
|
||||
[^1]: Why a [std::span](https://www.cppreference.com/w/cpp/container/span.html)? Here, the number of transitions for each array is known at compile-time. A `std::vector` would heap-allocate the structs and a `std::array` would be difficult given that the `Transition` arrays are each of varying sizes. In contrast, one can extend this state machine by adding an entry to the array and nothing more—the `std::span` performs the magic.
|
||||
|
|
@ -8,8 +8,16 @@ const base = z.object({
|
|||
useD3: z.boolean().optional(),
|
||||
scripts: z.array(z.string()).optional(),
|
||||
redirect: z.string().optional(),
|
||||
showToc: z.boolean().optional(),
|
||||
});
|
||||
|
||||
export const collections = {
|
||||
x: defineCollection({ type: "content", schema: base }),
|
||||
algorithms: defineCollection({ type: "content", schema: base }),
|
||||
software: defineCollection({ type: "content", schema: base }),
|
||||
meditations: defineCollection({ type: "content", schema: base }),
|
||||
"autonomous-racing": defineCollection({ type: "content", schema: base }),
|
||||
death: defineCollection({ type: "content", schema: base }),
|
||||
|
||||
git: defineCollection({ type: "content", schema: base }),
|
||||
gists: defineCollection({ type: "content", schema: base }),
|
||||
};
|
||||
|
|
|
|||
34
src/content/gists/zen.mdx
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
---
|
||||
title: "zen.lua"
|
||||
date: "12/12/2025"
|
||||
---
|
||||
|
||||
[zen-mode.nvim](https://github.com/folke/zen-mode.nvim), no plugin needed.
|
||||
|
||||
```lua
|
||||
local state = nil
|
||||
|
||||
vim.keymap.set('n', '<leader>iz', function()
|
||||
if state then
|
||||
for k, v in pairs(state) do
|
||||
vim.opt[k] = v
|
||||
end
|
||||
state = nil
|
||||
else
|
||||
    state = {
      number = vim.opt.number:get(),
      relativenumber = vim.opt.relativenumber:get(),
      signcolumn = vim.opt.signcolumn:get(),
      statuscolumn = vim.opt.statuscolumn:get(),
      laststatus = vim.opt.laststatus:get(),
      cmdheight = vim.opt.cmdheight:get(),
    }
|
||||
vim.opt.number = false
|
||||
vim.opt.relativenumber = false
|
||||
vim.opt.signcolumn = "no"
|
||||
vim.opt.statuscolumn = ""
|
||||
vim.opt.laststatus = 0
|
||||
vim.opt.cmdheight = 0
|
||||
end
|
||||
end, { desc = "Toggle Zen Mode" })
|
||||
```
|
||||
173
src/content/git/auto-theme.nvim.mdx
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
---
|
||||
title: "auto-theme.nvim"
|
||||
slug: "auto-theme.nvim"
|
||||
date: "28/11/2025"
|
||||
---
|
||||
|
||||
# the problem
|
||||
|
||||
I toggle between light and dark mode at around 17:00 every day. Resetting my environment and theme across all my applications was a pain. I built auto-theme.nvim to update my neovim theme automatically.
|
||||
|
||||
## the solution
|
||||
|
||||
I spawn a OS-specific watcher to listen, in a non-polling manner, for theme updates:
|
||||
|
||||
- linux: I integrate with D-Bus, parsing and filtering messages for settings changes to the system appearance.
|
||||
- macOS: I integrate with the Apple toolkit AppKit to do the same thing--wait for a theme changed notification to be pushed out to the running app object and print accordingly.
|
||||
|
||||
Stdout is continually parsed from the job and the appropriate theme is set.
|
||||
|
||||
# the interesting problem
|
||||
|
||||
OK, cool. Now, how can I implement automatic theme-switching for _all_ programs I use?
|
||||
|
||||
> Note: I use my color scheme [midnight.nvim](/git/midnight.nvim.html)
|
||||
|
||||
I most commonly use the following applications:
|
||||
|
||||
1. [neovim](https://neovim.io/)
|
||||
2. [ghostty](https://ghostty.org/)
|
||||
3. [sioyek](https://sioyek.info/)
|
||||
4. [ungoogled-chromium](https://github.com/ungoogled-software/ungoogled-chromium)
|
||||
5. [swaywm](https://swaywm.org/)
|
||||
6. [rofi](https://github.com/davatorium/rofi)
|
||||
7. [tmux](https://github.com/tmux/tmux/wiki)
|
||||
8. [fzf](https://github.com/junegunn/fzf)
|
||||
9. [ripgrep](https://github.com/BurntSushi/ripgrep)
|
||||
10. [zsh](https://www.zsh.org/)
|
||||
|
||||
All of the code can be found in my [dotfiles](https://github.com/barrett-ruth/dots). The implementations are scattered and I provide no guarantee that files will not be moved.
|
||||
|
||||
## success criteria
|
||||
|
||||
I run or trigger _one command_--every application updates automatically.
|
||||
|
||||
The feasibility of this depends on the underlying support each application has for dynamically reloading its configuration. In many cases this is not possible.
|
||||
|
||||
## the solution
|
||||
|
||||
As of November 28, 2025, I've created [this script](https://github.com/barrett-ruth/dots/blob/main/scripts/theme) which is bound by a [karabiner](https://karabiner-elements.pqrs.org/) and [keyd](https://github.com/rvaiya/keyd) binding for macOS and linux, respectively, for quick access.
|
||||
|
||||
### successes
|
||||
|
||||
1. neovim: [auto-theme.nvim](https://github.com/barrett-ruth/auto-theme.nvim)
|
||||
2. ghostty: Ghostty supports [light and dark themes based on the system appearance](https://ghostty.org/docs/config/reference#theme)--easy.
|
||||
3. sioyek: Any changes to user configuration are automatically reloaded--my script updates the program's settings file `prefs_user.config` in-place
|
||||
4. ungoogled-chromium: I folded and used the default system theme which automatically reads and updates according to the system environment
|
||||
5. swaywm (linux): sway reads from a symlink'ed theme file updated by the scripts; `swaymsg reload` triggers an instant window-manager-wide reload
|
||||
6. rofi: the config file, `config.rasi`, derives its theme from a symlink'ed file updated and reloaded by the script
|
||||
7. tmux: similarly to rofi, the config `tmux.conf` reads from symlink'ed theme files that are automatically reloaded with `source-file`. I also refresh the UI with `refresh-client`:
|
||||
|
||||
```sh
|
||||
ln -sf ~/.config/tmux/themes/$theme.tmux ~/.config/tmux/themes/theme.tmux
|
||||
tmux source-file ~/.config/tmux/themes/$theme.tmux
|
||||
tmux refresh-client -S
|
||||
```
|
||||
|
||||
### failures
|
||||
|
||||
Unfortunately, the following programs I've found nearly impossible to dynamically reload:
|
||||
|
||||
8. fzf: Overwriting fzf's themes, from the interactive shell `fzf` binary to `fzf-{cd,file}-widget` to integration with [fzf-lua](https://github.com/ibhagwan/fzf-lua/), I found this potentially doable but just _way too complex_. Feel free to investigate yourself--I'm going with the default theme.
|
||||
9. ripgrep: I use the default theme. The ripgrep global configuration file does not support environment variables, exterminating the option to provide a `${THEME}`-based path in the global configuration file.
|
||||
10. zsh: it's impossible to update `$THEME` across all existing shells (simply a limit of posix). However, all affected _programs_ will read the proper `$THEME`--I'm fine compromising here.
|
||||
|
||||
### upd: fzf, ripgrep, fzf-lua, and shell improvements <span class="date">30/11/2025</span>
|
||||
|
||||
After some _extreme_ amounts of finagling I'm now able to automatically update fzf and ripgrep themes both in the shell (after re-rendering the prompt\*) and in new fzf-lua instances. I consider this a 99% win.
|
||||
|
||||
I do it with the following strategy for:
|
||||
|
||||
a) cli programs
|
||||
|
||||
Since, according to \#10 above, it is impossible to update all running shells, the closest I can come to auto-updating program themes is to do so in between terminal prompts. To achieve this, I added a zsh precmd hook to my `zshrc` for both programs which checks if a symlink has been updated:
|
||||
|
||||
- fzf: If `~/.config/fzf/themes/theme` is updated, re-export `$FZF_DEFAULT_OPTS` to include the new colors:
|
||||
|
||||
```zsh
|
||||
_fzf_theme_precmd() {
|
||||
local theme_file=~/.config/fzf/themes/theme
|
||||
local theme_target=$(readlink "$theme_file" 2>/dev/null) || return
|
||||
typeset -g _FZF_THEME_TARGET
|
||||
test "$theme_target" = "$_FZF_THEME_TARGET" && return
|
||||
_FZF_THEME_TARGET="$theme_target"
|
||||
test -r "$theme_file" && export FZF_DEFAULT_OPTS="$(<"$theme_file") $FZF_OPTS"
|
||||
}
|
||||
add-zsh-hook precmd _fzf_theme_precmd
|
||||
```
|
||||
|
||||
- ripgrep: If `~/.config/rg/themes/theme` is updated, re-build the `$RIPGREP_CONFIG_PATH` file as a concatenation of the _new_ theme and a separate _base_ configuration file and re-export the environment variable.
|
||||
|
||||
Any and all shells, after re-rendering their terminal prompts, will see proper colors for all fzf and ripgrep commands.
|
||||
|
||||
b) neovim
|
||||
|
||||
This is a bit trickier. How can a running neovim process automatically update its internal colors used by fzf-lua? There are two aspects of this problem:
|
||||
|
||||
1. Finding and interacting with all existing Neovim instances: RPCs with remote neovim features provide the ability to remotely probe neovim instances.
|
||||
|
||||
I use an RPC to trigger the update (see \#2 below). Of course, this requires automatically configuring a socket for each neovim instance to listen on. I use the process id, unique to the neovim instance--any 1:1 mapping from neovim instance to socket identifier will do:
|
||||
|
||||
```lua
|
||||
local socket_path = ('/tmp/nvim-%d.sock'):format(vim.fn.getpid())
|
||||
vim.fn.serverstart(socket_path)
|
||||
```
|
||||
|
||||
Then send a command like so (see `:h remote`):
|
||||
|
||||
```sh
|
||||
nvim --server "$socket" --remote-send "<c-o><cmd>lua require('fzf_reload').reload()<cr>" 2>/dev/null || true
|
||||
```
|
||||
|
||||
Neovim instances can be found by just listing `/tmp/nvim-*.sock`.
|
||||
|
||||
2. Re-configuring fzf-lua: fzf-lua does not support "dynamic" reconfiguration but you can re-initialize the plugin with `require('fzf-lua').setup(opts)`.
|
||||
|
||||
- fzf: I expose a function for RPC calls `fzf_theme.reload_colors()` which re-initializes the fzf environment. Special care must be taken to store and pass down the _initial_ fzf-lua configuration and update it with the new environment's colors.
|
||||
- ripgrep: automatically re-reads from `$RIPGREP_CONFIG_PATH`, a symlink updated by my theme script
|
||||
|
||||
I confess that this solution is not perfect. For example, existing pickers cannot have their theme dynamically re-loaded ([I'm looking into this](https://github.com/ibhagwan/fzf-lua/discussions/2448)) as `FzfLua resume` cannot build theme context. I'm close enough (for now! >:)).
|
||||
|
||||
### upd: neovim, tmux improvements <span class="date">2/23/2025</span>
|
||||
|
||||
Apparently, neovim [already supports auto-switching the `vim.o.background`](https://github.com/neovim/neovim/commit/d460928263d0ff53283f301dfcb85f5b6e17d2ac) as of November 26, 2024. So, technically, auto-theme.nvim is defunct. Welp, who cares... it was fun to make! I use the following config to auto-toggle the theme when the aforementioned option is set:
|
||||
|
||||
```lua
|
||||
vim.api.nvim_create_autocmd({ 'OptionSet' }, {
|
||||
pattern = 'background',
|
||||
callback = function()
|
||||
vim.cmd.colorscheme(
|
||||
vim.o.background == 'dark' and 'midnight' or 'daylight'
|
||||
)
|
||||
end,
|
||||
group = vim.api.nvim_create_augroup(
|
||||
'Midnight',
|
||||
{ clear = true }
|
||||
),
|
||||
})
|
||||
```
|
||||
|
||||
tmux also supports this as of 3.6--see [here](https://github.com/tmux/tmux/issues/4699). I updated my tmux.conf, conveniently removing my `~/.config/tmux/themes/*` symlink hackiness:
|
||||
|
||||
```tmux
|
||||
if -F '#{==:#{client_theme},dark}' {
|
||||
# set dark mode
|
||||
}
|
||||
|
||||
if -F '#{==:#{client_theme},light}' {
|
||||
# set light mode
|
||||
}
|
||||
```
|
||||
|
||||
## upd: improve tmux theme updating <span class="date">14/12/2025</span>
|
||||
|
||||
The above config does not always update the tmux theme in tandem with the system theme. For example, on system startup, the tmux theme is not set. I could only get around this by manually reloading the configuration _inside_ of tmux.
|
||||
|
||||
It is simpler (and more correct) to set the theme as a function of when tmux itself detects changes to the system theme. This is possible by leveraging the exposed hooks `client-{light,dark}-theme` as follows:
|
||||
|
||||
```tmux
|
||||
set-hook -g client-light-theme 'source $XDG_CONFIG_HOME/tmux/themes/daylight.conf'
|
||||
set-hook -g client-dark-theme 'source $XDG_CONFIG_HOME/tmux/themes/midnight.conf'
|
||||
```
|
||||
|
||||
where the configuration files house the individual theme logic.
|
||||
7
src/content/git/barrettruth.com.mdx
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
---
|
||||
title: "barrettruth.com"
|
||||
date: "07/10/2025"
|
||||
slug: "barrettruth.com"
|
||||
---
|
||||
|
||||
code for this website
|
||||
8
src/content/git/bmath.mdx
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
title: "bmath"
|
||||
date: "09/10/2025"
|
||||
---
|
||||
|
||||
wip header-only c++23 math library
|
||||
|
||||
built to learn and apply modern c++ in a competitive programming environment
|
||||
10
src/content/git/competitive-programming.mdx
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
title: "competitive-programming"
|
||||
date: "09/10/2025"
|
||||
---
|
||||
|
||||
a collection of my competitive programming solutions categorized as follows:
|
||||
|
||||
- `/kattis`: a few ICPC problems from UVA's "practices"
|
||||
- `/codeforces`: codeforces contests, including every division 4 round
|
||||
- `/cses`: ~100 basic cses problems
|
||||
69
src/content/git/cp.nvim.mdx
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
---
|
||||
title: "cp.nvim"
|
||||
slug: "cp.nvim"
|
||||
date: "10/10/2025"
|
||||
---
|
||||
|
||||
Things have changed since I last documented my competitive programming setup [here](/software/my-competitive-programming-setup.html).
|
||||
|
||||
# my goals
|
||||
|
||||
After many months of using the aforementioned `make` based setup, I had a few qualms:
|
||||
|
||||
- <u>I'm lazy</u>: I grew tired of copying (and mis-copying) inputs, outputs,
|
||||
etc from online judges.
|
||||
- <u>I'm lazy</u>: I frequently submitted incorrect solutions after erroneously
|
||||
asserting that my outputs matched those of the sample test cases
|
||||
- <u>External dependencies</u>: it unsettles me that my bare-bones setup
|
||||
required copy-pasting an entire suite of scripts
|
||||
- <u>Non-native neovim experience</u>: while composition and the UNIX philosophy
|
||||
are great, there's only so much you can do with pipes and files. - Raw I/O
|
||||
files meant I couldn't see colored stdin/stdout - Fine-grained per-testcase
|
||||
I/O was suspect--isolating and running a subset of test cases required manual
|
||||
intervention
|
||||
|
||||
The solution was to leverage Neovim's great APIs to give me granular control over every aspect of my problem-solving experience.
|
||||
|
||||
# the solution: cp.nvim
|
||||
|
||||
The GitHub page documents the plugin well enough so I'll avoid re-hashing it here. Instead, what's more interesting to document is why I thought this was a worthwhile experience.
|
||||
|
||||
1. <u>Making Something Useful for Others</u>: cp.nvim is an opportunity for me
|
||||
to make my first open-source project "right"--not some side project or demo,
|
||||
but a *real*, usable tool that I'll be rolling out to the public soon. I
|
||||
consider the following in my active development of the plugin:
|
||||
|
||||
- Comprehensive continuous integration (_real_ testing, linting, and more)
|
||||
- [LuaRocks](https://luarocks.org/) integration (the future of neovim package management)
|
||||
- Concise and thorough Vimdoc documentation that communicates effectively
|
||||
- Modern lua tooling: use of [busted](https://lunarmodules.github.io/busted/), [selene](https://kampfkarren.github.io/selene/) and more integrated with the neovim lua interpreter
|
||||
- Sensible user defaults & extreme customization
|
||||
- Proper versioning, tagging, and releases
|
||||
|
||||
2. <u>The Neovim Community</u>: I'm elated to finally give back to the community
|
||||
(even if no one uses this plugin). [folke](https://github.com/folke),
|
||||
[bfredl](https://github.com/bfredl), and
|
||||
[echasnovski](https://github.com/echasnovski) are my greatest inspirations as
|
||||
an open-source developer and I've had enough of taking without giving back.
|
||||
|
||||
- In the coming months I plan to contribute to [neovim core](https://github.com/neovim/neovim), including making `:checkhealth` asynchronous and integrating an [mdx](https://mdxjs.com/) parser.
|
||||
|
||||
3. <u>Learning Random things</u>: I think this plugin is *really* cool by virtue
|
||||
of its efficacy and the miscellany of knowledge I accrued in the 15k+ LOC as
|
||||
of version v0.3.0. Some things I learned include:
|
||||
|
||||
- <u>ANSI terminal colors and escape codes</u>: I wrote my own stateful ANSI
|
||||
escape sequence parser to map raw bytes to native neovim highlighted text
|
||||
- <u>Extmarks</u>: neovim extmarks (`:h extmarks`) are extremely powerful. Here,
|
||||
I used them to apply dynamic highlighting across various components of the
|
||||
plugin but I also plan to leverage virtual text to catch compile errors in
|
||||
real-time
|
||||
- <u>VIM filetypes and diffing</u>: Vim is strange and the event-based system is
|
||||
fragile. I faced filetype detection race conditions and odd side effects of
|
||||
functions (such as `:diffthis` resetting `foldcolumn`).
|
||||
- <u>[LuaCATS](https://github.com/LuaCATS)</u>: apparently writing comments is
|
||||
the best way to typecheck in lua...
|
||||
- <u>The (Neo)Vim event loop</u>: Scraper subprocesses spawned with
|
||||
`vim.system`. Though a powerful API, I often had to obey the event loop and
|
||||
wrap side effects with `vim.schedule` to ensure they ran after jobs finished.
|
||||
This was useful to defer UI updates.
|
||||
6
src/content/git/dots.mdx
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
---
|
||||
title: "dots"
|
||||
date: "07/10/2025"
|
||||
---
|
||||
|
||||
collection of configurations for neovim, zsh, ghostty, xorg, tmux, as well as os-specific configurations and custom scripts
|
||||
9
src/content/git/import-cost.nvim.mdx
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
---
|
||||
title: "import-cost.nvim"
|
||||
slug: "import-cost.nvim"
|
||||
date: "1/1/2024"
|
||||
---
|
||||
|
||||
display javascript import costs inside of neovim
|
||||
|
||||

|
||||
9
src/content/git/midnight.nvim.mdx
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
---
|
||||
title: "midnight.nvim"
|
||||
slug: "midnight.nvim"
|
||||
date: "8/11/2025"
|
||||
---
|
||||
|
||||
# a theme for code, not colors
|
||||
|
||||
I was tired of all the over-engineered and distracting color schemes. I wrote this to focus on the code in competitive programming and workplace environments. I included highlighting of constants (e.g. strings, numbers, booleans) and language keywords to emphasize the maximally important aspects of code and its structure, respectively.
|
||||
6
src/content/git/sl.mdx
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
---
|
||||
title: "sl"
|
||||
date: "09/10/2025"
|
||||
---
|
||||
|
||||
[archive](/meditations/suck-less-or-suck-more.html) of [suckless](https://suckless.org/) repositories
|
||||
6
src/content/git/wp.mdx
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
---
|
||||
title: "wp"
|
||||
date: "07/10/2025"
|
||||
---
|
||||
|
||||
some of my wallpapers
|
||||
35
src/content/meditations/suck-less-or-suck-more.mdx
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
---
|
||||
title: "suck less or suck more"
|
||||
date: "30/05/2025"
|
||||
---
|
||||
|
||||
I love [suckless software](https://suckless.org/) and the suckless philosophy. I've been using [dwm](https://dwm.suckless.org/), [st](https://st.suckless.org/), [dmenu](https://dwm.suckless.org/), and [slock](https://tools.suckless.org/slock/) for years. You can see my code [here](https://git.barrettruth.com).
|
||||
|
||||
Simplicity is always better and I love being able to understand why things are happening. It's fun, it's quirky, it's cool.
|
||||
|
||||
Except when it doesn't work.
|
||||
|
||||
After years of use, I've found that suckless software is not built to be compatible with other less-sucking software. It is not compatible with running 40 ROS nodes at a time, nor 3 Chrome instances with a plethora of tabs and jobs open. I've had my st terminal go blank and my dwm crash on me consistently.
|
||||
|
||||
Maybe this is the point. But I can't avoid [foxglove](https://wiki.ros.org/FoxgloveStudio), I can't avoid bloat to work for school and research-related projects. Most importantly, after spending half of my day patching st and having it crash on me, I don't feel like fixing it any longer. **Sadly, suck-ful software is here to stay and I don't have time to fight it right now.**
|
||||
|
||||
I've switched to [spectrwm](https://github.com/conformal/spectrwm) (essentially [this](https://github.com/conformal/spectrwm)) and [ghostty](https://ghostty.org/).
|
||||
|
||||
# upd: goodbye xorg <span class="date">09/10/2025</span>
|
||||
|
||||
I am now sick of Xorg and their lack of per-monitor DPI scaling. The new stack is:
|
||||
|
||||
- window system protocol: [wayland](https://wayland.freedesktop.org/)
|
||||
- compositor: [sway](https://swaywm.org/)
|
||||
- dynamic menu: [rofi](https://github.com/davatorium/rofi)
|
||||
- screen lock: [swaylock](https://github.com/swaywm/swaylock) triggered by [swayidle](https://github.com/swaywm/swayidle)
|
||||
- [its](https://github.com/swaywm/swaylock/issues/416) [pretty](https://github.com/swaywm/swayidle/issues/169) [bad](https://github.com/swaywm/swaylock/issues/306)
|
||||
|
||||
# upd 2: hello [hyprland](https://hypr.land/)
|
||||
|
||||
Unfortunately, sway was too fragile. The floating window support was juvenile (invisible windows, flickering, inconsistent tiling). Further, while [redesigning my developer workflow in December 2025](/software/improving-my-developer-workflow.html), I realized that I needed a level of customizability that sway could not provide. The new stack is as follows:
|
||||
|
||||
- compositor: hyprland
|
||||
- screen lock: [hypridle](https://wiki.hypr.land/Hypr-Ecosystem/hypridle/)
|
||||
- browser: [Google Chrome](https://www.google.com/chrome/)
|
||||
- Sigh... I know. I had ungoogled-chromium freak out when using my hardware key and completely break. Cookie-related issues also required me to be proactive with respect to website permissions. With all the weird sites I browse this was not a recipe for success. Do not ask about the sites I browse.
|
||||
22
src/content/meditations/the-problem-with-cs-curricula.mdx
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
title: "the problem with cs curricula"
|
||||
date: "16/05/2025"
|
||||
useKatex: true
|
||||
---
|
||||
|
||||
Edsger Wybe Dijkstra's ["On the cruelty of really teaching computing science"](https://www.cs.utexas.edu/~EWD/transcriptions/EWD10xx/EWD1036.html) perfectly sums up my gripes with how Computer Science is taught at a university level (at my school, at least).
|
||||
|
||||
Succinctly put, my time learning computer science at my unnamed college exemplified nearly everything he (and I) believe a CS curriculum should _not do_:
|
||||
|
||||
- Ignore the existential questions about computer programs (what are they? why do they exist? can they want? what should they be used for?)
|
||||
- Ignore the notion of program behavior, i.e. provability (this is set aside as an advanced core class, counterintuitively reserved for a third or fourth year).
|
||||
- Excessively simplify and frame new technologies with analogy, effectively instilling maladaptive thinking patterns that fail to extend to more novel problems
|
||||
- Give up on doing the inverse of the above because it is too hard for young students.
|
||||
|
||||
Walking out of my third year, I left with the sad realization that I got by the majority of my classes by only understanding things as they pertained to assignments and exams. **And by "got by", I mean straight A's**.
|
||||
|
||||
I always knew something was wrong with how my school taught computer science (despite it being the biggest major as of 2025). As of late, though, I realized the gargantuan amount of damage it caused to my reasoning abilities. Damage that I have to reverse by, essentially, doing everything all over again.
|
||||
|
||||
My [competitive programming journey](/algorithms/competitive-programming-log.html) epitomizes this point: to this day I struggle with reasoning, argumentation, and understanding program behavior. I know how a segment tree works but can't formalize the constraints of a problem. I can do dynamic programming on trees but I can barely manipulate and work with primitive mathematical concepts such as the $gcd$ function. I cannot think of a more useless skillset.
|
||||
|
||||
Nearly all of this is my fault. However, _it should not be possible for this to happen in a computer science curriculum_. In other words, Dijkstra is right.
|
||||
83
src/content/software/designing-this-website.mdx
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
---
|
||||
title: "designing this website"
|
||||
date: "18/06/2024"
|
||||
---
|
||||
|
||||
# HTML, JavaScript, and CSS
|
||||
|
||||
That's all there is to it.
|
||||
|
||||
I thought about using the following frameworks:
|
||||
|
||||
1. [React.js](https://react.dev/)
|
||||
2. [Next.js](https://nextjs.org/)
|
||||
3. [Hugo](https://gohugo.io/)
|
||||
4. [Astro](https://astro.build/)
|
||||
|
||||
But I did not actually _need_ any of them to make this site look decent.
|
||||
|
||||
## what i've learned
|
||||
|
||||
Of course, most people build simple websites like these to learn a new technology or framework, not to use an optimal tool. That's actually why I [hosted this website on AWS](/software/from-github-pages-to-aws.html).
|
||||
|
||||
Building this website with truly bare-bones technologies has made me appreciate _why_ these web frameworks have emerged.
|
||||
|
||||
- Writing JavaScript to manipulate the DOM works just fine but lacks the readability and composability that many JavaScript frameworks bring to the table.
|
||||
- Re-using code is odd. For example, I created a "common.js" with general utilities—there is zero indication (both to me and my language servers) that these functions are exposed to other scripts included by the same HTML file.
|
||||
- JSX is great. Dynamically inserting HTML as raw strings or writing them line by line with the DOM is a pain, and a verbose one at that.
|
||||
- Similarly, CSS styling (inline/stylesheet) works at the small scale. However, with styles being completely divorced from the HTML itself, much is left to be desired.
|
||||
- Reusing HTML, styles, and JavaScript feels extremely fragile. Innovative type-safe, optimized, and composable solutions definitely have their place in the web.
|
||||
- **You can be efficient with HTML, JS, and CSS.** My iteration speed on this site versus other React.js/MDX blogs I have worked on is the same if not faster. While this may be a testament to my lack of JavaScript experience, I think people conclude too early that their task is beyond the technologies that form the foundation of the web today.
|
||||
|
||||
## the setup <span class="date">15/06/2024</span>
|
||||
|
||||
This website is pure HTML, CSS, and JavaScript.
|
||||
|
||||
AWS-wise, I use:
|
||||
|
||||
- S3, to host the content (static for now)
|
||||
- CloudFront, to serve and cache said content
|
||||
- Route53, to manage routing
|
||||
- GoDaddy, to reserve [barrettruth.com](https://barrettruth.com)
|
||||
|
||||
A user request can be modelled as follows:
|
||||
|
||||
1. A user accesses the website by typing barrettruth.com in their browser.
|
||||
2. GoDaddy's DNS servers are queried, which translate the domain name to my Route53's IP address.
|
||||
3. Route53 then routes the request to my CloudFront distribution associated with my S3 bucket.
|
||||
4. CloudFront checks its edge caches for the requested content. If the content is stale or not cached, CloudFront fetches the content from S3. Otherwise, it uses the cached content from an edge server.
|
||||
5. CloudFront returns the content to the user's browser.
|
||||
|
||||

|
||||
|
||||
## difficulties
|
||||
|
||||
The hardest part of hosting this website was interfacing with GoDaddy.
|
||||
|
||||
For example, configuring SSL certificates with GoDaddy is needlessly challenging. Follow [AWS's guide](https://docs.aws.amazon.com/amplify/latest/userguide/to-add-a-custom-domain-managed-by-godaddy.html) if you really want to. Otherwise, [configure your GoDaddy nameservers](https://www.godaddy.com/help/edit-my-domain-nameservers-664) and point them to your own DNS service (like Route53) instead.
|
||||
|
||||
# upd: port to astro <span class="date">22/05/2025</span>
|
||||
|
||||
I'm expanding my website to include more detailed algorithms, implementations, write-ups, and low-level optimization case studies.
|
||||
|
||||
I thought about writing these posts in the raw HTML as I've been doing and physically cringed.
|
||||
|
||||
Then I recalled the below post I made around one year ago and realized the following:
|
||||
|
||||
- Sure, you can be efficient with raw HTML/CSS/JS. However, _no matter what you do_ snippets, hotkeys, etc, nothing is faster than writing markdown.
|
||||
- Overhead (i.e. the massive overhead of copying over content, writing the html) matters
|
||||
- I'll be needing more advanced features that, while possible to do in vanilla web, would just be painful to maintain.
|
||||
- Sure, frameworks come with bloat. At this point, I'd added web components and script finagling—I was on the path to reinventing React myself.
|
||||
|
||||
Enter [astro](https://astro.build/).
|
||||
|
||||
- Lower overhead
|
||||
- Small bundle size
|
||||
- SSR opt in/out
|
||||
- Minimal boilerplate
|
||||
|
||||
Everything is now in MDX. I had to say goodbye to my d3 latex labels (I could only do this with MathJax, which I recently found out was overkill for my needs) and a bit of custom styling.
|
||||
|
||||
On the upside, I have around the same LOC, a dead-simple blog post setup ([here](https://github.com/barrett-ruth/barrettruth.com/commit/8666e5a16983b177118f6e8a3246feb0d6907fff) was my biggest commit), and the entire Astro community at my back.
|
||||
|
||||
The choice of Astro was of no significance. It did the job and that's all that matters. I'm not quite a fan of the funky `---` syntax to separate HTML and JS, though. I find it counterintuitive to separate the UI and the frontend logic, which ought to be tightly coupled. I don't want to imagine working on larger files in Astro.
|
||||
175
src/content/software/hosting-a-git-server.mdx
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
---
|
||||
title: "hosting a git server"
|
||||
date: "07/05/2025"
|
||||
---
|
||||
|
||||
# why
|
||||
|
||||
No reason. Perhaps to host personal files in the future. AWS's micro free tier is great, too.
|
||||
|
||||
# what
|
||||
|
||||
- Write my own git web ui
|
||||
- Support clones from my own website
|
||||
- Host private files on my git ui
|
||||
|
||||
# the process
|
||||
|
||||
I detail self-hosting a git server on an AWS t2.micro instance ("free" for 1 year) as of May 2025. [Git's instructions](https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols) were vastly outdated so hopefully this saves a lucky reader some time.
|
||||
|
||||
2. Create the ec2 instance with setup wizard and add \{in,out\}bound rules for \{SSH,HTTP,HTTPS,your ip\} in the wizard security group.
|
||||
3. Use an elastic ip (free) to address public ip reassigning—this is a bother when ssh'ing (new verb?) into the box locally and/or configuring an Apache HTTP server.
|
||||
4. Understand bare git repositories and the ssh protocol.
|
||||
5. Configure a keypair and ssh in (the official instructions are fine for this). I moved it to `~/.ssh` and added an alias in `~/.ssh/config` for convenience. Clone a repo on the server to test.
|
||||
6. Set up a git daemon for `git://` protocol cloning at your own risk.
|
||||
7. Set up an Apache HTTPD server.
|
||||
8. Configure file permissions for the new user:
|
||||
1. `sudo chown -R git:git /srv/git`
|
||||
2. `sudo chgrp -R apache /srv/git`
|
||||
9. To deal with "dubious ownership" issues when cloning with HTTPS, I needed to add **exactly** the following configuration to `/etc/gitconfig`. _No group permission finagling will work_! Git only allows cloning repositories that are owned by the user. If you wish to clone via SSH with, say, user A, this same user must also be employed by your HTTP server to clone the files (customize HTTPD/whatever you're using accordingly).
|
||||
|
||||
```gitconfig
|
||||
[safe]
|
||||
directory = *
|
||||
```
|
||||
|
||||
10. Security-wise, set up TLS/HTTPS with [Let's Encrypt](https://letsencrypt.org/). Further, only allow authorized people to actually _push_ to the server. The following is my HTTPD configuration file `/etc/apache/conf.d/git-server.conf` hosting the web ui at the root and clone urls at `/git`:
|
||||
|
||||
```apacheconf
|
||||
<VirtualHost *:443>
|
||||
ServerName <servername>
|
||||
|
||||
SSLEngine on
|
||||
SSLCertificateFile /etc/letsencrypt/live/<servername>/fullchain.pem
|
||||
SSLCertificateKeyFile /etc/letsencrypt/live/<servername>/privkey.pem
|
||||
|
||||
SetEnv GIT_PROJECT_ROOT /srv/git
|
||||
SetEnv REMOTE_USER $REDIRECT_REMOTE_USER
|
||||
|
||||
ScriptAlias /git/ /usr/libexec/git-core/git-http-backend/
|
||||
|
||||
<Directory "/usr/libexec/git-core">
|
||||
Options +ExecCGI -MultiViews +SymLinksIfOwnerMatch
|
||||
Require all granted
|
||||
AllowOverride None
|
||||
</Directory>
|
||||
|
||||
<Files "git-http-backend">
|
||||
AuthType Basic
|
||||
AuthName "Git Access"
|
||||
AuthUserFile /srv/git/.htpasswd
|
||||
Require expr !(%{QUERY_STRING} -strmatch '*service=git-receive-pack*' || %{REQUEST_URI} =~ m#/git-receive-pack$#)
|
||||
Require valid-user
|
||||
</Files>
|
||||
ProxyPassMatch ^/git/ !
|
||||
ProxyPreserveHost On
|
||||
ProxyPass / http://127.0.0.1:8000/
|
||||
ProxyPassReverse / http://127.0.0.1:8000/
|
||||
</VirtualHost>
|
||||
```
|
||||
|
||||
11. There are a variety of choices for web ui, including [cgit](https://git.zx2c4.com/cgit/), [gitweb](https://git-scm.com/docs/gitweb) (I do not recommend this—the scripts are ancient and require manual tuning), and some even heavier options that allow for further customization. I am not a fan of viewing code on the web, so you cannot in [my custom ui](https://git.barrettruth.com). I spin up a simple python server to walk the projects in `/srv/git` and configured a systemd service to run it in the ec2 box:
|
||||
|
||||
```systemd
|
||||
[Unit]
|
||||
Description=Git Server UI
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
User=apache
|
||||
WorkingDirectory=/srv/git/git-server-ui
|
||||
ExecStart=/usr/local/bin/gunicorn --workers 3 --bind 0.0.0.0:8000 --chdir /srv/git/git-server-ui wsgi:app
|
||||
Restart=on-failure
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
```
|
||||
|
||||
12. I use the name `main` instead of `master` as my default branch (it's fewer letters, ok?)—set this up on the server too:
|
||||
|
||||
```console
|
||||
$ sudo git config --system init.defaultBranch main
|
||||
```
|
||||
|
||||
# lessons
|
||||
|
||||
- **It feels great to do things yourself**: I used GPT-4o for linux server command help, that was about it
|
||||
- **Always ask "what is this?" before using something**: this would've saved me hours of realizing a 12 year old perl script should not have been running my git ui.
|
||||
|
||||
# upd: moving to lightsail <span class="date">09/11/2025</span>
|
||||
|
||||
Welp, ec2 cost way too much (~\$15/mo!). Enter [AWS Lightsail](https://aws.amazon.com/lightsail/): small compute with a flat $5/mo charge. This is a reasonably "scalable" solution for my website—unfortunately I do not have too much traffic as of now.
|
||||
|
||||
Anyways, the migration is complete. Everything is nearly identical but my HTTPD config has been minimally updated as follows:
|
||||
|
||||
- Now that I host project descriptions on `https://barrettruth.com/git/<project>.html` and gists on `https://barrettruth.com/gist/<code>`, support redirects for `{git,gist}.barrettruth.com->barrettruth.com/{git,gist}.html`
|
||||
- Minify and simplify letsencrypt logic for the domain
|
||||
|
||||
Here's the updated config:
|
||||
|
||||
```apacheconf
|
||||
<VirtualHost *:80>
|
||||
ServerName git.barrettruth.com
|
||||
Redirect permanent / https://git.barrettruth.com/
|
||||
</VirtualHost>
|
||||
|
||||
<VirtualHost *:80>
|
||||
ServerName gist.barrettruth.com
|
||||
Redirect permanent / https://barrettruth.com/gist.html
|
||||
</VirtualHost>
|
||||
|
||||
<VirtualHost *:443>
|
||||
ServerName git.barrettruth.com
|
||||
|
||||
SSLEngine on
|
||||
SSLCertificateFile /etc/letsencrypt/live/git.barrettruth.com/fullchain.pem
|
||||
SSLCertificateKeyFile /etc/letsencrypt/live/git.barrettruth.com/privkey.pem
|
||||
Include /etc/letsencrypt/options-ssl-apache.conf
|
||||
|
||||
DocumentRoot /srv/git
|
||||
SetEnv GIT_PROJECT_ROOT /srv/git
|
||||
SetEnv GIT_HTTP_EXPORT_ALL
|
||||
ScriptAlias / /usr/libexec/git-core/git-http-backend/
|
||||
|
||||
<Directory "/usr/libexec/git-core">
|
||||
Options +ExecCGI -MultiViews +SymLinksIfOwnerMatch
|
||||
Require all granted
|
||||
</Directory>
|
||||
|
||||
ErrorLog /var/log/httpd/git_error.log
|
||||
CustomLog /var/log/httpd/git_access.log combined
|
||||
</VirtualHost>
|
||||
|
||||
<VirtualHost *:443>
|
||||
ServerName gist.barrettruth.com
|
||||
|
||||
SSLEngine on
|
||||
SSLCertificateFile /etc/letsencrypt/live/git.barrettruth.com/fullchain.pem
|
||||
SSLCertificateKeyFile /etc/letsencrypt/live/git.barrettruth.com/privkey.pem
|
||||
Include /etc/letsencrypt/options-ssl-apache.conf
|
||||
|
||||
Redirect "/" "https://barrettruth.com/gist.html"
|
||||
|
||||
ErrorLog /var/log/httpd/unified.log
|
||||
CustomLog /var/log/httpd/unified.log combined
|
||||
</VirtualHost>
|
||||
|
||||
<VirtualHost *:443>
|
||||
ServerName barrettruth.com
|
||||
|
||||
SSLEngine on
|
||||
SSLCertificateFile /etc/letsencrypt/live/git.barrettruth.com/fullchain.pem
|
||||
SSLCertificateKeyFile /etc/letsencrypt/live/git.barrettruth.com/privkey.pem
|
||||
Include /etc/letsencrypt/options-ssl-apache.conf
|
||||
|
||||
DocumentRoot /var/www/html
|
||||
<Directory /var/www/html>
|
||||
Options -Indexes +FollowSymLinks
|
||||
AllowOverride All
|
||||
Require all granted
|
||||
</Directory>
|
||||
|
||||
ErrorLog /var/log/httpd/unified.log
|
||||
CustomLog /var/log/httpd/unified.log combined
|
||||
</VirtualHost>
|
||||
```
|
||||
90
src/content/software/improving-my-developer-workflow.mdx
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
---
|
||||
title: "improving my developer workflow"
|
||||
date: "26/12/2025"
|
||||
---
|
||||
|
||||
# the problem
|
||||
|
||||
Everything about my workflow was slow.
|
||||
|
||||
# some context
|
||||
|
||||
Since May 2025, I've been using the same tmux+neovim-centric setup. Sessions house projects, performing different duties in different windows. It worked for me at the time. However, after a summer at DRW and a fall at Ramp, this became far from the case. During both internships I was met with more tools, third-party integrations, (begrudgingly) Macs, and a litany of previously irrelevant inefficiencies. Here are some problems that I ran into:
|
||||
|
||||
# the problems
|
||||
|
||||
- <u>Vibe coding</u>: I switched back and forth between
|
||||
Claude/Cursor/Windsurf/neovim in nearly every commit.
|
||||
- <u>Poor OS understanding</u>: I waddled my way through the MacOS ecosystem. I
|
||||
barely understood how the OS, package managers, and display manager worked,
|
||||
preventing me from efficiently automating it.
|
||||
- <u>Archaic git workflow</u>: Now that I was frequently collaborating on large
|
||||
codebases, I needed to context-switch rapidly. I could not. This was primarily
|
||||
due to two things:
|
||||
1. <u>Nonexistent task management system</u>: I used a raw `.txt` file to manage
|
||||
my priorities. I have also been known to use Chrome tabs as a backup todo list.
|
||||
2. <u>Poor workflow processes</u>: creating PRs, leaving reviews, and iterating
|
||||
was slow. Coding was slow. Typing was slow. My neovim broke _all_ the time. It
|
||||
was nearly impossible to keep my Linux config in sync when I went home to code
|
||||
on personal projects.
|
||||
|
||||
# the solutions
|
||||
|
||||
- <u>Quit MacOS</u>: easy enough.
|
||||
- <u>Quit vibe-coding</u>: I used vibe-coding to replace my train of thought
|
||||
rather than amplify it.
|
||||
- <u>Constantly tweak</u>: Humility, coupled with the willingness to adapt,
|
||||
would render every point above moot. I constantly analyze and improve my
|
||||
config to a reasonable[^1] extent. For example, I recently extended a hyprland
|
||||
windowing script to interactively prompt for a desired selection. This is
|
||||
because I would consistently pull incorrect Chrome instances with multiple of
|
||||
them open at a time, forcing me to manually reorder them. Evidence of this
|
||||
constant iteration can be found in the revitalized history of my
|
||||
[dotfiles](https://github.com/barrett-ruth/dots).
|
||||
- <u>Use tools that "just work"</u>: one of my friend's fathers told me that I
|
||||
was not special during a lacrosse practice in elementary school. I used many
|
||||
programs to feel special, from quirky Linux window managers to niche Neovim
|
||||
plugins. I eliminated tools that could not stand the test of time and
|
||||
sparingly chose well-regarded ones to replace them. For example, I dropped
|
||||
swaywm for hyprland and ungoogled-chromium for Google Chrome (see [my suckless
|
||||
post](/meditations/suck-less-or-suck-more.html)).
|
||||
- <u>Manage tasks effectively</u>: develop a philosophy for prioritizing and
|
||||
solving problems. Find or make a program that implements it and immediately
|
||||
abandon *everything* else. [taskwarrior](https://taskwarrior.org/) and some
|
||||
custom scripts worked for me.
|
||||
|
||||
# the final product
|
||||
|
||||
The below video shows me performing the following actions bl\*zingly fast:
|
||||
|
||||
- Previewing some pdfs in the terminal with lf and the kitty graphics protocol
|
||||
- Running this site itself locally with overseer.nvim
|
||||
- Creating a new file with oil.nvim
|
||||
- Switching to a new branch with fzf-lua
|
||||
- Committing such file with vim-fugitive
|
||||
- Pulling a specific chrome instance to the current workspace with a custom hyprland script
|
||||
|
||||
Notably, all coding, git, and task-running actions in neovim are completely separate (in their own tabs).
|
||||
This allows my coding experience to be completely uniform and unaffected by, for example, a fugitive buffer.
|
||||
I declaratively switch between these tabs with a few bindings, removing all neovim buffer/window overhead.
|
||||
|
||||
<video autoplay muted loop playsinline style={{ maxWidth: "100%", height: "auto" }}>
|
||||
<source src="/improving-my-developer-workflow/workflow.webm" type="video/webm" />
|
||||
<source src="/improving-my-developer-workflow/workflow.mp4" type="video/mp4" />
|
||||
Your browser does not support the video tag.
|
||||
</video>
|
||||
|
||||
## appendix: config changes, enumerated
|
||||
|
||||
Here are only a few of the config changes I made in the past month:
|
||||
|
||||
- Configure hyprland windows extensively to my needs. I can instantly pull and push any window in any workspace without having to memorize application-to-workspace number mappings.
|
||||
- Perform all coding-related tasks in one neovim instance rather than separate windows. This creates an improved separation of concerns and seamless integration with git actions and project task management.
|
||||
- Use taskwarrior for tasks and todo management.
|
||||
- Use Google Chrome search engines as URL shortcuts, the [fuzzy url finder](https://chromewebstore.google.com/detail/ff-fuzzy-finder-for-chrom/dbgeolnmmjmhcfndmmahnpicpmnpibep?hl=en) chrome extension to find URLs, and [a tab numberer](https://chromewebstore.google.com/detail/chrome-show-tab-numbers/pflnpcinjbcfefgbejjfanemlgcfjbna?hl=en) to find tabs.
|
||||
- Make dotfiles OS-agnostic through a variety of OS-specific conditionals in configs and refactoring the directory structure to mirror that of the Unix-like file structure (my dots repo now has `.config`, `/etc` folders).
|
||||
- Use [lf](https://github.com/gokcehan/lf) for quickly opening files. Waste a day of your life writing an awesome previewer that supports native (treesitter/vim syntax!) neovim highlighting and video/pdf/gif support. This thing is seriously cool--[check it out](https://github.com/barrett-ruth/dots/blob/main/.config/lf/lf.lua).
|
||||
- Auto-theme switching (see [here](/git/auto-theme.nvim.html)).
|
||||
- Swap to a stable AppImage build of my PDF reader, [sioyek](https://sioyek.info/), to avoid versioning dependencies.
|
||||
|
||||
[^1]: This is an art form. I leverage time-blocking, making changes _only_ based on evidence (i.e. how often do I perform this action in practice?) and intentionally selecting tools after extensive research/trial. I explored around 10 plugins before settling on [overseer.nvim](https://github.com/stevearc/overseer.nvim) when configuring how to run tasks and projects.
|
||||
36
src/content/software/my-competitive-programming-setup.mdx
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
---
|
||||
title: "my competitive programming setup"
|
||||
date: "15/04/2025"
|
||||
---
|
||||
|
||||
# my goals
|
||||
|
||||
I wanted the following features in my competitive programming (cp) setup:
|
||||
|
||||
- Flexibility: support various environments (codeforces, USACO, cses, etc.) with ease
|
||||
- Speed: instantaneous, non-blocking running/debugging; automatic environment configuration and easy code testing
|
||||
- Editor-agnostic: while I do provide first-in-class neovim integration for my setup, it should be easily portable to _any_ os/editor
|
||||
|
||||
# the solution
|
||||
|
||||
Some (POSIX-compliant, of course) scripts and a `makefile` are more than enough. I created the following intuitive way to interact with my CP setup:
|
||||
|
||||
1. `make setup`: populate the environment with configurations in `~/.config/cp-template` for `clang-format` and `clangd`
|
||||
2. `make run file`
|
||||
3. `make debug file`
|
||||
4. `make clean`
|
||||
|
||||
That's it. The `makefile` relies on some scripts that compile code and run the corresponding executables.
|
||||
|
||||
# neovim integration
|
||||
|
||||

|
||||
|
||||
Leveraging [LuaSnip](https://github.com/L3MON4D3/LuaSnip), a custom `CP` user command, and some scripting for window management and asynchronous jobs, I'm able to:
|
||||
|
||||
- Asynchronously format, run, and debug code (`:h vim.system`)
|
||||
- Use a three-window (input, output, and code) view
|
||||
- Toggle between problems instantly (yes, the windows update)
|
||||
- Automatically populate my coding buffers with competition-specific templates (i.e. USACO, CSES, etc.)
|
||||
- Run the code from the CLI in less than a second
|
||||
- Easily tweak and change the setup—there's absolutely nothing fancy.
|
||||
|
|
@ -1,26 +1,66 @@
|
|||
---
|
||||
const { title, description = "barrett ruth's website" } = Astro.props;
|
||||
import Header from "../components/Header.astro";
|
||||
import Footer from "../components/Footer.astro";
|
||||
|
||||
const {
|
||||
title,
|
||||
description = "Barrett Ruth's website",
|
||||
bodyClass = "graph-background",
|
||||
useHeader = true,
|
||||
} = Astro.props;
|
||||
---
|
||||
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<script is:inline>
|
||||
(function () {
|
||||
const stored = localStorage.getItem("theme");
|
||||
const theme =
|
||||
stored ||
|
||||
(matchMedia("(prefers-color-scheme: dark)").matches
|
||||
? "dark"
|
||||
: "light");
|
||||
document.documentElement.setAttribute("data-theme", theme);
|
||||
})();
|
||||
</script>
|
||||
<meta charset="UTF-8" />
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<meta name="description" content={description} />
|
||||
<link rel="icon" type="image/webp" href="/logo.webp" />
|
||||
<link rel="stylesheet" href="/styles/base.css" />
|
||||
<link rel="stylesheet" href="/styles/common.css" />
|
||||
<title>{title}</title>
|
||||
<slot name="head" />
|
||||
</head>
|
||||
<body>
|
||||
<slot name="header" />
|
||||
|
||||
<body class={bodyClass}>
|
||||
{useHeader && <Header />}
|
||||
<main class="main">
|
||||
<slot />
|
||||
</main>
|
||||
|
||||
<Footer />
|
||||
<script is:inline>
|
||||
window.getTopicColor = function (topicName) {
|
||||
switch ((topicName || "").toLowerCase()) {
|
||||
case "software":
|
||||
return "#0073e6";
|
||||
case "algorithms":
|
||||
return "#d50032";
|
||||
case "meditations":
|
||||
return "#6a0dad";
|
||||
case "autonomous-racing":
|
||||
return "#3d8a44";
|
||||
case "git":
|
||||
return "#cc5500";
|
||||
case "death":
|
||||
return "#000000";
|
||||
default:
|
||||
return getComputedStyle(document.documentElement)
|
||||
.getPropertyValue("--text")
|
||||
.trim();
|
||||
}
|
||||
};
|
||||
</script>
|
||||
<slot name="scripts" />
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
|||
31
src/layouts/GitLayout.astro
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
---
|
||||
const { frontmatter } = Astro.props;
|
||||
---
|
||||
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width" />
|
||||
<link rel="stylesheet" href="/styles/common.css" />
|
||||
<link rel="stylesheet" href="/styles/posts.css" />
|
||||
<link rel="stylesheet" href="/styles/git.css" />
|
||||
<slot name="head" />
|
||||
<script src="/scripts/index.js" is:inline></script>
|
||||
</head>
|
||||
<body>
|
||||
<header class="post-header">
|
||||
<a class="home-link" href="/">barrettruth.com</a>
|
||||
<h1 class="post-title">{frontmatter.title}</h1>
|
||||
{
|
||||
frontmatter.description && (
|
||||
<p class="post-description">{frontmatter.description}</p>
|
||||
)
|
||||
}
|
||||
</header>
|
||||
<main class="post-container">
|
||||
<article class="post-body">
|
||||
<slot />
|
||||
</article>
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
||||
99
src/layouts/PostLayout.astro
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
---
|
||||
import BaseLayout from "./BaseLayout.astro";
|
||||
import { getTopicColor } from "../utils/colors.js";
|
||||
import TableOfContents from "../components/TableOfContents.astro";
|
||||
|
||||
interface Props {
|
||||
frontmatter: {
|
||||
title: string;
|
||||
description?: string;
|
||||
date?: string;
|
||||
useKatex?: boolean;
|
||||
useD3?: boolean;
|
||||
scripts?: string[];
|
||||
category?: string;
|
||||
showToc?: boolean;
|
||||
};
|
||||
post?: {
|
||||
id?: string;
|
||||
collection?: string;
|
||||
slug?: string;
|
||||
};
|
||||
headings?: Array<{
|
||||
depth: number;
|
||||
slug: string;
|
||||
text: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
const { frontmatter, post, headings = [] } = Astro.props as Props;
|
||||
const {
|
||||
title,
|
||||
description,
|
||||
useKatex = false,
|
||||
useD3 = false,
|
||||
showToc = false,
|
||||
} = frontmatter;
|
||||
|
||||
let documentTitle = title;
|
||||
if (post?.collection === "git" && post?.slug) {
|
||||
documentTitle = `${post.slug}.git`;
|
||||
} else if (
|
||||
(post?.collection === "gists" || post?.collection === "gist") &&
|
||||
post?.slug
|
||||
) {
|
||||
documentTitle = `${post.slug}`;
|
||||
}
|
||||
|
||||
const topicColor = getTopicColor(post?.collection);
|
||||
---
|
||||
|
||||
<BaseLayout title={documentTitle} description={description}>
|
||||
<Fragment slot="head">
|
||||
<link rel="stylesheet" href="/styles/posts.css" />
|
||||
<link rel="stylesheet" href="/styles/graph.css" />
|
||||
{
|
||||
useKatex && (
|
||||
<link
|
||||
rel="stylesheet"
|
||||
href="https://cdn.jsdelivr.net/npm/katex@0.16.22/dist/katex.min.css"
|
||||
integrity="sha384-5TcZemv2l/9On385z///+d7MSYlvIEw9FuZTIdZ14vJLqWphw7e7ZPuOiCHJcFCP"
|
||||
crossorigin="anonymous"
|
||||
/>
|
||||
)
|
||||
}
|
||||
{useD3 && <script src="https://d3js.org/d3.v7.min.js" is:inline />}
|
||||
<slot name="head" />
|
||||
</Fragment>
|
||||
|
||||
<div
|
||||
class="post-wrapper"
|
||||
style={topicColor ? `--topic-color: ${topicColor};` : ""}
|
||||
>
|
||||
{
|
||||
showToc && headings.length > 0 && (
|
||||
<aside class="toc-column">
|
||||
<TableOfContents headings={headings} />
|
||||
</aside>
|
||||
)
|
||||
}
|
||||
|
||||
<div class="post-container">
|
||||
<header class="post-header">
|
||||
<h1 class="post-title">{title}</h1>
|
||||
{frontmatter.date && <p class="post-meta">{frontmatter.date}</p>}
|
||||
</header>
|
||||
|
||||
<article class="post-article">
|
||||
<slot />
|
||||
</article>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Fragment slot="scripts">
|
||||
<script src="/scripts/index.js" is:inline></script>
|
||||
<script src="/scripts/centerKatex.js" is:inline></script>
|
||||
{frontmatter.scripts?.map((src) => <script src={src} is:inline />)}
|
||||
<slot name="scripts" />
|
||||
</Fragment>
|
||||
</BaseLayout>
|
||||
|
|
@ -2,13 +2,46 @@
|
|||
import BaseLayout from "../layouts/BaseLayout.astro";
|
||||
---
|
||||
|
||||
<BaseLayout title="page not found">
|
||||
<Fragment slot="head">
|
||||
<link rel="stylesheet" href="/styles/base.css" />
|
||||
<link rel="stylesheet" href="/styles/404.css" />
|
||||
</Fragment>
|
||||
|
||||
<div class="container">
|
||||
<BaseLayout title="404 - Not Found">
|
||||
<div class="not-found-container">
|
||||
<h1>404</h1>
|
||||
</div>
|
||||
</BaseLayout>
|
||||
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", function () {
|
||||
const base = "barrett@ruth:~$ ";
|
||||
const el = document.querySelector(".terminal-prompt");
|
||||
if (!el) return;
|
||||
|
||||
const type = () => {
|
||||
const target = "/not-found";
|
||||
let i = 0;
|
||||
el.textContent = base;
|
||||
(function step() {
|
||||
if (i < target.length) {
|
||||
el.textContent += target.charAt(i++);
|
||||
setTimeout(step, 250 / target.length);
|
||||
}
|
||||
})();
|
||||
};
|
||||
|
||||
type();
|
||||
});
|
||||
</script>
|
||||
|
||||
<style>
|
||||
.not-found-container {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
height: 100%;
|
||||
min-height: 60vh;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 5em;
|
||||
font-weight: normal;
|
||||
margin: 0;
|
||||
}
|
||||
</style>
|
||||
|
|
|
|||
49
src/pages/[category]/[slug].astro
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
---
|
||||
import { getCollection } from "astro:content";
|
||||
import PostLayout from "../../layouts/PostLayout.astro";
|
||||
import * as collections from "../../content/config";
|
||||
import type { PostCollection, AnyCollectionEntry } from "../../types";
|
||||
|
||||
export async function getStaticPaths() {
|
||||
const categories = Object.keys(collections.collections).filter(
|
||||
(c) => c !== "git" && c !== "gists",
|
||||
) as PostCollection[];
|
||||
|
||||
const entries: Array<{
|
||||
params: { category: string; slug: string };
|
||||
props: { post: AnyCollectionEntry };
|
||||
}> = [];
|
||||
|
||||
for (const category of categories) {
|
||||
const docs = await getCollection(category);
|
||||
for (const doc of docs) {
|
||||
entries.push({
|
||||
params: { category, slug: doc.slug },
|
||||
props: { post: doc },
|
||||
});
|
||||
}
|
||||
}
|
||||
return entries;
|
||||
}
|
||||
|
||||
interface Props {
|
||||
post: AnyCollectionEntry;
|
||||
}
|
||||
|
||||
const { post } = Astro.props;
|
||||
const category = Astro.params.category;
|
||||
const { Content, headings } = await post.render();
|
||||
const pageTitle = `${category}/${post.data.title ?? post.slug}`;
|
||||
|
||||
if (post.data?.redirect) {
|
||||
return Astro.redirect(post.data.redirect, 301);
|
||||
}
|
||||
---
|
||||
|
||||
<PostLayout frontmatter={post.data} post={post} headings={headings}>
|
||||
<Fragment slot="head">
|
||||
<title>{pageTitle}</title>
|
||||
<script src="/scripts/index.js" is:inline></script>
|
||||
</Fragment>
|
||||
<Content />
|
||||
</PostLayout>
|
||||
57
src/pages/[category]/index.astro
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
---
|
||||
import BaseLayout from "../../layouts/BaseLayout.astro";
|
||||
import { getCollection } from "astro:content";
|
||||
import { sortItem } from "../../utils/sort.js";
|
||||
import * as collections from "../../content/config";
|
||||
import type { PostCollection } from "../../types";
|
||||
|
||||
export async function getStaticPaths() {
|
||||
return Object.keys(collections.collections)
|
||||
.filter((category) => category !== "git" && category !== "gists")
|
||||
.map((category) => ({
|
||||
params: { category },
|
||||
}));
|
||||
}
|
||||
|
||||
const category = Astro.params.category as PostCollection;
|
||||
const title = "Barrett Ruth";
|
||||
|
||||
const posts = await getCollection(category);
|
||||
posts.sort(sortItem);
|
||||
---
|
||||
|
||||
<BaseLayout title={title}>
|
||||
<slot name="head" slot="head">
|
||||
<link rel="stylesheet" href="/styles/index.css" />
|
||||
</slot>
|
||||
|
||||
<div class="content">
|
||||
<ul class="topics">
|
||||
<li class="topic algorithms">
|
||||
<a href="/algorithms" data-topic="algorithms">algorithms</a>
|
||||
</li>
|
||||
<li class="topic software">
|
||||
<a href="/software" data-topic="software">software</a>
|
||||
</li>
|
||||
<li class="topic meditations">
|
||||
<a href="/meditations" data-topic="meditations">meditations</a>
|
||||
</li>
|
||||
<li class="topic autonomous-racing">
|
||||
<a href="/autonomous-racing" data-topic="autonomous-racing"
|
||||
>autonomous-racing</a
|
||||
>
|
||||
</li>
|
||||
<li class="topic death">
|
||||
<a href="/death" data-topic="death">death</a>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
<div class="posts" id="posts">
|
||||
{
|
||||
posts.map((p) => (
|
||||
<a href={`/${category}/${p.slug}.html`}>{p.data.title}</a>
|
||||
))
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
</BaseLayout>
|
||||
44
src/pages/about.astro
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
---
|
||||
import BaseLayout from "../layouts/BaseLayout.astro";
|
||||
---
|
||||
|
||||
<BaseLayout title="Barrett Ruth">
|
||||
<div class="post-container">
|
||||
<header class="post-header">
|
||||
<h1 class="post-title">Barrett Ruth</h1>
|
||||
</header>
|
||||
<article class="post-article">
|
||||
<p>
|
||||
I am a software developer studying computer science at the University of
|
||||
Virginia.
|
||||
</p>
|
||||
<p>
|
||||
I began working as a software engineer part-time with
|
||||
<a target="blank" href="https://gotransverse.com/">GoTransverse</a>
|
||||
in high school. After developing an interest in the financial/venture capital
|
||||
world, I transitioned to
|
||||
<a target="blank" href="https://www.nthventure.com/">Nth Venture</a>
|
||||
in the spring of my second year. I worked at
|
||||
<a target="blank" href="https://usa.visa.com/">VISA</a> and
|
||||
<a href="https://trbcap.com" target="_blank">TRB Capital Management</a>
|
||||
during the summer of 2024. Luckily enough, I'll be joining
|
||||
<a href="https://drw.com" target="_blank">DRW</a> and
|
||||
<a href="https://ramp.com" target="_blank">Ramp</a> in the summer and spring
|
||||
of 2025.
|
||||
</p>
|
||||
<p>
|
||||
I've a developing interest in high-performance computing, quantitative
|
||||
finance, and open-source software. I am also a passionate contributor to
|
||||
the (Neo)Vim ecosystem and beyond.
|
||||
</p>
|
||||
<p>
|
||||
You can see my related contributions on
|
||||
<a target="blank" href="https://github.com/barrett-ruth">GitHub</a>.
|
||||
</p>
|
||||
</article>
|
||||
</div>
|
||||
|
||||
<slot name="head" slot="head">
|
||||
<link rel="stylesheet" href="/styles/posts.css" />
|
||||
</slot>
|
||||
</BaseLayout>
|
||||
68
src/pages/death.astro
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
---
|
||||
import BaseLayout from "../layouts/BaseLayout.astro";
|
||||
const title = "my father";
|
||||
---
|
||||
|
||||
<BaseLayout title={title} useHeader={false}>
|
||||
<div class="death-container">
|
||||
<img id="death-image" src="/death/death.webp" alt="Philip Matthew Ruth" />
|
||||
<div id="tribute-text" class="tribute">
|
||||
rip philip matthew ruth<br />
|
||||
february 8, 1967 – c. december 2, 2025
|
||||
</div>
|
||||
<div class="credit">
|
||||
gary wray<br />
|
||||
<em>waiting in line</em>, 2021
|
||||
</div>
|
||||
</div>
|
||||
<style>
|
||||
html,
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
overflow: hidden;
|
||||
background: black;
|
||||
color: white !important;
|
||||
}
|
||||
.death-container {
|
||||
position: relative;
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
display: grid;
|
||||
grid-template-rows: 1fr auto;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
}
|
||||
img {
|
||||
grid-row: 1 / -1;
|
||||
grid-column: 1 / -1;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
object-fit: cover;
|
||||
display: block;
|
||||
}
|
||||
.tribute {
|
||||
grid-row: 2;
|
||||
grid-column: 2;
|
||||
justify-self: end;
|
||||
align-self: end;
|
||||
padding: 1.5rem;
|
||||
font-size: clamp(1.5rem, 4vmin, 3em);
|
||||
z-index: 10;
|
||||
text-align: right;
|
||||
color: white;
|
||||
}
|
||||
.credit {
|
||||
grid-row: 2;
|
||||
grid-column: 1;
|
||||
justify-self: start;
|
||||
align-self: end;
|
||||
padding: 1.5rem;
|
||||
z-index: 10;
|
||||
font-size: clamp(1.2rem, 3vmin, 2em);
|
||||
text-align: left;
|
||||
color: white;
|
||||
}
|
||||
</style>
|
||||
</BaseLayout>
|
||||
33
src/pages/gist.astro
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
---
|
||||
import BaseLayout from "../layouts/BaseLayout.astro";
|
||||
import { getCollection } from "astro:content";
|
||||
import { sortItem } from "../utils/sort.js";
|
||||
|
||||
const title = "gists";
|
||||
const gists = await getCollection("gists");
|
||||
gists.sort(sortItem);
|
||||
---
|
||||
|
||||
<BaseLayout title={title}>
|
||||
<slot name="head" slot="head">
|
||||
<link rel="stylesheet" href="/styles/index.css" />
|
||||
<script src="/scripts/index.js" is:inline></script>
|
||||
</slot>
|
||||
|
||||
<div class="content">
|
||||
<ul class="topics">
|
||||
{
|
||||
gists.map((gist) => (
|
||||
<li class="topic">
|
||||
<a
|
||||
href={`/gist/${gist.slug}.html`}
|
||||
data-topic={`gist/${gist.slug}`}
|
||||
>
|
||||
{gist.data.title || gist.slug}
|
||||
</a>
|
||||
</li>
|
||||
))
|
||||
}
|
||||
</ul>
|
||||
</div>
|
||||
</BaseLayout>
|
||||
19
src/pages/gist/[slug].astro
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
---
|
||||
import { getCollection } from "astro:content";
|
||||
import PostLayout from "../../layouts/PostLayout.astro";
|
||||
|
||||
export async function getStaticPaths() {
|
||||
const gists = await getCollection("gists");
|
||||
return gists.map((gist) => ({
|
||||
params: { slug: gist.slug },
|
||||
props: { gist },
|
||||
}));
|
||||
}
|
||||
|
||||
const { gist } = Astro.props;
|
||||
const { Content } = await gist.render();
|
||||
---
|
||||
|
||||
<PostLayout frontmatter={gist.data} post={gist}>
|
||||
<Content />
|
||||
</PostLayout>
|
||||
31
src/pages/git.astro
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
---
|
||||
import BaseLayout from "../layouts/BaseLayout.astro";
|
||||
import { getCollection } from "astro:content";
|
||||
import { sortItem } from "../utils/sort.js";
|
||||
|
||||
const title = "git repos";
|
||||
const repos = await getCollection("git");
|
||||
repos.sort(sortItem);
|
||||
---
|
||||
|
||||
<BaseLayout title={title}>
|
||||
<slot name="head" slot="head">
|
||||
<link rel="stylesheet" href="/styles/index.css" />
|
||||
<script src="/scripts/index.js" is:inline></script>
|
||||
</slot>
|
||||
|
||||
<div class="content">
|
||||
<ul class="topics" id="repo-list">
|
||||
{
|
||||
repos.map((r) => (
|
||||
<li class="topic">
|
||||
<a href={`/git/${r.slug}.html`} data-topic={`git/${r.slug}`}>
|
||||
{r.data.title || r.slug}
|
||||
</a>
|
||||
</li>
|
||||
))
|
||||
}
|
||||
</ul>
|
||||
<div class="posts" id="posts"></div>
|
||||
</div>
|
||||
</BaseLayout>
|
||||
32
src/pages/git/[slug].astro
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
---
|
||||
import { getCollection } from "astro:content";
|
||||
import PostLayout from "../../layouts/PostLayout.astro";
|
||||
|
||||
export async function getStaticPaths() {
|
||||
const repos = await getCollection("git");
|
||||
return repos.map((repo) => ({
|
||||
params: { slug: repo.slug },
|
||||
props: { repo },
|
||||
}));
|
||||
}
|
||||
|
||||
const { repo } = Astro.props;
|
||||
const { Content } = await repo.render();
|
||||
const cloneCommand = `git clone https://git.barrettruth.com/${repo.slug}.git`;
|
||||
const githubUrl = `https://github.com/barrett-ruth/${repo.slug}`;
|
||||
---
|
||||
|
||||
<PostLayout frontmatter={repo.data} post={repo}>
|
||||
<Fragment slot="head">
|
||||
<link rel="stylesheet" href="/styles/git.css" />
|
||||
</Fragment>
|
||||
|
||||
<div class="clone-line">
|
||||
<code><span class="prompt">> </span>{cloneCommand}</code>
|
||||
</div>
|
||||
|
||||
<br />
|
||||
<div><a href={githubUrl}>source code</a></div>
|
||||
|
||||
<Content />
|
||||
</PostLayout>
|
||||
|
|
@ -1,83 +1,71 @@
|
|||
---
|
||||
import BaseLayout from "../layouts/BaseLayout.astro";
|
||||
import { sortItem } from "../utils/sort.js";
|
||||
import { getCollection } from "astro:content";
|
||||
import type { PostCollection } from "../types";
|
||||
|
||||
const title = "Barrett Ruth";
|
||||
const CATS: PostCollection[] = [
|
||||
"algorithms",
|
||||
"software",
|
||||
"meditations",
|
||||
"autonomous-racing",
|
||||
];
|
||||
|
||||
type PostData = {
|
||||
id: string;
|
||||
slug: string;
|
||||
data: {
|
||||
title: string;
|
||||
date: string | null;
|
||||
};
|
||||
};
|
||||
|
||||
const postsByCategory: Record<string, PostData[]> = {};
|
||||
for (const c of CATS) {
|
||||
const entries = await getCollection(c);
|
||||
entries.sort(sortItem);
|
||||
postsByCategory[c] = entries.map((e) => ({
|
||||
id: `${c}/${e.slug}.mdx`,
|
||||
slug: e.slug,
|
||||
data: {
|
||||
title: e.data.title ?? e.slug,
|
||||
date: e.data.date ?? null,
|
||||
},
|
||||
}));
|
||||
}
|
||||
---
|
||||
|
||||
<BaseLayout title="barrett ruth">
|
||||
<Fragment slot="head">
|
||||
<link rel="stylesheet" href="/styles/base.css" />
|
||||
<BaseLayout title={title}>
|
||||
<slot name="head" slot="head">
|
||||
<link rel="stylesheet" href="/styles/index.css" />
|
||||
</Fragment>
|
||||
</slot>
|
||||
|
||||
<div class="container">
|
||||
<header>
|
||||
<h1>barrett ruth</h1>
|
||||
<span class="delta">Δ</span>
|
||||
</header>
|
||||
|
||||
<section>
|
||||
<div>
|
||||
<h2>experience</h2>
|
||||
<ul>
|
||||
<li>
|
||||
<a target="_blank" href="https://imc.com">imc</a>—july 2026
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="https://ramp.com">ramp</a>—fall 2025
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="https://drw.com">drw</a>—summer 2025
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="https://trbcap.com">trb capital</a
|
||||
>—summer 2024
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="https://usa.visa.com">visa</a>—summer
|
||||
2024
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="https://www.nthventure.com">nth venture</a
|
||||
>—spring 2023
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="https://gotransverse.com">gotransverse</a
|
||||
>—summer 2022
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h2>ideas</h2>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h2>contact</h2>
|
||||
<ul>
|
||||
<li><a target="_blank" href="mailto:br@barrettruth.com">email</a></li>
|
||||
<li>
|
||||
<a target="_blank" href="https://github.com/barrettruth">github</a>
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="https://codeforces.com/profile/barrettruth"
|
||||
>codeforces</a
|
||||
>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h2>about</h2>
|
||||
<ul>
|
||||
<li>
|
||||
<a target="_blank" href="/resume.pdf" target="_blank">resume</a>
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="/transcript.pdf" target="_blank"
|
||||
>transcript</a
|
||||
>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</section>
|
||||
<div class="content">
|
||||
<ul class="topics">
|
||||
<li class="topic algorithms">
|
||||
<a href="#algorithms" data-topic="algorithms">algorithms</a>
|
||||
</li>
|
||||
<li class="topic software">
|
||||
<a href="#software" data-topic="software">software</a>
|
||||
</li>
|
||||
<li class="topic meditations">
|
||||
<a href="#meditations" data-topic="meditations">meditations</a>
|
||||
</li>
|
||||
<li class="topic autonomous-racing">
|
||||
<a href="#autonomous-racing" data-topic="autonomous-racing"
|
||||
>autonomous racing</a
|
||||
>
|
||||
</li>
|
||||
<li class="topic death">
|
||||
<a href="/death.html" data-topic="death">death</a>
|
||||
</li>
|
||||
</ul>
|
||||
<div class="posts" id="posts"></div>
|
||||
</div>
|
||||
|
||||
<script slot="scripts" define:vars={{ postsByCategory }} is:inline>
|
||||
window.postsByCategory = postsByCategory;
|
||||
</script>
|
||||
<script slot="scripts" src="/scripts/index.js" is:inline></script>
|
||||
</BaseLayout>
|
||||
|
|
|
|||
20
src/types.ts
|
|
@ -0,0 +1,20 @@
|
|||
import type { CollectionEntry } from "astro:content";
|
||||
|
||||
export type CollectionKey =
|
||||
| "algorithms"
|
||||
| "software"
|
||||
| "meditations"
|
||||
| "autonomous-racing"
|
||||
| "git"
|
||||
| "gists"
|
||||
| "death";
|
||||
export type PostCollection = Exclude<CollectionKey, "git" | "gists">;
|
||||
|
||||
export type AnyCollectionEntry =
|
||||
| CollectionEntry<"algorithms">
|
||||
| CollectionEntry<"software">
|
||||
| CollectionEntry<"meditations">
|
||||
| CollectionEntry<"autonomous-racing">
|
||||
| CollectionEntry<"death">
|
||||
| CollectionEntry<"git">
|
||||
| CollectionEntry<"gists">;
|
||||
18
src/utils/colors.js
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
export function getTopicColor(topicName) {
|
||||
switch ((topicName || "").toLowerCase()) {
|
||||
case "software":
|
||||
return "#0073e6";
|
||||
case "algorithms":
|
||||
return "#d50032";
|
||||
case "meditations":
|
||||
return "#6a0dad";
|
||||
case "autonomous-racing":
|
||||
return "#3d8a44";
|
||||
case "git":
|
||||
return "#cc5500";
|
||||
case "death":
|
||||
return "#000000";
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||