feat: refactor
This commit is contained in:
parent
b83f17d087
commit
8666e5a169
57 changed files with 5734 additions and 5313 deletions
21
.github/workflows/aws.yaml
vendored
21
.github/workflows/aws.yaml
vendored
|
|
@ -10,10 +10,27 @@ jobs:
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Setup PNPM
|
||||||
|
uses: pnpm/action-setup@v2
|
||||||
|
with:
|
||||||
|
version: 8
|
||||||
|
|
||||||
|
- name: Setup Node
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 18
|
||||||
|
cache: 'pnpm'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: pnpm install
|
||||||
|
|
||||||
|
- name: Build site
|
||||||
|
run: pnpm build
|
||||||
|
|
||||||
- name: Configure AWS Credentials
|
- name: Configure AWS Credentials
|
||||||
uses: aws-actions/configure-aws-credentials@v1
|
uses: aws-actions/configure-aws-credentials@v2
|
||||||
with:
|
with:
|
||||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||||
|
|
|
||||||
18
.gitignore
vendored
18
.gitignore
vendored
|
|
@ -1,5 +1,17 @@
|
||||||
public/**/*.ttf
|
public/**/*.ttf
|
||||||
*.md
|
|
||||||
!readme.md
|
|
||||||
.DS_Store
|
.DS_Store
|
||||||
.git
|
|
||||||
|
dist/
|
||||||
|
.astro/
|
||||||
|
|
||||||
|
node_modules/
|
||||||
|
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
pnpm-debug.log*
|
||||||
|
|
||||||
|
.env
|
||||||
|
.env.production
|
||||||
|
|
||||||
|
.idea/
|
||||||
|
|
|
||||||
5
.prettierignore
Normal file
5
.prettierignore
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
node_modules/
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
.astro/
|
||||||
|
pnpm-lock.yaml
|
||||||
47
README.md
Normal file
47
README.md
Normal file
|
|
@ -0,0 +1,47 @@
|
||||||
|
# Astro Starter Kit: Minimal
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pnpm create astro@latest -- --template minimal
|
||||||
|
```
|
||||||
|
|
||||||
|
[](https://stackblitz.com/github/withastro/astro/tree/latest/examples/minimal)
|
||||||
|
[](https://codesandbox.io/p/sandbox/github/withastro/astro/tree/latest/examples/minimal)
|
||||||
|
[](https://codespaces.new/withastro/astro?devcontainer_path=.devcontainer/minimal/devcontainer.json)
|
||||||
|
|
||||||
|
> 🧑🚀 **Seasoned astronaut?** Delete this file. Have fun!
|
||||||
|
|
||||||
|
## 🚀 Project Structure
|
||||||
|
|
||||||
|
Inside of your Astro project, you'll see the following folders and files:
|
||||||
|
|
||||||
|
```text
|
||||||
|
/
|
||||||
|
├── public/
|
||||||
|
├── src/
|
||||||
|
│ └── pages/
|
||||||
|
│ └── index.astro
|
||||||
|
└── package.json
|
||||||
|
```
|
||||||
|
|
||||||
|
Astro looks for `.astro` or `.md` files in the `src/pages/` directory. Each page is exposed as a route based on its file name.
|
||||||
|
|
||||||
|
There's nothing special about `src/components/`, but that's where we like to put any Astro/React/Vue/Svelte/Preact components.
|
||||||
|
|
||||||
|
Any static assets, like images, can be placed in the `public/` directory.
|
||||||
|
|
||||||
|
## 🧞 Commands
|
||||||
|
|
||||||
|
All commands are run from the root of the project, from a terminal:
|
||||||
|
|
||||||
|
| Command | Action |
|
||||||
|
| :------------------------ | :----------------------------------------------- |
|
||||||
|
| `pnpm install` | Installs dependencies |
|
||||||
|
| `pnpm dev` | Starts local dev server at `localhost:4321` |
|
||||||
|
| `pnpm build` | Build your production site to `./dist/` |
|
||||||
|
| `pnpm preview` | Preview your build locally, before deploying |
|
||||||
|
| `pnpm astro ...` | Run CLI commands like `astro add`, `astro check` |
|
||||||
|
| `pnpm astro -- --help` | Get help using the Astro CLI |
|
||||||
|
|
||||||
|
## 👀 Want to learn more?
|
||||||
|
|
||||||
|
Feel free to check [our documentation](https://docs.astro.build) or jump into our [Discord server](https://astro.build/chat).
|
||||||
58
about.html
58
about.html
|
|
@ -1,58 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/index.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<title>Barrett Ruth</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">Barrett Ruth</h1>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<p>
|
|
||||||
I am a software developer studying computer science at the
|
|
||||||
University of Virginia.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
I began working as a software engineer part-time with
|
|
||||||
<a target="blank" href="https://gotransverse.com/">GoTransverse</a>
|
|
||||||
in high school. After developing an interest in the
|
|
||||||
financial/venture capital world , I transitioned to
|
|
||||||
<a target="blank" href="https://www.nthventure.com/">Nth Venture</a>
|
|
||||||
in the spring of my second year. I worked at
|
|
||||||
<a target="blank" href="https://usa.visa.com/">VISA</a> and
|
|
||||||
<a href="https://trbcap.com" target="_blank"
|
|
||||||
>TRB Capital Management</a
|
|
||||||
>
|
|
||||||
during the summer of 2024. Luckily enough, I'll be joining
|
|
||||||
<a href="https://drw.com" target="_blank">DRW</a> and
|
|
||||||
<a href="https://ramp.com" target="_blank">Ramp</a> in the summer
|
|
||||||
and spring of 2025.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
I've a developing interest in high-performance computing,
|
|
||||||
quantitative finance, and open-source software. I am also a
|
|
||||||
passionate contributor to the (Neo)Vim ecosystem and beyond.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
You can see my related contributions on
|
|
||||||
<a target="blank" href="https://github.com/barrett-ruth">GitHub</a>.
|
|
||||||
</p>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/index.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
22
astro.config.mjs
Normal file
22
astro.config.mjs
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
import { defineConfig } from "astro/config";
|
||||||
|
import mdx from "@astrojs/mdx";
|
||||||
|
import remarkMath from "remark-math";
|
||||||
|
import rehypeKatex from "rehype-katex";
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
integrations: [
|
||||||
|
mdx({
|
||||||
|
remarkPlugins: [remarkMath],
|
||||||
|
rehypePlugins: [rehypeKatex],
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
markdown: {
|
||||||
|
remarkPlugins: [remarkMath],
|
||||||
|
rehypePlugins: [rehypeKatex],
|
||||||
|
shikiConfig: {
|
||||||
|
theme: "github-light",
|
||||||
|
langs: [],
|
||||||
|
wrap: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
37
index.html
37
index.html
|
|
@ -1,37 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/index.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<title>Barrett Ruth</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="content">
|
|
||||||
<ul class="topics">
|
|
||||||
<li class="topic algorithms">
|
|
||||||
<a href="/algorithms" onclick="typechars(event)">algorithms</a>
|
|
||||||
</li>
|
|
||||||
<li class="topic operating-systems">
|
|
||||||
<a href="/operating-systems" onclick="typechars(event)">operating systems</a>
|
|
||||||
</li>
|
|
||||||
<li class="topic software">
|
|
||||||
<a href="/software" onclick="typechars(event)">software</a>
|
|
||||||
</li>
|
|
||||||
<li class="topic meditations">
|
|
||||||
<a href="/meditations" onclick="typechars(event)">meditations</a>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<div class="posts" id="posts"></div>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="scripts/common.js"></script>
|
|
||||||
<script src="scripts/index.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
20
package.json
Normal file
20
package.json
Normal file
|
|
@ -0,0 +1,20 @@
|
||||||
|
{
|
||||||
|
"name": "barrettruth.com",
|
||||||
|
"type": "module",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"scripts": {
|
||||||
|
"dev": "astro dev",
|
||||||
|
"build": "astro build",
|
||||||
|
"preview": "astro preview",
|
||||||
|
"astro": "astro"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@astrojs/mdx": "^4.3.0",
|
||||||
|
"astro": "^5.8.0",
|
||||||
|
"rehype-katex": "^7.0.1",
|
||||||
|
"remark-math": "^6.0.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"prettier": "^3.5.3"
|
||||||
|
}
|
||||||
|
}
|
||||||
3711
pnpm-lock.yaml
generated
Normal file
3711
pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,215 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<link href="/public/prism/prism.css" rel="stylesheet" />
|
|
||||||
<link href="/public/prism/prism-theme.css" rel="stylesheet" />
|
|
||||||
<script defer src="/public/prism/prism.js"></script>
|
|
||||||
<script
|
|
||||||
src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"
|
|
||||||
async
|
|
||||||
></script>
|
|
||||||
<title>extreme circular buffer</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">extrema circular buffer</h1>
|
|
||||||
<p class="post-meta">
|
|
||||||
<time datetime="2024-07-30">30/07/2024</time>
|
|
||||||
</p>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<h2>context</h2>
|
|
||||||
<div>
|
|
||||||
<p>
|
|
||||||
While working for
|
|
||||||
<a href="https://trbcap.com/">TRB Capital Management</a>, certain
|
|
||||||
strategies necessitated finding the minimum and maximum of a
|
|
||||||
moving window of prices.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<h2>problem statement</h2>
|
|
||||||
<p>Design a data structure supporting the following operations:</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
<span><code>build(size_t capacity)</code></span>
|
|
||||||
: initialize the data structure with capacity/window size
|
|
||||||
<span><code>capacity</code></span>
|
|
||||||
</li>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
The data structure must always hold \(\leq\)
|
|
||||||
<span><code>capacity</code></span>
|
|
||||||
prices.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<li>
|
|
||||||
<span><code>void push_back(double value)</code></span>
|
|
||||||
</li>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
If the data structure exceeds capacity, remove elements from the
|
|
||||||
front of the window.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<li>
|
|
||||||
<span><code>void pop_front()</code></span>
|
|
||||||
: remove the price from the front of the window
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<span><code>size_t size()</code></span>
|
|
||||||
: return the number of prices in the data structure
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<span><code>double get()</code></span>
|
|
||||||
: return the extrema (min or max)
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<h2>solution</h2>
|
|
||||||
<p>
|
|
||||||
Try to solve it yourself first. The point of this exercise it to
|
|
||||||
create the most theoretically optimal solution you can, not
|
|
||||||
brute-force and move on.
|
|
||||||
</p>
|
|
||||||
<div class="fold">
|
|
||||||
<h3>naïve solution</h3>
|
|
||||||
</div>
|
|
||||||
<div class="problem-content">
|
|
||||||
<p>
|
|
||||||
One can design a data structure meeting these requirements through
|
|
||||||
simulating the operations directly with a
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://en.cppreference.com/w/cpp/container/deque"
|
|
||||||
><span><code>std::deque<double></code></span></a
|
|
||||||
>.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
On the upside, this approach is simple to understand. Further,
|
|
||||||
operations are all \(O(1)\) time—that is, nearly all
|
|
||||||
operations. The minimum/maximum element must be found via a linear
|
|
||||||
scan in \(O(n)\) time, certainly far from optimal.
|
|
||||||
</p>
|
|
||||||
<div class="code" data-file="naive.cpp"></div>
|
|
||||||
</div>
|
|
||||||
<h3>optimizing the approach</h3>
|
|
||||||
<div class="problem-content">
|
|
||||||
<p>
|
|
||||||
Rather than bear the brunt of the work finding extrema in calls to
|
|
||||||
<span><code>get()</code></span
|
|
||||||
>, we can distribute it across the data structure as it is built.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Maintaining the prices in a sorted order seems to suffice, and
|
|
||||||
gives access to both max <i>and</i> min in \(O(1)\) time. However,
|
|
||||||
all of the problem constraints have not been addressed. Adhering
|
|
||||||
to the interface of a circular buffer is another challenge.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Fortunately, pairing each element with a count allows intelligent
|
|
||||||
removal/insertion of elements—if an element has a count of
|
|
||||||
\(0\), remove it from the list of sorted prices. A
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://en.cppreference.com/w/cpp/container/map"
|
|
||||||
>std::map</a
|
|
||||||
>
|
|
||||||
allows us to do all of this.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Now, we can access extrema instantly. Insertion and deletion take
|
|
||||||
\(O(log(n))\) time thanks to the map—but we can do better.
|
|
||||||
</p>
|
|
||||||
<div class="code" data-file="map.cpp"></div>
|
|
||||||
</div>
|
|
||||||
<h3>monotonic <s>queues</s> deques</h3>
|
|
||||||
<div class="problem-content">
|
|
||||||
<p>
|
|
||||||
Thinking a bit deeper about the problem constraints, it is clear
|
|
||||||
that:
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
If an extrema is pushed onto the data structure, all previously
|
|
||||||
pushed elements are irrelevant to any further operations.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
Elements are processed in FIFO order, enabling this observation to
|
|
||||||
be exploited. This is the foundationl idea of the
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://www.wikiwand.com/en/Monotone_priority_queue"
|
|
||||||
>monotone priority queue</a
|
|
||||||
>
|
|
||||||
data structure. So, for maintaining a minimum/maximum, the data
|
|
||||||
structure will store a monotonically increasing/decreasing
|
|
||||||
double-ended queue.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
This solution does not satisfy a circular buffer inherently. If an
|
|
||||||
arbitrary number of elements are removed from the data structure
|
|
||||||
when an extrema is added, it is certainly not possible to maintain
|
|
||||||
a window of fixed size.
|
|
||||||
</p>
|
|
||||||
<p>Thus, we make one more observation to meet this criterion:</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
If each price (extrema) on the monotonic double-ended queue also
|
|
||||||
maintains a count of <i>previously popped elements</i>, we can
|
|
||||||
deduce the proper action to take when the data structure reaches
|
|
||||||
capacity.
|
|
||||||
</li>
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
If elements were previously popped before this extrema was
|
|
||||||
added to the data structure, decrement the price's count
|
|
||||||
of popped elements and do nothing.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Otherwise, either no elements were pushed before this extrema
|
|
||||||
or they've all been popped. Remove (pop) this element
|
|
||||||
from the deque.
|
|
||||||
</li>
|
|
||||||
</ol>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
This approach supports all operations in amortized \(O(1)\) time
|
|
||||||
(with a monotonic sequence, elements are added or removed at least
|
|
||||||
once; across a sequence of \(n\) operations, \(n\) total \(O(1)\)
|
|
||||||
operations will be executed).
|
|
||||||
</p>
|
|
||||||
<div class="code" data-file="monotonic.cpp"></div>
|
|
||||||
<h3>further improvements</h3>
|
|
||||||
<p>
|
|
||||||
The final implementation utilized in the TRB includes the
|
|
||||||
following features:
|
|
||||||
</p>
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
A ringbuffer a statically-allocated <code>std::array</code>, as
|
|
||||||
any fix-sized queue can be supplanted with one
|
|
||||||
</li>
|
|
||||||
<li>A templatized value type and comparator for flexibility</li>
|
|
||||||
<li>
|
|
||||||
C++ specific optimizations (rule of 5, smart pointers, and an
|
|
||||||
STL-compliant API)
|
|
||||||
</li>
|
|
||||||
</ol>
|
|
||||||
</div>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
@ -1,646 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<link href="/public/prism/prism.css" rel="stylesheet" />
|
|
||||||
<link href="/public/prism/prism-theme.css" rel="stylesheet" />
|
|
||||||
<script defer src="/public/prism/prism.js"></script>
|
|
||||||
<script
|
|
||||||
src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"
|
|
||||||
async
|
|
||||||
></script>
|
|
||||||
<title>leetcode daily</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">leetcode daily</h1>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<h2>
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://leetcode.com/problems/count-good-numbers/submissions/1605647445/?envType=daily-question&envId=2025-04-13"
|
|
||||||
>count good numbers</a
|
|
||||||
>
|
|
||||||
<span class="post-meta">
|
|
||||||
<time datetime="2024-04-13">04/13/2024</time>
|
|
||||||
</span>
|
|
||||||
</h2>
|
|
||||||
<div class="problem-content">
|
|
||||||
<h3>understanding the problem</h3>
|
|
||||||
<p>
|
|
||||||
p is a combinatoric problem at heart. You have some slots for
|
|
||||||
evens and some for primes, with a limited number of choices for
|
|
||||||
each. Leverage the multiplication rule, which states that if you
|
|
||||||
have \(n\) slots with \(x\) choices, you get \(x^n\) possible
|
|
||||||
outcomes.
|
|
||||||
</p>
|
|
||||||
<h3>doing it</h3>
|
|
||||||
<p>
|
|
||||||
So, what's the answer? If we know which slots we have and the
|
|
||||||
number of choices for them, we're done. Since this is leetcode,
|
|
||||||
they don't let you think—they just give you the answer. You
|
|
||||||
have 2 types of slots (even and odd indices) with 5
|
|
||||||
(\(\{0,2,4,6,8\}\)) and 4 (\(\{2,3,5,7\}\)) choices respectively.
|
|
||||||
Therefore, the answer is: \(5^{\text{# even slots}}\cdot4^{\text{#
|
|
||||||
odd slots}}\) By counting or with small cases, we have
|
|
||||||
\(\lceil\frac{n}{2}\rceil\) even slots and
|
|
||||||
\(\lfloor\frac{n}{2}\rfloor\) odd slots. Let's submit it!
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
And.... TLE. Checking <i>everything</i> when you submit your
|
|
||||||
code—in this case, constraint \(n\leq 10^{16}\) informs us
|
|
||||||
of something suspect. In the worst case, \(\frac{n}{2}\approx
|
|
||||||
n^14\). This is far too many multiplications, so we can leverage
|
|
||||||
binary exponentiation instead (and probably should've been the
|
|
||||||
whole time!). Don't forget the mod.
|
|
||||||
</p>
|
|
||||||
<div class="code" data-file="cgn.cpp"></div>
|
|
||||||
</div>
|
|
||||||
<h2>
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://leetcode.com/problems/minimum-number-of-operations-to-make-elements-in-array-distinc"
|
|
||||||
>minimum number of operations to make array distinct</a
|
|
||||||
>
|
|
||||||
<span class="post-meta">
|
|
||||||
<time datetime="2024-04-09">04/09/2024</time>
|
|
||||||
</span>
|
|
||||||
</h2>
|
|
||||||
<div class="problem-content">
|
|
||||||
<h3>understanding the problem</h3>
|
|
||||||
<p>
|
|
||||||
You can remove elements in groups of 3 <i>solely</i> from the
|
|
||||||
beginning of the array. Perform this operation until there are no
|
|
||||||
more duplicates left, returning the number of times you had to
|
|
||||||
perform the operation.
|
|
||||||
</p>
|
|
||||||
<h3>solution: rephrase the question</h3>
|
|
||||||
<p>
|
|
||||||
Definitionally, you remove the <i>last</i> duplicate. If such
|
|
||||||
duplicate is at 0-indexed <code>i</code>, it belongs to the
|
|
||||||
\(\lceil \frac{i + 1}{3}\rceil\)th chunk of 3 (i.e. operation).
|
|
||||||
Find the last duplicate by leveraging a frequency map and
|
|
||||||
iterating backwards through the input.
|
|
||||||
</p>
|
|
||||||
<div class="code" data-file="mnootmad.cpp"></div>
|
|
||||||
<h3>asymptotic complexity</h3>
|
|
||||||
<p>
|
|
||||||
The solution is optimal, considering the least amount of elements
|
|
||||||
possible in:
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li><u>Time Complexity</u>: \(\O(n)\)</li>
|
|
||||||
<li><u>Space Complexity</u>: \(\Theta(1)\)</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<h2>
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://leetcode.com/problems/count-the-number-of-fair-pairs/"
|
|
||||||
>count the number of fair pairs</a
|
|
||||||
>
|
|
||||||
<span class="post-meta">
|
|
||||||
<time datetime="2024-09-13">09/13/2024</time>
|
|
||||||
</span>
|
|
||||||
</h2>
|
|
||||||
<div class="problem-content">
|
|
||||||
<h3>problem statement</h3>
|
|
||||||
<p>
|
|
||||||
Given an array <code>nums</code> of integers and upper/lower
|
|
||||||
integer bounds <code>upper</code>/<code>lower</code> respectively,
|
|
||||||
return the number of unique valid index pairs such that: \[i\neq
|
|
||||||
j,lower\leq nums[i]+nums[j]\leq upper\]
|
|
||||||
</p>
|
|
||||||
<h3>understanding the problem</h3>
|
|
||||||
<p>
|
|
||||||
This is another sleeper daily in which a bit of thinking in the
|
|
||||||
beginning pays dividends. Intuitively, I think it makes sense to
|
|
||||||
reduce the “dimensionality” of the problem. Choosing
|
|
||||||
both <code>i</code> and <code>j</code> concurrently seems tricky,
|
|
||||||
so let's assume we've found a valid <code>i</code>. What
|
|
||||||
must be true? Well: \[i\neq j,lower-nums[i]\leq nums[j]\leq
|
|
||||||
upper-nums[i]\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
It doesn't seem like we've made much progress. If nums
|
|
||||||
is a sequence of random integers,
|
|
||||||
<i
|
|
||||||
>there's truly no way to find all <code>j</code> satisfying
|
|
||||||
this condition efficiently</i
|
|
||||||
>.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
The following question naturally arises: can we modify our input
|
|
||||||
to find such <code>j</code> efficiently? Recall our goal: find the
|
|
||||||
smallest/largest j to fit within our altered bounds—in other
|
|
||||||
words, find the smallest \(x\) less/greater than or equal to a
|
|
||||||
number. If binary search bells aren't clanging in your head
|
|
||||||
right now, I'm not sure what to say besides keep practicing.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
So, it would be nice to sort <code>nums</code> to find such
|
|
||||||
<code>j</code> relatively quickly. However:
|
|
||||||
<i>are we actually allowed to do this?</i> This is the core
|
|
||||||
question I think everyone skips over. Maybe it is trivial but it
|
|
||||||
is important to emphasize:
|
|
||||||
</p>
|
|
||||||
<ul style="list-style: none">
|
|
||||||
<li>
|
|
||||||
<i>Yes, we are allowed to sort the input</i>. Re-frame the
|
|
||||||
problem: what we are actually doing is choosing distinct
|
|
||||||
<code>i</code>, <code>j</code> to satisfy some condition. The
|
|
||||||
order of <code>nums</code> does not matter—rather, its
|
|
||||||
contents do. Any input to this algorithm with
|
|
||||||
<code>nums</code> with the same contents will yield the same
|
|
||||||
result. If we were to modify <code>nums</code> instead of
|
|
||||||
rearrange it, this would be invalid because we could be
|
|
||||||
introducing/taking away valid index combinations.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
Let's consider our solution a bit more before implementing
|
|
||||||
it:
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
Is the approach feasible? We're sorting
|
|
||||||
<code>nums</code> then binary searching over it considering all
|
|
||||||
<code>i</code>, which will take around \(O(nlg(n))\) time.
|
|
||||||
<code>len(nums)</code>\(\leq10^5\), so this is fine.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
How do we avoid double-counting? The logic so far makes no
|
|
||||||
effort. If we consider making all pairs with indices
|
|
||||||
<i>less than</i> <code>i</code> for all
|
|
||||||
<code>i</code> left-to-right, we'll be considering all
|
|
||||||
valid pairs with no overlap. This is a common pattern—take
|
|
||||||
a moment to justify it to yourself.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<i>Exactly</i> how many elements do we count? Okay, we're
|
|
||||||
considering some rightmost index <code>i</code> and we've
|
|
||||||
found upper and lower index bounds <code>j</code> and
|
|
||||||
<code>k</code> respectively. We can pair
|
|
||||||
<code>nums[j]</code> with all elements up to an including
|
|
||||||
<code>nums[k]</code> (besides <code>nums[j]</code>). There are
|
|
||||||
exactly \(k-j\) of these. If the indexing confuses you, draw it
|
|
||||||
out and prove it to yourself.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
How do we get our final answer? Accumulate all
|
|
||||||
<code>k-j</code> for all <code>i</code>.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<h3>carrying out the plan</h3>
|
|
||||||
<p>
|
|
||||||
The following approach implements our logic quite elegantly and
|
|
||||||
directly. The third and fourth arguments to the
|
|
||||||
<code>bisect</code> calls specify <code>lo</code> (inclusive) and
|
|
||||||
<code>hi</code> (exclusive) bounds for our search space, mirroring
|
|
||||||
the criteria that we search across all indices \(\lt i\).
|
|
||||||
</p>
|
|
||||||
<div class="code" data-file="cfps-naive.py"></div>
|
|
||||||
<h3>optimizing the approach</h3>
|
|
||||||
<p>
|
|
||||||
If we interpret the criteria this way, the above approach is
|
|
||||||
relatively efficient. To improve this approach, we'll need to
|
|
||||||
reinterpret the constraints. Forget about the indexing and
|
|
||||||
consider the constraint in aggregate. We want to find all \(i,j\)
|
|
||||||
with \(x=nums[i]+nums[j]\) such that \(i\neq j,lower\leq x\leq
|
|
||||||
upper\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
We <i>still</i> need to reduce the “dimensionality” of
|
|
||||||
the problem—there are just too many moving parts to consider
|
|
||||||
at once. This seems challening. Let's simplify the problem to
|
|
||||||
identify helpful ideas: pretend <code>lower</code> does not exist
|
|
||||||
(and, of course, that <code>nums</code> is sorted).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
We're looking for all index pairs with sum \(\leq upper\).
|
|
||||||
And behold: (almost) two sum in the wild. This can be accomplished
|
|
||||||
with a two-pointers approach—this post is getting quite long
|
|
||||||
so we'll skip over why this is the case—but the main
|
|
||||||
win here is that we can solve this simplified version of our
|
|
||||||
problem in \(O(n)\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Are we any closer to actually solving the problem? Now, we have
|
|
||||||
the count of index pairs \(\leq upper\). Is this our answer?
|
|
||||||
No—some may be too small, namely, with sum \(\lt lower\).
|
|
||||||
Let's exclude those by running our two-pointer approach with
|
|
||||||
and upper bound of \(lower-1\) (we want to include \(lower\)).
|
|
||||||
Now, our count reflects the total number of index pairs with a sum
|
|
||||||
in our interval bound.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Note that this really is just running a prefix sum/using the
|
|
||||||
“inclusion-exclusion” principle/however you want to
|
|
||||||
phrase it.
|
|
||||||
</p>
|
|
||||||
<div class="code" data-file="cfps-twoptr.py"></div>
|
|
||||||
<h3>some more considerations</h3>
|
|
||||||
<p>
|
|
||||||
The second approach is <i>asymptotically</i> equivalent. However,
|
|
||||||
it's still worth considering for two reasons:
|
|
||||||
</p>
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
If an interviewer says “assume <code>nums</code> is
|
|
||||||
sorted” or “how can we do
|
|
||||||
better?”—you're cooked.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
(Much) more importantly, it's extremely valuable to be able
|
|
||||||
to <i>reconceptualize</i> a problem and look at it from
|
|
||||||
different angles. Not being locked in on a solution shows
|
|
||||||
perseverance, curiosity, and strong problem-solving abilities.
|
|
||||||
</li>
|
|
||||||
</ol>
|
|
||||||
<h3>asymptotic complexity</h3>
|
|
||||||
<p>
|
|
||||||
<u>Time Complexity</u>: \(O(nlg(n))\) for both—\(O(n)\) if
|
|
||||||
<code>nums</code> is sorted with respect to the second approach.
|
|
||||||
</p>
|
|
||||||
<p><u>Space Complexity</u>: \(\Theta(1)\) for both.</p>
|
|
||||||
</div>
|
|
||||||
<h2>
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://leetcode.com/problems/most-beautiful-item-for-each-query/description/"
|
|
||||||
>most beautiful item for each query</a
|
|
||||||
>
|
|
||||||
<span class="post-meta">
|
|
||||||
<time datetime="2024-09-12">09/12/2024</time>
|
|
||||||
</span>
|
|
||||||
</h2>
|
|
||||||
<div class="problem-content">
|
|
||||||
<h3>problem statement</h3>
|
|
||||||
<p>
|
|
||||||
Given an array <code>items</code> of \((price, beauty)\) tuples,
|
|
||||||
answer each integer query of \(queries\). The answer to some
|
|
||||||
<code>query[i]</code> is the maximum beauty of an item with
|
|
||||||
\(price\leq\)<code>queries[i]</code>.
|
|
||||||
</p>
|
|
||||||
<h3>understanding the problem</h3>
|
|
||||||
<p>
|
|
||||||
Focus on one aspect of the problem at a time. To answer a query,
|
|
||||||
we need to have considered:
|
|
||||||
</p>
|
|
||||||
<ol>
|
|
||||||
<li>Items with a non-greater price</li>
|
|
||||||
<li>The beauty of all such items</li>
|
|
||||||
</ol>
|
|
||||||
<p>
|
|
||||||
Given some query, how can we <i>efficiently</i> identify the
|
|
||||||
“last” item with an acceptable price? Leverage the
|
|
||||||
most common pre-processing algorithm: sorting. Subsequently, we
|
|
||||||
can binary search <code>items</code> (keyed by price, of course)
|
|
||||||
to identify all considerable items in \(O(lg(n))\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Great. Now we need to find the item with the largest beauty.
|
|
||||||
Naïvely considering all the elements is a
|
|
||||||
<i>correct</i> approach—but is it efficient? Considering our
|
|
||||||
binary search \(O(lg(n))\) and beauty search \(O(n)\) across
|
|
||||||
\(\Theta(n)\) queries with
|
|
||||||
<code>len(items)<=len(queries)</code>\(\leq10^5\), an
|
|
||||||
\(O(n^2lg(n))\) approach is certainly unacceptable.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Consider alternative approaches to responding to our queries. It
|
|
||||||
is clear that answering them in-order yields no benefit (i.e. we
|
|
||||||
have to consider each item all over again, per query)—could
|
|
||||||
we answer them in another order to save computations?
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Visualizing our items from left-to-right, we're interested in
|
|
||||||
both increasing beauty and prices. If we can scan our items left
|
|
||||||
to right, we can certainly “accumulate” a running
|
|
||||||
maximal beauty. We can leverage sorting once again to answer our
|
|
||||||
queries left-to-right, then re-order them appropriately before
|
|
||||||
returning a final answer. Sorting both <code>queries</code> and
|
|
||||||
<code>items</code> with a linear scan will take \(O(nlg(n))\)
|
|
||||||
time, meeting the constraints.
|
|
||||||
</p>
|
|
||||||
<h3>carrying out the plan</h3>
|
|
||||||
<p>
|
|
||||||
A few specifics need to be understood before coding up the
|
|
||||||
approach:
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
Re-ordering the queries: couple <code>query[i]</code> with
|
|
||||||
<code>i</code>, then sort. When responding to queries in sorted
|
|
||||||
order, we know where to place them in an output
|
|
||||||
container—index <code>i</code>.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
The linear scan: accumulate a running maximal beauty, starting
|
|
||||||
at index <code>0</code>. For some query <code>query</code>, we
|
|
||||||
want to consider all items with price less than or equal to
|
|
||||||
<code>query</code>. Therefore, loop until this condition is
|
|
||||||
<i>violated</i>— the previous index will represent the
|
|
||||||
last considered item.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Edge cases: it's perfectly possible the last considered
|
|
||||||
item is invalid (consider a query cheaper than the cheapest
|
|
||||||
item). Return <code>0</code> as specified by the problem
|
|
||||||
constraints.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<div class="code" data-file="beauty.cpp"></div>
|
|
||||||
<h3>asymptotic complexity</h3>
|
|
||||||
<p>
|
|
||||||
Let <code>n=len(items)</code> and <code>m=len(queries)</code>.
|
|
||||||
There may be more items than queries, or vice versa. Note that a
|
|
||||||
“looser” upper bound can be found by analyzing the
|
|
||||||
runtime in terms of \(max\{n,m\}\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
<u>Time Complexity</u>: \(O(nlg(n)+mlg(m)+m)\in
|
|
||||||
O(nlg(n)+mlg(m))\). An argument can be made that because
|
|
||||||
<code>queries[i],items[i][{0,1}]</code>\(\leq10^9\), radix sort
|
|
||||||
can be leveraged to achieve a time complexity of \(O(d \cdot (n +
|
|
||||||
k + m + k))\in O(9\cdot (n + m))\in O(n+m)\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
<u>Space Complexity</u>: \(\Theta(1)\), considering that \(O(m)\)
|
|
||||||
space must be allocated. If <code>queries</code>/<code
|
|
||||||
>items</code
|
|
||||||
>
|
|
||||||
cannot be modified in-place, increase the space complexity by
|
|
||||||
\(m\)/\(n\) respectively.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<h2>
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://leetcode.com/problems/shortest-subarray-with-or-at-least-k-ii/description/"
|
|
||||||
>shortest subarray with or at least k ii</a
|
|
||||||
>
|
|
||||||
<span class="post-meta">
|
|
||||||
<time datetime="2024-09-11">09/11/2024</time>
|
|
||||||
</span>
|
|
||||||
</h2>
|
|
||||||
<div class="problem-content">
|
|
||||||
<h3>problem statement</h3>
|
|
||||||
<p>
|
|
||||||
Given an array of non-negative integers \(num\) and some \(k\),
|
|
||||||
find the length of the shortest non-empty subarray of nums such
|
|
||||||
that its element-wise bitwise OR is greater than or equal to
|
|
||||||
\(k\)—return -1 if no such array exists.
|
|
||||||
</p>
|
|
||||||
<h3>developing an approach</h3>
|
|
||||||
<p>Another convoluted, uninspired bitwise-oriented daily.</p>
|
|
||||||
<p>
|
|
||||||
Anyways, we're looking for a subarray that satisfies a
|
|
||||||
condition. Considering all subarrays with
|
|
||||||
<code>len(nums)</code>\(\leq2\times10^5\) is impractical according
|
|
||||||
to the common rule of \(\approx10^8\) computations per second on
|
|
||||||
modern CPUs.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Say we're building some array <code>xs</code>. Adding another
|
|
||||||
element <code>x</code> to this sequence can only increase our
|
|
||||||
element-wise bitwise OR. Of course, it makes sense to do this.
|
|
||||||
However, consider <code>xs</code> after—it is certainly
|
|
||||||
possible that including <code>x</code> finally got us to at least
|
|
||||||
<code>k</code>. However, not all of the elements in the array are
|
|
||||||
useful now; we should remove some.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Which do we remove? Certainly not any from the
|
|
||||||
middle—we'd no longer be considering a subarray. We can
|
|
||||||
only remove from the beginning.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Now, how many times do we remove? While the element-wise bitwise
|
|
||||||
OR of <code>xs</code> is \(\geq k\), we can naïvely remove
|
|
||||||
from the start of <code>xs</code> to find the smallest subarray.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Lastly, what's the state of <code>xs</code> after these
|
|
||||||
removals? Now, we (may) have an answer and the element-wise
|
|
||||||
bitwise OR of <code>xs</code> is guaranteed to be \(\lt k\).
|
|
||||||
Inductively, expand the array to search for a better answer.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
This approach is generally called a variable-sized “sliding
|
|
||||||
window”. Every element of
|
|
||||||
<code>nums</code> is only added (considered in the element-wise
|
|
||||||
bitwise OR) or removed (discard) one time, yielding an
|
|
||||||
asymptotically linear time complexity. In other words, this is a
|
|
||||||
realistic approach for our constraints.
|
|
||||||
</p>
|
|
||||||
<h3>carrying out the plan</h3>
|
|
||||||
<p>Plugging in our algorithm to my sliding window framework:</p>
|
|
||||||
<div class="code" data-file="msl-naive.py"></div>
|
|
||||||
<p>Done, right? No. TLE.</p>
|
|
||||||
<p>
|
|
||||||
If you thought this solution would work, you move too fast.
|
|
||||||
Consider <i>every</i> aspect of an algorithm before implementing
|
|
||||||
it. In this case, we (I) overlooked one core question:
|
|
||||||
</p>
|
|
||||||
<ol style="list-style: none">
|
|
||||||
<li><i>How do we maintain our element-wise bitwise OR</i>?</li>
|
|
||||||
</ol>
|
|
||||||
<p>
|
|
||||||
Calculating it by directly maintaining a window of length \(n\)
|
|
||||||
takes \(n\) time—with a maximum window size of \(n\), this
|
|
||||||
solution is \(O(n^2)\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Let's try again. Adding an element is simple—OR it to
|
|
||||||
some cumulative value. Removing an element, not so much.
|
|
||||||
Considering some \(x\) to remove, we only unset one of its bits
|
|
||||||
from our aggregated OR if it's the “last” one of
|
|
||||||
these bits set across all numbers contributing to our aggregated
|
|
||||||
value.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Thus, to maintain our aggregate OR, we want to map bit
|
|
||||||
“indices” to counts. A hashmap (dictionary) or static
|
|
||||||
array will do just fine. Adding/removing some \(x\) will
|
|
||||||
increment/decrement each the counter's bit count at its
|
|
||||||
respective position. I like to be uselessly specific
|
|
||||||
sometimes—choosing the latter approach, how big should our
|
|
||||||
array be? As many bits as represented by the largest of
|
|
||||||
\(nums\)—(or \(k\) itself): \[\lfloor \lg({max\{nums,k
|
|
||||||
\})}\rfloor+1\]
|
|
||||||
</p>
|
|
||||||
<p>Note that:</p>
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
Below we use the
|
|
||||||
<a
|
|
||||||
target="_blank"
|
|
||||||
href="https://artofproblemsolving.com/wiki/index.php/Change_of_base_formula"
|
|
||||||
>change of base formula for logarithms</a
|
|
||||||
>
|
|
||||||
because \(log_2(x)\) is not available in python.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
It's certainly possible that \(max\{nums, k\}=0\). To avoid
|
|
||||||
the invalid calculation \(log(0)\), take the larger of \(1\) and
|
|
||||||
this calculation. The number of digits will then (correctly) be
|
|
||||||
\(1\) in this special case.
|
|
||||||
</li>
|
|
||||||
</ol>
|
|
||||||
<div class="code" data-file="msl-bitwise.py"></div>
|
|
||||||
<h3>asymptotic complexity</h3>
|
|
||||||
<p>
|
|
||||||
Note that the size of the frequency map is bounded by
|
|
||||||
\(lg_{2}({10^9})\approx30\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
<u>Space Complexity</u>: Thus, the window uses \(O(1)\) space.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
<u>Time Complexity</u>: \(\Theta(\)<code>len(nums)</code>\()\)
|
|
||||||
—every element of <code>nums</code> is considered at least
|
|
||||||
once and takes \(O(1)\) work each to find the element-wise bitwise
|
|
||||||
OR.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<h2>
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://leetcode.com/problems/minimum-array-end/"
|
|
||||||
>minimum array end</a
|
|
||||||
>
|
|
||||||
<span class="post-meta">
|
|
||||||
<time datetime="2024-09-10">09/10/2024</time>
|
|
||||||
</span>
|
|
||||||
</h2>
|
|
||||||
<div class="problem-content">
|
|
||||||
<h3>problem statement</h3>
|
|
||||||
<p>
|
|
||||||
Given some \(x\) and \(n\), construct a strictly increasing array
|
|
||||||
(say
|
|
||||||
<code>nums</code>
|
|
||||||
) of length \(n\) such that
|
|
||||||
<code>nums[0] & nums[1] ... & nums[n - 1] == x</code>
|
|
||||||
, where
|
|
||||||
<code>&</code>
|
|
||||||
denotes the bitwise AND operator.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Finally, return the minimum possible value of
|
|
||||||
<code>nums[n - 1]</code>.
|
|
||||||
</p>
|
|
||||||
<h3>understanding the problem</h3>
|
|
||||||
<p>
|
|
||||||
The main difficulty in this problem lies in understanding what is
|
|
||||||
being asked (intentionally or not, the phrasing is terrible). Some
|
|
||||||
initial notes:
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>The final array need not be constructed</li>
|
|
||||||
<li>
|
|
||||||
The element-wise bitwise AND of an array equals
|
|
||||||
<code>x</code> if and only if each element has
|
|
||||||
<code>x</code>'s bits set—and no other bit is set by
|
|
||||||
all elements
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
It makes sense to set <code>nums[0] == x</code> to ensure
|
|
||||||
<code>nums[n - 1]</code> is minimal
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<h3>developing an approach</h3>
|
|
||||||
<p>
|
|
||||||
An inductive approach is helpful. Consider the natural question:
|
|
||||||
“If I had correctly generated <code>nums[:i]</code>”,
|
|
||||||
how could I find <code>nums[i]</code>? In other words,
|
|
||||||
<i
|
|
||||||
>how can I find the next smallest number such that
|
|
||||||
<code>nums</code>
|
|
||||||
's element-wise bitwise AND is still \(x\)?</i
|
|
||||||
>
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Hmm... this is tricky. Let's think of a similar problem to
|
|
||||||
glean some insight: “Given some \(x\), how can I find the
|
|
||||||
next smallest number?”. The answer is, of course, add one
|
|
||||||
(bear with me here).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
We also know that all of <code>nums[i]</code> must have at least
|
|
||||||
\(x\)'s bits set. Therefore, we need to alter the unset bits
|
|
||||||
of <code>nums[i]</code>.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
The key insight of this problem is combining these two ideas to
|
|
||||||
answer our question:
|
|
||||||
<i
|
|
||||||
>Just “add one” to <code>nums[i - 1]</code>'s
|
|
||||||
unset bits</i
|
|
||||||
>. Repeat this to find <code>nums[n - 1]</code>.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
One last piece is missing—how do we know the element-wise
|
|
||||||
bitwise AND is <i>exactly</i> \(x\)? Because
|
|
||||||
<code>nums[i > 0]</code> only sets \(x\)'s unset bits, every
|
|
||||||
number in <code>nums</code> will have at least \(x\)'s bits
|
|
||||||
set. Further, no other bits will be set because \(x\) has them
|
|
||||||
unset.
|
|
||||||
</p>
|
|
||||||
<h3>carrying out the plan</h3>
|
|
||||||
<p>Let's flesh out the remaining parts of the algorithm:</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
<code>len(nums) == n</code> and we initialize
|
|
||||||
<code>nums[0] == x</code>. So, we need to “add one”
|
|
||||||
<code>n - 1</code> times
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
How do we carry out the additions? We could iterate \(n - 1\)
|
|
||||||
times and simulate them. However, we already know how we want to
|
|
||||||
alter the unset bits of <code>nums[0]</code> inductively—
|
|
||||||
(add one) <i>and</i> how many times we want to do this (\(n -
|
|
||||||
1\)). Because we're adding one \(n-1\) times to
|
|
||||||
\(x\)'s unset bits (right to left, of course), we simply
|
|
||||||
set its unset bits to those of \(n - 1\).
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
The implementation is relatively straightforward. Traverse \(x\)
|
|
||||||
from least-to-most significant bit, setting its \(i\)th unset bit
|
|
||||||
to \(n - 1\)'s \(i\)th bit. Use a bitwise mask
|
|
||||||
<code>mask</code> to traverse \(x\).
|
|
||||||
</p>
|
|
||||||
<div class="code" data-file="minend.cpp"></div>
|
|
||||||
<h3>asymptotic complexity</h3>
|
|
||||||
<p>
|
|
||||||
<u>Space Complexity</u>: \(\Theta(1)\)—a constant amount of
|
|
||||||
numeric variables are allocated regardless of \(n\) and \(x\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
<u>Time Complexity</u>: in the worst case, may need to traverse
|
|
||||||
the entirety of \(x\) to distribute every bit of \(n - 1\) to
|
|
||||||
\(x\). This occurs if and only if \(x\) is all ones (\(\exists
|
|
||||||
k\gt 0 : 2^k-1=x\)). \(x\) and \(n\) have \(lg(x)\) and \(lg(n)\)
|
|
||||||
bits respectively, so the solution is \(O(lg(x) + lg(n))\in
|
|
||||||
O(log(xn))\). \(1\leq x,n\leq 1e8\), so this runtime is bounded by
|
|
||||||
\(O(log(1e8^2))\in O(1)\).
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
@ -1,913 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/graph.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<script
|
|
||||||
src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"
|
|
||||||
async
|
|
||||||
></script>
|
|
||||||
<script src="/public/d3.js"></script>
|
|
||||||
<title>models of production</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">models of production</h1>
|
|
||||||
<p class="post-meta">
|
|
||||||
<time datetime="2024-06-22">22/06/2024</time>
|
|
||||||
</p>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<p>
|
|
||||||
This post offers a basic introduction to the Solow, Romer, and
|
|
||||||
Romer-Solow economic models, as taught by
|
|
||||||
<a target="blank" href="https://www.vladimirsmirnyagin.com/"
|
|
||||||
>Vladimir Smirnyagin</a
|
|
||||||
>
|
|
||||||
and assisted by
|
|
||||||
<a target="blank" href="https://www.donghyunsuh.com/"
|
|
||||||
>Donghyun Suh</a
|
|
||||||
>
|
|
||||||
in Intermediate Macroeconomics (ECON 3020) during the Spring
|
|
||||||
semester of 2024 at the University of Virginia.
|
|
||||||
</p>
|
|
||||||
<h2>solow</h2>
|
|
||||||
<div>
|
|
||||||
<h3>introduction</h3>
|
|
||||||
<div>
|
|
||||||
<p>
|
|
||||||
The Solow Model is an economic model of production that
|
|
||||||
incorporates the idea of capital accumulation.
|
|
||||||
Based on the
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://en.wikipedia.org/wiki/Cobb%E2%80%93Douglas_production_function"
|
|
||||||
>Cobb-Douglas production function</a
|
|
||||||
>, the Solow Model describes production as follows:
|
|
||||||
\[Y_t=F(K_t,L_t)=\bar{A}K_t^\alpha L_t^{1-\alpha}\] With:
|
|
||||||
</p>
|
|
||||||
<ul style="list-style: unset">
|
|
||||||
<li>\(\bar{A}\): total factor productivity (TFP)</li>
|
|
||||||
<li>
|
|
||||||
\(\alpha\): capital's share of output—usually
|
|
||||||
\(1/3\) based on
|
|
||||||
<a target="blank" href="https://arxiv.org/pdf/1105.2123"
|
|
||||||
>empirical data</a
|
|
||||||
>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
In this simple model, the following statements describe the
|
|
||||||
economy:
|
|
||||||
</p>
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
Output is either saved or consumed; in other words, savings
|
|
||||||
equals investment
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Capital accumulates according to investment \(I_t\) and
|
|
||||||
depreciation \(\bar{d}\), beginning with \(K_0\) (often called
|
|
||||||
the
|
|
||||||
<u>Law of Capital Motion</u>)
|
|
||||||
</li>
|
|
||||||
<li>Labor \(L_t\) is time-independent</li>
|
|
||||||
<li>
|
|
||||||
A savings rate \(\bar{s}\) describes the invested portion of
|
|
||||||
total output
|
|
||||||
</li>
|
|
||||||
</ol>
|
|
||||||
<p>
|
|
||||||
Including the production function, these four ideas encapsulate
|
|
||||||
the Solow Model:
|
|
||||||
</p>
|
|
||||||
<div style="display: flex; justify-content: center">
|
|
||||||
<div style="padding-right: 50px">
|
|
||||||
<ol>
|
|
||||||
<li>\(C_t + I_t = Y_t\)</li>
|
|
||||||
<li>\(\Delta K_{t+1} = I_t - \bar{d} K_t\)</li>
|
|
||||||
</ol>
|
|
||||||
</div>
|
|
||||||
<div style="padding-left: 50px">
|
|
||||||
<ol start="3">
|
|
||||||
<li>\(L_t = \bar{L}\)</li>
|
|
||||||
<li>\(I_t = \bar{s} Y_t\)</li>
|
|
||||||
</ol>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<h3>solving the model</h3>
|
|
||||||
<div>
|
|
||||||
<p>
|
|
||||||
Visualizing the model, namely output as a function of capital,
|
|
||||||
provides helpful intuition before solving it.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Letting \((L_t,\alpha)=(\bar{L}, \frac{1}{3})\), it follows that
|
|
||||||
\(Y_t=F(K_t,L_t)=\bar{A}K_t^{\frac{1}{3}}
|
|
||||||
\bar{L}^{\frac{2}{3}}\). Utilizing this simplification and its
|
|
||||||
graphical representation below, output is clearly characterized
|
|
||||||
by the cube root of capital:
|
|
||||||
</p>
|
|
||||||
<div class="graph">
|
|
||||||
<div id="solow-visualization"></div>
|
|
||||||
</div>
|
|
||||||
<div class="sliders">
|
|
||||||
<div style="padding-right: 20px">
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderSA">\(\bar{A}:\)</label>
|
|
||||||
<span id="outputSA">1.00</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderSA"
|
|
||||||
min="0.1"
|
|
||||||
max="2"
|
|
||||||
step="0.01"
|
|
||||||
value="1"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderSd">\(\bar{d}:\)</label>
|
|
||||||
<span id="outputSd">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderSd"
|
|
||||||
min="0.01"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div style="padding-left: 20px">
|
|
||||||
<ul start="3">
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderSs">\(\bar{s}:\)</label>
|
|
||||||
<span id="outputSs">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderSs"
|
|
||||||
min="0.01"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderSalpha">\(\alpha:\)</label>
|
|
||||||
<span id="outputSalpha">0.33</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderSalpha"
|
|
||||||
min="0.01"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.33"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<p>
|
|
||||||
When investment is completely disincentivized by depreciation
|
|
||||||
(in other words, \(sY_t=\bar{d}K_t\)), the economy equilibrates
|
|
||||||
at a so-called "steady-state" with equilibrium
|
|
||||||
\((K_t,Y_t)=(K_t^*,Y_t^*)\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Using this equilibrium condition, it follows that:
|
|
||||||
\[Y_t^*=\bar{A}{K_t^*}^\alpha\bar{L}^{1-\alpha} \rightarrow
|
|
||||||
\bar{d}K_t^*=\bar{s}\bar{A}{K_t^*}^\alpha\bar{L}^{1-\alpha}\]
|
|
||||||
\[\rightarrow
|
|
||||||
K^*=\bar{L}(\frac{\bar{s}\bar{A}}{\bar{d}})^\frac{1}{1-\alpha}\]
|
|
||||||
\[\rightarrow
|
|
||||||
Y^*=\bar{A}^\frac{1}{1-\alpha}(\frac{\bar{s}}{\bar{d}})^\frac{\alpha}{1-\alpha}\bar{L}\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Thus, the equilibrium intensive form (output per worker) of both
|
|
||||||
capital and output are summarized as follows:
|
|
||||||
\[(k^*,y^*)=(\frac{K^*}{\bar{L}},\frac{Y^*}{\bar{L}})
|
|
||||||
=((\frac{\bar{s}\bar{A}}{\bar{d}})^\frac{1}{1-\alpha},
|
|
||||||
\bar{A}^\frac{1}{1-\alpha}(\frac{\bar{s}}{\bar{d}})^\frac{\alpha}{1-\alpha})\]
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<h3>analysis</h3>
|
|
||||||
<div>
|
|
||||||
<p>
|
|
||||||
Using both mathematical intuition and manipulating the
|
|
||||||
visualization above, we find that:
|
|
||||||
</p>
|
|
||||||
<ul style="list-style: unset">
|
|
||||||
<li>
|
|
||||||
\(\bar{A}\) has a positive relationship with steady-state
|
|
||||||
output
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Capital is influenced by workforce size, TFP, and savings rate
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Capital output share's \(\alpha\) impact on output is twofold:
|
|
||||||
<ol>
|
|
||||||
<li>Directly through capital quantity</li>
|
|
||||||
<li>Indirectly through TFP</li>
|
|
||||||
</ol>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Large deviations in capital from steady-state \(K^*\) induce
|
|
||||||
net investments of larger magnitude, leading to an accelerated
|
|
||||||
reversion to the steady-state
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Economies stagnate at the steady-state
|
|
||||||
\((K^*,Y^*)\)—this model provides no avenues for
|
|
||||||
long-run growth.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
Lastly (and perhaps most importantly), exogenous parameters
|
|
||||||
\(\bar{s}, \bar{d}\), and \(\bar{A}\) all have immense
|
|
||||||
ramifications on economic status. For example, comparing the
|
|
||||||
difference in country \(C_1\)'s output versus
|
|
||||||
\(C_2\)'s using the Solow Model, we find that a difference
|
|
||||||
in economic performance can only be explained by these factors:
|
|
||||||
\[
|
|
||||||
\frac{Y_1}{Y_2}=\frac{\bar{A_1}}{\bar{A_2}}(\frac{\bar{s_1}}{\bar{s_2}})^\frac{\alpha}{1-\alpha}
|
|
||||||
\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
We see that TFP is more important in explaining the differences
|
|
||||||
in per capita output
|
|
||||||
(\(\frac{1}{1-\alpha}>\frac{\alpha}{1-\alpha},\alpha\in[0,1)\)).
|
|
||||||
<!-- TODO: poor phrasing -->
|
|
||||||
Notably, the Solow Model does not give any insights into how to
|
|
||||||
alter the most important predictor of output, TFP.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<h2>romer</h2>
|
|
||||||
<div>
|
|
||||||
<h3>introduction</h3>
|
|
||||||
<div>
|
|
||||||
<p>How, then, can we address these shortcomings?</p>
|
|
||||||
<p>
|
|
||||||
The Romer Model provides an answer by both modeling ideas
|
|
||||||
\(A_t\) (analogous to TFP in the Solow model) endogenously and
|
|
||||||
utilizing them to provide a justification for sustained long-run
|
|
||||||
growth.
|
|
||||||
</p>
|
|
||||||
<p>The Model divides the world into two parts:</p>
|
|
||||||
<ul style="list-style: unset">
|
|
||||||
<li>
|
|
||||||
<u>Objects</u>: finite resources, like capital and labor in
|
|
||||||
the Solow Model
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<u>Ideas</u>: infinite,
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="https://en.wikipedia.org/wiki/Rivalry_(economics)"
|
|
||||||
>non-rivalrous</a
|
|
||||||
>
|
|
||||||
items leveraged in production (note that ideas may be
|
|
||||||
<a
|
|
||||||
href="blank"
|
|
||||||
href="https://www.wikiwand.com/en/Excludability"
|
|
||||||
>excludable</a
|
|
||||||
>, though)
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
The Romer Models' production function can be modelled as:
|
|
||||||
\[Y_t=F(A_t,L_{yt})=A_tL_{yt}\] With:
|
|
||||||
</p>
|
|
||||||
<ul style="list-style: unset">
|
|
||||||
<li>\(A_t\): the amount of ideas \(A\) in period \(t\)</li>
|
|
||||||
<li>
|
|
||||||
\(L_{yt}\): the population working on production-facing
|
|
||||||
(output-driving) tasks
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
Assuming \(L_t=\bar{L}\) people work in the economy, a
|
|
||||||
proportion \(\bar{l}\) of the population focuses on making
|
|
||||||
ideas: \(L_{at}=\bar{l}\bar{L}\rightarrow
|
|
||||||
L_{yt}=(1-\bar{l})\bar{L}\).
|
|
||||||
</p>
|
|
||||||
<!-- TODO: footnotes - dynamic JS? -->
|
|
||||||
<p>
|
|
||||||
Further, this economy garners ideas with time at rate
|
|
||||||
<u>\(\bar{z}\)</u>: the "speed of ideas". Now, we can
|
|
||||||
describe the quantity of ideas tomorrow as function of those of
|
|
||||||
today: the <u>Law of Ideal Motion</u> (I made that up).
|
|
||||||
\[A_{t+1}=A_t+\bar{z}A_tL_{at}\leftrightarrow\Delta
|
|
||||||
A_{t+1}=\bar{z}A_tL_{at}\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Analogously to capital in the Solow model, ideas begin in the
|
|
||||||
economy with some \(\bar{A}_0\gt0\) and grow at an
|
|
||||||
<i>exponential</i> rate. At its core, this is because ideas are
|
|
||||||
non-rivalrous; more ideas bring about more ideas.
|
|
||||||
</p>
|
|
||||||
<p>Finally, we have a model:</p>
|
|
||||||
<div style="display: flex; justify-content: center">
|
|
||||||
<div style="padding-right: 50px">
|
|
||||||
<ol>
|
|
||||||
<li>\(Y_t=A_tL_{yt}\)</li>
|
|
||||||
<li>\(\Delta A_{t+1} = \bar{z}A_tL_{at}\)</li>
|
|
||||||
</ol>
|
|
||||||
</div>
|
|
||||||
<div style="padding-left: 50px">
|
|
||||||
<ol start="3">
|
|
||||||
<li>\(L_{yt}+L_{at}=\bar{L}\)</li>
|
|
||||||
<li>\(L_{at}=\bar{l}\bar{L}\)</li>
|
|
||||||
</ol>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<p>
|
|
||||||
A visualization of the Romer Model shows that the economy grows
|
|
||||||
exponentially—production knows no bounds (<a
|
|
||||||
target="blank"
|
|
||||||
href="https://en.wikipedia.org/wiki/Ceteris_paribus"
|
|
||||||
><i>ceteris paribus</i></a
|
|
||||||
>, of course). A graph of \(log_{10}(Y_t)\) can be seen below:
|
|
||||||
</p>
|
|
||||||
<div class="graph">
|
|
||||||
<div id="romer-visualization"></div>
|
|
||||||
</div>
|
|
||||||
<div class="sliders">
|
|
||||||
<div style="padding-right: 20px">
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRz">\(\bar{z}:\)</label>
|
|
||||||
<span id="outputRz">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRz"
|
|
||||||
min="0.1"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRL">\(\bar{L}:\)</label>
|
|
||||||
<span id="outputRL">505</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRL"
|
|
||||||
min="10"
|
|
||||||
max="1000"
|
|
||||||
step="19"
|
|
||||||
value="505"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div style="padding-left: 20px">
|
|
||||||
<ul start="3">
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRl">\(\bar{l}:\)</label>
|
|
||||||
<span id="outputRl">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRl"
|
|
||||||
min="0.01"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRA0">\(\bar{A}_0:\)</label>
|
|
||||||
<span id="outputRA0">500</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRA0"
|
|
||||||
min="0"
|
|
||||||
max="1000"
|
|
||||||
step="100"
|
|
||||||
value="500"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<p>
|
|
||||||
Playing with the sliders, this graph may seem underwhelming in
|
|
||||||
comparison to the Solow Model. However, on a logarithmic scale,
|
|
||||||
small changes in the parameters lead to massive changes in the
|
|
||||||
growth rate of ideas and economies:
|
|
||||||
</p>
|
|
||||||
<div class="romer-table-container">
|
|
||||||
<table id="romer-table">
|
|
||||||
<thead>
|
|
||||||
<tr id="romer-table-header"></tr>
|
|
||||||
</thead>
|
|
||||||
<tbody>
|
|
||||||
<tr id="row-A_t"></tr>
|
|
||||||
<tr id="row-Y_t"></tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<h3>solving the model</h3>
|
|
||||||
<div>
|
|
||||||
<p>
|
|
||||||
To find the output in terms of exogenous parameters, first note
|
|
||||||
that \[L_t=\bar{L}\rightarrow L_{yt}=(1-\bar{l})\bar{L}\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Now, all that remains is to find ideas \(A_t\). It is assumed
|
|
||||||
that ideas grow at some rate \(g_A\): \[A_t=A_0(1+g_A)^t\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Using the growth rate formula, we find: \[g_A=\frac{\Delta
|
|
||||||
A_{t+1}-A_t}{A_t}=\frac{A_t+\bar{z}A_tL_{at}-A_t}{A_t}=\bar{z}L_{at}=\bar{z}\bar{l}\bar{L}\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Thus, ideas \(A_t=A_0(1+\bar{z}\bar{l}\bar{L})^t\). Finally,
|
|
||||||
output can be solved using the production function: \[Y_t=A_t
|
|
||||||
L_{yt}=A_0(1+\bar{z}\bar{l}\bar{L})^t(1-\bar{l})\bar{L}\]
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<h3>analysis</h3>
|
|
||||||
<div>
|
|
||||||
<p>
|
|
||||||
We see the Romer model exhibits long-run growth because ideas
|
|
||||||
have non-diminishing returns due to their nonrival nature. In
|
|
||||||
this model, capital and income eventually slow but ideas
|
|
||||||
continue to yield increasing, unrestricted returns.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Further, all economies continually and perpetually grow along a
|
|
||||||
constant "Balanced Growth Path" as previously defined by \(Y_t\)
|
|
||||||
as a function of the endogenous variables. This directly
|
|
||||||
contrasts the Solow model, in which an economy converges to a
|
|
||||||
steady-state via transition dynamics.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Changes in the growth rate of ideas, then, alter the growth rate
|
|
||||||
of output itself—in this case, parameters \(\bar{l},
|
|
||||||
\bar{z}\), and \(\bar{L}\). This is best exemplified by
|
|
||||||
comparing the growth rate before and after a parameter
|
|
||||||
changes. In the below example, a larger \(\bar{l}\) initially
|
|
||||||
drops output due to less workers being allocated to production.
|
|
||||||
Soon after, though, output recovers along a "higher"
|
|
||||||
Balanced Growth Path.
|
|
||||||
</p>
|
|
||||||
<div class="graph">
|
|
||||||
<div id="romer-lchange-visualization"></div>
|
|
||||||
</div>
|
|
||||||
<div class="sliders">
|
|
||||||
<div style="padding-right: 20px">
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderlChange">\(\bar{l}_1:\)</label>
|
|
||||||
<span id="outputlChange">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderlChange"
|
|
||||||
min="0.1"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div style="padding-left: 20px">
|
|
||||||
<ul start="3">
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="slidert0">\(t_0:\)</label>
|
|
||||||
<span id="outputt0">50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="slidert0"
|
|
||||||
min="1"
|
|
||||||
max="99"
|
|
||||||
step="1"
|
|
||||||
value="50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<p>
|
|
||||||
Notably, while both the Romer and Solow Models help to analyze
|
|
||||||
growth across countries, they both are unable to resolve one
|
|
||||||
question: why can and do investment rates and TFP differ across
|
|
||||||
countries? This is a more fundamental economic question involving
|
|
||||||
culture, institutions, and social dynamics—one day I hope
|
|
||||||
we'll have an answer.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<h2>romer-solow</h2>
|
|
||||||
<div>
|
|
||||||
<h3>introduction</h3>
|
|
||||||
<div>
|
|
||||||
<p>
|
|
||||||
While the Romer Model provides an avenue for long-run economic
|
|
||||||
growth, it is anything but realistic—surely economies do
|
|
||||||
not grow at an ever-increasing blistering rate into perpetuity.
|
|
||||||
A model in which:
|
|
||||||
</p>
|
|
||||||
<ul style="list-style: unset">
|
|
||||||
<li>
|
|
||||||
Economies grow <i>faster</i> the further <i>below</i> they are
|
|
||||||
from their balanced growth path
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Economies grow <i>slower</i> the further <i>above</i> they are
|
|
||||||
from their balanced growth path
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
would certainly be more pragmatic. The Solow Model's
|
|
||||||
capital dynamics do, in some sense, mirror part of this behavior
|
|
||||||
with respect to the steady-state (output converges
|
|
||||||
quicker/slower to the steady state the further/closer it is from
|
|
||||||
equilibrium).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Combining the dynamics of the Romer Model's ideas and the
|
|
||||||
Solow Model's capital stock could yield the desired result.
|
|
||||||
Intuitively, incorporating capital into output via the Solow
|
|
||||||
Model's production function, as well as including the
|
|
||||||
<u>Law of Capital Motion</u> seems like one way to legitimately
|
|
||||||
create this so-called "Romer-Solow" model:
|
|
||||||
</p>
|
|
||||||
<div style="display: flex; justify-content: center">
|
|
||||||
<div style="padding-right: 50px">
|
|
||||||
<ol>
|
|
||||||
<li>\(Y_t=A_t K_t^\alpha L_{yt}^{1-\alpha}\)</li>
|
|
||||||
<li>\(\Delta K_{t+1}=\bar{s}Y_t-\bar{d}K_t\)</li>
|
|
||||||
<li>\(\Delta A_{t+1} = \bar{z}A_tL_{at}\)</li>
|
|
||||||
</ol>
|
|
||||||
</div>
|
|
||||||
<div style="padding-left: 50px">
|
|
||||||
<ol start="4">
|
|
||||||
<li>\(L_{yt}+L_{at}=\bar{L}\)</li>
|
|
||||||
<li>\(L_{at}=\bar{l}\bar{L}\)</li>
|
|
||||||
</ol>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<h3>solving the model</h3>
|
|
||||||
<div>
|
|
||||||
<p>
|
|
||||||
Based on the motivations for creating this model, it is more
|
|
||||||
useful to first analyze the growth rates of equilibrium long run
|
|
||||||
output \(Y_t^*\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
According to the production function, \[g_Y=g_A+\alpha
|
|
||||||
g_K+(1-\alpha)g_{L_y}\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
From previous analysis it was found that
|
|
||||||
\(g_A=\bar{z}\bar{l}\bar{L}\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Based on the <u>Law of Capital Motion</u>, \[g_K=\frac{\Delta
|
|
||||||
K_{t+1}}{K_t}=\bar{s}\frac{Y_t}{K_t}-\bar{d}\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Because growth rates are constant on the Balanced Growth Path,
|
|
||||||
\(g_K\) must be constant as well. Thus, so is
|
|
||||||
\(\bar{s}\frac{Y_t}{K_t}-\bar{d}\); it must be that
|
|
||||||
\(g_K^*=g_Y^*\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
The model assumes population is constant, so
|
|
||||||
\(g_{\bar{L}}=0\rightarrow g_{L_{yt}}=0\) as well.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Combining these terms, we find:
|
|
||||||
\[g_Y^*=\bar{z}\bar{l}\bar{L}+\alpha g_Y^*+(1-\alpha)\cdot 0\]
|
|
||||||
\[\rightarrow g_Y^*=\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Solving for \(Y_t^*\) is trivial after discovering \(g_K=g_Y\)
|
|
||||||
must hold on a balanced growth path.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Invoking the <u>Law of Capital Motion</u> with magic chants,
|
|
||||||
\[g_K^*=\bar{s}\frac{Y_t^*}{K_t^*}-\bar{d}=g_Y^*\rightarrow
|
|
||||||
K_t^*=\frac{\bar{s}Y_t^*}{g_Y^*+\bar{d}}\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Isolating \(Y_t^*\), \[Y_t^*=A_t^*
|
|
||||||
(\frac{\bar{s}Y_t^*}{g_Y^*+\bar{d}})^\alpha
|
|
||||||
({(1-\bar{l})\bar{L}})^{1-\alpha}\] \[\rightarrow
|
|
||||||
{Y_t^*}^{1-\alpha}=A_t^*(\frac{\bar{s}}{g_Y^*+\bar{d}})^\alpha({(1-\bar{l})\bar{L}})^{1-\alpha}\]
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Plugging in the known expressions for \(A_t^*\) and \(g_Y^*\), a
|
|
||||||
final expression for the Balanced Growth Path output as a
|
|
||||||
function of the endogenous parameters and time is obtained: \[
|
|
||||||
Y_t^*={(A_0(1+\bar{z}\bar{l}\bar{L})^t})^\frac{1}{1-\alpha}(\frac{\bar{s}}{\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}+\bar{d}})^\frac{\alpha}{1-\alpha}(1-\bar{l})\bar{L}\]
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<h3>analysis</h3>
|
|
||||||
<div>
|
|
||||||
<p>
|
|
||||||
First looking at the growth rate of output,
|
|
||||||
\(g_Y^*=\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}\), idea-driving
|
|
||||||
factors and an increased allocation of labor to output increase
|
|
||||||
the equilibrium Balanced Growth Path—the
|
|
||||||
<i>level</i> of long-run growth. Thus, this model captures the
|
|
||||||
influences of both capital and ideas on economic growth.
|
|
||||||
<!-- TODO: t_0 graph break in romer-model and post -->
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Looking at \(Y_t^*\), ideas have both a direct and indirect
|
|
||||||
effect on output. Firstly, ideas raise output because they
|
|
||||||
increase productivity (directly); second, with the introduction
|
|
||||||
of capital stock, ideas also increase capital, in turn
|
|
||||||
increasing output further (indirectly). Mathematically, this is
|
|
||||||
evident in both instances of \(g_A^*\) in the formula for output
|
|
||||||
\(Y_t^*\)—note that
|
|
||||||
\(\frac{1}{1-\alpha},\frac{\alpha}{1-\alpha}>0\) for any
|
|
||||||
\(\alpha\in(0,1)\), so \(\frac{d}{dg_A^*}Y_t^*>0\).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Expectedly, output has a positive relationship with the savings
|
|
||||||
rate and a negative relationship with the depreciation rate.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Using the visualization below, we see a growth pattern similar
|
|
||||||
to that of the Romer Model. However, the Romer-Solow economy
|
|
||||||
indeed grows at a faster rate than the Romer model—I had
|
|
||||||
to cap \(\bar{L}\) at \(400\) and \(\alpha\) at \(0.4\) because
|
|
||||||
output would be
|
|
||||||
<i> too large </i> for JavaScript to contain in a number (the
|
|
||||||
graph would disappear).
|
|
||||||
</p>
|
|
||||||
<div class="graph">
|
|
||||||
<div id="romer-solow-visualization"></div>
|
|
||||||
</div>
|
|
||||||
<div class="sliders">
|
|
||||||
<div style="padding-right: 20px">
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSz">\(\bar{z}:\)</label>
|
|
||||||
<span id="outputRSz">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSz"
|
|
||||||
min="0.1"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSA0">\(A_0:\)</label>
|
|
||||||
<span id="outputRSA0">500</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSA0"
|
|
||||||
min="0"
|
|
||||||
max="1000"
|
|
||||||
step="10"
|
|
||||||
value="500"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSd">\(\bar{d}:\)</label>
|
|
||||||
<span id="outputRSd">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSd"
|
|
||||||
min="0.01"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSs">\(\bar{s}:\)</label>
|
|
||||||
<span id="outputRSs">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSs"
|
|
||||||
min="0.01"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div style="padding-left: 20px">
|
|
||||||
<ul start="3">
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSalpha">\(\alpha:\)</label>
|
|
||||||
<span id="outputRSalpha">0.33</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSalpha"
|
|
||||||
min="0.01"
|
|
||||||
max="0.40"
|
|
||||||
step="0.01"
|
|
||||||
value="0.33"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSl">\(\bar{l}:\)</label>
|
|
||||||
<span id="outputRSl">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSl"
|
|
||||||
min="0.01"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSL">\(\bar{L}:\)</label>
|
|
||||||
<span id="outputRSL">200</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSL"
|
|
||||||
min="0"
|
|
||||||
max="400"
|
|
||||||
step="10"
|
|
||||||
value="200"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<p>
|
|
||||||
Playing with the parameters, the previous mathematical findings
|
|
||||||
are validated. For example, because
|
|
||||||
\(g_Y^*=\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}\), only changes
|
|
||||||
in parameters \(\alpha,\bar{z},\bar{l}\), and \(\bar{L}\) affect
|
|
||||||
the growth rate of output, manifesting as the y-axis scaling
|
|
||||||
up/down on a ratio scale.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
However, do economies grow <i>faster</i>/<i>slower</i> the
|
|
||||||
further <i>below</i>/<i>above</i> they are from their Balanced
|
|
||||||
Growth Path, as initially desired? While this can be
|
|
||||||
mathematically proven (of course), sometimes a visualization
|
|
||||||
helps.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
The graph below illustrates the transition dynamics of
|
|
||||||
Romer-Solow Model. Namely, \((\bar{z}, \bar{l}, \bar{L},
|
|
||||||
\alpha)=(0.5, 0.5, 100, 0.33)\forall t<t_0\), then update to
|
|
||||||
the slider values when \(t>t_0\).
|
|
||||||
</p>
|
|
||||||
<div class="graph">
|
|
||||||
<div id="romer-solow-change-visualization"></div>
|
|
||||||
</div>
|
|
||||||
<div class="sliders">
|
|
||||||
<div style="padding-right: 20px">
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSCz0">\(\bar{z}_0:\)</label>
|
|
||||||
<span id="outputRSCz0">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSCz0"
|
|
||||||
min="0.1"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSCalpha0">\(\alpha_0:\)</label>
|
|
||||||
<span id="outputRSCalpha0">0.33</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSCalpha0"
|
|
||||||
min="0.01"
|
|
||||||
max="0.54"
|
|
||||||
step="0.01"
|
|
||||||
value="0.33"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSCL0">\(\bar{L}_0:\)</label>
|
|
||||||
<span id="outputRSCL0">100</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSCL0"
|
|
||||||
min="0"
|
|
||||||
max="200"
|
|
||||||
step="10"
|
|
||||||
value="100"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div style="padding-left: 20px">
|
|
||||||
<ul start="3">
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSCl0">\(\bar{l}_0:\)</label>
|
|
||||||
<span id="outputRSCl0">0.50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSCl0"
|
|
||||||
min="0.01"
|
|
||||||
max="0.99"
|
|
||||||
step="0.01"
|
|
||||||
value="0.50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<div class="slider">
|
|
||||||
<label for="sliderRSCt0">\(t_0:\)</label>
|
|
||||||
<span id="outputRSCt0">50</span>
|
|
||||||
<input
|
|
||||||
type="range"
|
|
||||||
id="sliderRSCt0"
|
|
||||||
min="0"
|
|
||||||
max="100"
|
|
||||||
step="1"
|
|
||||||
value="50"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<p>
|
|
||||||
Finally, it is clear that economies converge to their Balanced
|
|
||||||
Growth Path as desired—something slightly more convoluted
|
|
||||||
to prove from the complex expression for \(Y^*\) derived
|
|
||||||
earlier. For example, with an increase in \(\alpha_0\), output
|
|
||||||
grows at an increasing rate after the change, then increases at
|
|
||||||
a decreasing rate as it converges to the new higher Balanced
|
|
||||||
Growth Path. Increasing parameters \(\bar{z},\bar{l},\bar{L}\)
|
|
||||||
yield similar results, although the changes are visually less
|
|
||||||
obvious.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
<script src="/scripts/posts/models-of-production.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
@ -1,110 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<link href="/public/prism/prism.css" rel="stylesheet" />
|
|
||||||
<link href="/public/prism/prism-theme.css" rel="stylesheet" />
|
|
||||||
<script defer src="/public/prism/prism.js"></script>
|
|
||||||
<script
|
|
||||||
src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"
|
|
||||||
async
|
|
||||||
></script>
|
|
||||||
<title>practice makes perfect</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">practice makes perfect</h1>
|
|
||||||
<p class="post-meta">
|
|
||||||
<time datetime="2025-05-07">07/05/2025</time>
|
|
||||||
</p>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<div>
|
|
||||||
Today I improved my implementation skills with
|
|
||||||
<a
|
|
||||||
href="https://codeforces.com/contest/1833/problem/G"
|
|
||||||
target="_blank"
|
|
||||||
>Codeforces Round 874 Div. 3 Problem G</a
|
|
||||||
>. Despite not solving the problem after a full 45 minutes, I came
|
|
||||||
across the following realizations:
|
|
||||||
</div>
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
Don't jump into coding. <i>Fully</i> flesh out your implementation
|
|
||||||
in your head before you begin. This is tempting to do, especially
|
|
||||||
in a "competitive" environment. I tend to do this to avoid
|
|
||||||
thinking about troublesome aspects of the problem that I
|
|
||||||
<i>know</i> I'll have to face later. Going into problems with a
|
|
||||||
plan makes things much easier when coding but much harder up
|
|
||||||
front. It is easy (for me) to get lost in the black-boxing four
|
|
||||||
layers deep. Write it out, visualize it, and practice practice
|
|
||||||
practice.
|
|
||||||
<blockquote>
|
|
||||||
Considering my solution would've led me to uncover my core
|
|
||||||
misinterpretation of the problem:
|
|
||||||
<b>the tree does not have to be binary</b>. I developed a solution
|
|
||||||
for binary trees but the greedy logic cannot be extended to
|
|
||||||
general trees.
|
|
||||||
</blockquote>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Complex problems are, well, hard. You <i>have</i> to practice to
|
|
||||||
internalize patterns so you can focus on the <i>crux</i> of the
|
|
||||||
problem.
|
|
||||||
<blockquote>
|
|
||||||
I spent 10 minutes debugging retrieving the leaves of a tree
|
|
||||||
before even beginning to code the actual algorithm.
|
|
||||||
<b>1800 is out of my skill range</b> (for now!).
|
|
||||||
</blockquote>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<b>Do not let a single thought/assertion/fact go unturned</b>. I
|
|
||||||
made a litany of erroneous assertions in my time thinking about
|
|
||||||
this problem, some of which include:
|
|
||||||
</li>
|
|
||||||
<ul>
|
|
||||||
<li>The tree has to be binary (it does not).</li>
|
|
||||||
<li>
|
|
||||||
I can gather the leaves in arbitrary order (once again, this
|
|
||||||
doesn't generalize to trees).
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Ignore all cuts between identical nodes—it's fine! (I
|
|
||||||
didn't know why this was the case)
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
A set shouldn't be needed to track visited nodes in a
|
|
||||||
tree—slap it on anyway (this was superfluous and
|
|
||||||
should've immediately set off red flags that my parent-ignoring
|
|
||||||
policy in my BFS was wrong).
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
When processing a node in the "child-parent-child" pattern, just
|
|
||||||
pop off the next node from the queue (within binary/n-ary trees,
|
|
||||||
this is wrong—the leaves are gathered by <i>level</i>, so
|
|
||||||
the next node in the queue is not guaranteed to be the current's
|
|
||||||
sibling).
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<li>
|
|
||||||
Just because the solution passes the test cases does not mean it
|
|
||||||
is right. This specifically applies to problems near/outside your
|
|
||||||
skill range—create your own test cases.
|
|
||||||
</li>
|
|
||||||
</ol>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
@ -1,101 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<script
|
|
||||||
src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"
|
|
||||||
async
|
|
||||||
></script>
|
|
||||||
<title>the problem with cs curricula</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">the problem with cs curricula</h1>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<p>
|
|
||||||
Edsger Wybe Dijkstra's
|
|
||||||
<a
|
|
||||||
href="https://www.cs.utexas.edu/~EWD/transcriptions/EWD10xx/EWD1036.html"
|
|
||||||
target="_blank"
|
|
||||||
>"On the cruelty of really teaching computing science"</a
|
|
||||||
>
|
|
||||||
perfectly sums up my gripes with how Computer Science is taught at a
|
|
||||||
university level (at my school, at least).
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Succinctly put, my time learning computer science at my unnamed
|
|
||||||
college exemplified nearly everything he (and I) believe a CS
|
|
||||||
curriculum should <i>not do</i>:
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
Ignore the existential questions about computer programs (what are
|
|
||||||
they? why do they exist? can they want? what should they be used
|
|
||||||
for?)
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Ignore the notion of program behavior, i.e. provability (this is
|
|
||||||
set aside as an advanced core class, counterintuitively reserved
|
|
||||||
for a third or fourth year).
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Excessively simplify and frame new technologies with analogy,
|
|
||||||
effectively instilling maladaptive thinking patterns that fail to
|
|
||||||
extend to more novel problems
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Give up on doing the inverse of the above because it is too hard
|
|
||||||
for young students.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>
|
|
||||||
Walking out of my third year, I left with the sad realization that I
|
|
||||||
got by the majority of my classes by only understanding things as
|
|
||||||
they pertained to assignments and exams.
|
|
||||||
<b>And by "got by", I mean straight A's</b>.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
I always knew something was wrong with how my school taught computer
|
|
||||||
science (despite it being the biggest major as of 2025). As of late,
|
|
||||||
though, I realized the gargantuan amount of damage it caused to my
|
|
||||||
reasoning abilities. Damage that I have to reverse by, essentially,
|
|
||||||
doing everything all over again.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
My
|
|
||||||
<a
|
|
||||||
href="https://barrettruth.com/posts/algorithms/competitive-programming-log.html"
|
|
||||||
target="_blank"
|
|
||||||
>competitive programming journey</a
|
|
||||||
>
|
|
||||||
epitomizes this point: to this day I struggle with reasoning,
|
|
||||||
argumentation, and understanding program behavior. I know how a
|
|
||||||
segment tree works but can't formalize the constraints of a problem.
|
|
||||||
I can do dynamic programming on trees but I can barely manipulate
|
|
||||||
and work with primitive mathematical concepts such as the \(gcd\)
|
|
||||||
function. I cannot think of a more useless skillset.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Nearly all of this is my fault. However,
|
|
||||||
<i
|
|
||||||
>it should not be possible for this to happen in a computer
|
|
||||||
science curriculum</i
|
|
||||||
>. In other words, Dijkstra is right.
|
|
||||||
</p>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
@ -1,37 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<link href="/public/prism/prism.css" rel="stylesheet" />
|
|
||||||
<link href="/public/prism/prism-theme.css" rel="stylesheet" />
|
|
||||||
<script defer src="/public/prism/prism.js"></script>
|
|
||||||
<title>building an os</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">building an os</h1>
|
|
||||||
<p class="post-meta">
|
|
||||||
<time datetime="2025-04-15">15/04/2025</time>
|
|
||||||
</p>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<h2>introduction</h2>
|
|
||||||
<p>
|
|
||||||
wip
|
|
||||||
</p>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
@ -1,95 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<title>designing this website</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">designing this website</h1>
|
|
||||||
<p class="post-meta">
|
|
||||||
<time datetime="2024-06-18">18/06/2024</time>
|
|
||||||
</p>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<h2>HTML, JavaScript, and CSS</h2>
|
|
||||||
<p>That's all there is to it.</p>
|
|
||||||
<p>I thought about using the following frameworks:</p>
|
|
||||||
<ol>
|
|
||||||
<li><a target="blank" href="https://react.dev/">React.js</a></li>
|
|
||||||
<li><a target="blank" href="https://nextjs.org/">Next.js</a></li>
|
|
||||||
<li><a target="blank" href="https://gohugo.io/">Hugo</a></li>
|
|
||||||
<li><a target="blank" href="https://astro.build/">Astro</a></li>
|
|
||||||
</ol>
|
|
||||||
<p>
|
|
||||||
But I did not actually <i>need</i> any of them to make this site
|
|
||||||
look decent.
|
|
||||||
</p>
|
|
||||||
<h2>What I've Learned</h2>
|
|
||||||
<p>
|
|
||||||
Of course, most people build simple websites like these to learn a
|
|
||||||
new technology or framework, not to use an optimal tool. That's
|
|
||||||
actually why I
|
|
||||||
<a
|
|
||||||
target="blank"
|
|
||||||
href="/posts/software/from-github-pages-to-aws.html"
|
|
||||||
>hosted this website on AWS</a
|
|
||||||
>.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Building this website with truly bare-bones technologies has made me
|
|
||||||
appreciate <i>why</i> these web frameworks have emerged.
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
Writing JavaScript to manipulate the DOM works just fine but lacks
|
|
||||||
the readability and composability that many JavaScript frameworks
|
|
||||||
bring to the table.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Re-using code is odd. For example, I created a
|
|
||||||
"common.js" with general utilities—there is zero
|
|
||||||
indication (both to me and my language servers) that these
|
|
||||||
functions are exposed to other scripts included by the same HTML
|
|
||||||
file.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
JSX is great. Dynamically inserting HTML as raw strings or writing
|
|
||||||
them line by line with the DOM is a pain, and a verbose one at
|
|
||||||
that.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Similarly, CSS styling (inline/stylesheet) works at the small
|
|
||||||
scale. However, with styles being completely divorced from the
|
|
||||||
HTML itself, much is left to be desired.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Reusing HTML, styles, and JavaScript feels extremely fragile.
|
|
||||||
Innovative type-safe, optimized, and composable solutions
|
|
||||||
definitely have their place in the web.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<b>You can be efficient with HTML, JS, and CSS.</b> My iteration
|
|
||||||
speed on this site versus other React.js/MDX blogs I have
|
|
||||||
worked on is the same if not faster. While this may be a testament
|
|
||||||
to my lack of JavaScript experience, I think people conclude too
|
|
||||||
early that their task is beyond the technologies that form the
|
|
||||||
foundation of the web today.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
@ -1,142 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<title>from github pages to AWS</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">from github pages to AWS</h1>
|
|
||||||
<p class="post-meta">
|
|
||||||
<time datetime="2024-06-15">15/06/2024</time>
|
|
||||||
</p>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<h2>pages begone</h2>
|
|
||||||
<p>
|
|
||||||
Though GitHub Pages may work for hosting your small, internal,
|
|
||||||
static site, I don't think Pages is the right choice for
|
|
||||||
<i>anyone</i>:
|
|
||||||
</p>
|
|
||||||
<ol>
|
|
||||||
<li>Bandwidth caps: scale your software by default</li>
|
|
||||||
<li>
|
|
||||||
Limited SEO control: not a downside if you don't want
|
|
||||||
traffic...
|
|
||||||
</li>
|
|
||||||
<li>Static & client-side only: keep your options open</li>
|
|
||||||
</ol>
|
|
||||||
<h2>why aws?</h2>
|
|
||||||
<p>
|
|
||||||
I used pages before because I had little knowledge of cloud
|
|
||||||
computing.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
This is not a justification—if you are a software developer,
|
|
||||||
learn it.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
Prior to hosting this site, I developed and hosted an internal
|
|
||||||
application with Google Cloud while working at
|
|
||||||
<a href="https://nthventure.com">nth Venture</a>. Getting a single
|
|
||||||
Compute Engine up and running made me step away from cloud for the
|
|
||||||
entire next year.
|
|
||||||
</p>
|
|
||||||
<p>AWS is:</p>
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
Industry standard: not an actual reason but it convinced me
|
|
||||||
nonetheless
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Secure: soon to be used by VISA, which holds security to a nearly
|
|
||||||
stupid extent (seriously, I can't even clone a repository)
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Well-documented: everything in the documentation worked
|
|
||||||
<i>first try</i>. This is extremely rare in software, so
|
|
||||||
compliments to Chef Bezos.
|
|
||||||
</li>
|
|
||||||
</ol>
|
|
||||||
<h2>the setup</h2>
|
|
||||||
<p>
|
|
||||||
This website is pure HTML, CSS, and JavaScript.
|
|
||||||
</p>
|
|
||||||
<p>AWS-wise, I use:</p>
|
|
||||||
<ul>
|
|
||||||
<li>S3, to host the content (static for now)</li>
|
|
||||||
<li>CloudFront, to serve and cache said content</li>
|
|
||||||
<li>Route53, to manage routing</li>
|
|
||||||
<li>
|
|
||||||
GoDaddy, to reserve
|
|
||||||
<a target="_blank" href="https://barrettruth.com"
|
|
||||||
>barrettruth.com</a
|
|
||||||
>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<p>A user request can be modelled as follows:</p>
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
A user accesses the website by typing barrettruth.com in their
|
|
||||||
browser.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
GoDaddy's DNS servers are queried, which translate the
|
|
||||||
domain name to my Route53's IP address.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Route53 then routes the request to my CloudFront distribution
|
|
||||||
associated with my S3 bucket.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
CloudFront checks its edge caches for the requested content. If
|
|
||||||
the content is stale or not cached, CloudFront fetches the content
|
|
||||||
from S3. Otherwise, it uses the cached content from an edge
|
|
||||||
server.
|
|
||||||
</li>
|
|
||||||
<li>CloudFront returns the content to the user's browser.</li>
|
|
||||||
</ol>
|
|
||||||
<div style="display: flex; justify-content: center">
|
|
||||||
<img
|
|
||||||
width="50%"
|
|
||||||
src="/public/posts/website-design.webp"
|
|
||||||
alt="system design of my portfolio website"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<h2>difficulties</h2>
|
|
||||||
<p>
|
|
||||||
The hardest part of hosting this website was interfacing with
|
|
||||||
GoDaddy.
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
For example, configuring SSL certificates with GoDaddy is needlessly
|
|
||||||
challenging. Follow
|
|
||||||
<a
|
|
||||||
target="_blank"
|
|
||||||
href="https://docs.aws.amazon.com/amplify/latest/userguide/to-add-a-custom-domain-managed-by-godaddy.html"
|
|
||||||
>AWS's guide</a
|
|
||||||
>
|
|
||||||
if you really want to. Otherwise,
|
|
||||||
<a
|
|
||||||
target="_blank"
|
|
||||||
href="https://www.godaddy.com/help/edit-my-domain-nameservers-664"
|
|
||||||
>configure your GoDaddy nameservers</a
|
|
||||||
>
|
|
||||||
and point them to your own DNS service (like Route53) instead.
|
|
||||||
</p>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
@ -1,137 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<link href="/public/prism/prism.css" rel="stylesheet" />
|
|
||||||
<link href="/public/prism/prism-theme.css" rel="stylesheet" />
|
|
||||||
<script defer src="/public/prism/prism.js"></script>
|
|
||||||
<title>hosting a git server</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">hosting a git server</h1>
|
|
||||||
<p class="post-meta">
|
|
||||||
<time datetime="2025-05-07">7/05/2025</time>
|
|
||||||
</p>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<h2>why</h2>
|
|
||||||
<p>
|
|
||||||
No reason. Perhaps to host personal files in the future. AWS's
|
|
||||||
<a href="" target="_blank">micro free tier</a> is great, too.
|
|
||||||
</p>
|
|
||||||
<h2>what</h2>
|
|
||||||
<ul>
|
|
||||||
<li>Write my own git web ui</li>
|
|
||||||
<li>Support clones from my own website</li>
|
|
||||||
<li>Host private files on my git ui</li>
|
|
||||||
</ul>
|
|
||||||
<h2>the process</h2>
|
|
||||||
<ol>
|
|
||||||
<p>
|
|
||||||
I detail self-hosting a git server on an AWS t2.micro instance
|
|
||||||
("free" for 1 year) as of May 2025.
|
|
||||||
<a
|
|
||||||
href="https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols"
|
|
||||||
target="_blank"
|
|
||||||
>Git's instructions</a
|
|
||||||
>
|
|
||||||
were vastly outdated so hopefully this saves a lucky reader some
|
|
||||||
time.
|
|
||||||
</p>
|
|
||||||
<li>
|
|
||||||
Create the ec2 instance with setup wizard and add {in,out}bound
|
|
||||||
rules for {SSH,HTTP,HTTPS,your ip} in the wizard security group.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Use an elastic ip (free) to address public ip
|
|
||||||
reassigning—this is a bother when ssh'ing (new verb?) into
|
|
||||||
the box locally and/or configuring an Apache HTTP server.
|
|
||||||
</li>
|
|
||||||
<li>Understand bare git repositories and the ssh protocol.</li>
|
|
||||||
<li>
|
|
||||||
Configure a keypair and ssh in (the official instructions are
|
|
||||||
fine for this). I moved it to <code>~/.ssh</code> and added an
|
|
||||||
alias in <code>~/.ssh/config</code> for convenience. Clone a repo
|
|
||||||
on the server to test.
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
Set up a git daemon for <code>git://</code> protocol cloning at
|
|
||||||
your own risk.
|
|
||||||
</li>
|
|
||||||
<li>Set up an Apache HTTPD server.</li>
|
|
||||||
<li>
|
|
||||||
Configure file permissions for the new user:
|
|
||||||
<ol>
|
|
||||||
<li><code>sudo chown -R git:git /srv/git</code></li>
|
|
||||||
<li><code>sudo chgrp -R apache /srv/git</code></li>
|
|
||||||
</ol>
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
To deal with "dubious ownership" issues when cloning with HTTPS, I
|
|
||||||
needed to add <b>exactly</b> the following configuration to
|
|
||||||
<code>/etc/gitconfig</code>.
|
|
||||||
<i>No group permission finagling will work</i>! Git only allows
|
|
||||||
cloning repositories that are owned by the user. If you wish to
|
|
||||||
clone via SSH with, say, user A, this same user must also be
|
|
||||||
employed by your HTTP server to clone the files (customize
|
|
||||||
HTTPD/whatever you're using accordingly).
|
|
||||||
</li>
|
|
||||||
<div class="code" data-file="gitconfig.git"></div>
|
|
||||||
<li>
|
|
||||||
Security-wise, set up TLS/HTTPS with
|
|
||||||
<a href="https://letsencrypt.org/" target="_blank"
|
|
||||||
>Let's Encrypt</a
|
|
||||||
>. Further, only allow authorized people to actually
|
|
||||||
<i>push</i> to the server. The following is my HTTPD configuration
|
|
||||||
file
|
|
||||||
<code>/etc/apache/conf.d/git-server.conf</code>
|
|
||||||
hosting the web ui at the root and clone urls at
|
|
||||||
<code>/git</code>:
|
|
||||||
</li>
|
|
||||||
<div class="code" data-file="git-server.apacheconf"></div>
|
|
||||||
<li>
|
|
||||||
There are a variety of choices for web ui, including
|
|
||||||
<a href="https://git.zx2c4.com/cgit/" target="_blank">cgit</a>,
|
|
||||||
<a href="https://git-scm.com/docs/gitweb" target="_blank">gitweb</a>
|
|
||||||
(I do not recommend this—the scripts are ancient and require
|
|
||||||
manual tuning), and some even heavier options that allow for
|
|
||||||
further customization. I am not a fan of viewing code on the web,
|
|
||||||
so you cannot in
|
|
||||||
<a href="https://git.barrettruth.com" target="_blank"
|
|
||||||
>my custom ui</a
|
|
||||||
>. I spin up a simple python server to walk the projects in
|
|
||||||
<code>/srv/git</code> and configured a systemd service to run it
|
|
||||||
in the ec2 box:
|
|
||||||
</li>
|
|
||||||
<div class="code" data-file="git-server-ui.systemd">
|
|
||||||
</div>
|
|
||||||
</ol>
|
|
||||||
<h2>lessons</h2>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
<b>It feels great to do things yourself</b>: I used GPT-4o for
|
|
||||||
linux server command help, that was about it
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<b>Always ask "what is this?" before using something</b>: this
|
|
||||||
would've saved me hours of realizing a 12 year old perl script
|
|
||||||
should not have been running my git ui.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
@ -1,111 +0,0 @@
|
||||||
<!doctype html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="UTF-8" />
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
||||||
<link rel="stylesheet" href="/styles/common.css" />
|
|
||||||
<link rel="stylesheet" href="/styles/post.css" />
|
|
||||||
<link rel="icon" type="image/webp" href="/public/logo.webp" />
|
|
||||||
<title>my cp setup</title>
|
|
||||||
</head>
|
|
||||||
<body class="graph-background">
|
|
||||||
<site-header></site-header>
|
|
||||||
<main class="main">
|
|
||||||
<div class="post-container">
|
|
||||||
<header class="post-header">
|
|
||||||
<h1 class="post-title">my cp setup</h1>
|
|
||||||
<p class="post-meta">
|
|
||||||
<time datetime="2025-04-15">15/04/2025</time>
|
|
||||||
</p>
|
|
||||||
</header>
|
|
||||||
<article class="post-article">
|
|
||||||
<p>
|
|
||||||
Source code
|
|
||||||
<a
|
|
||||||
href="https://github.com/barrett-ruth/dots/blob/main/nvim/lua/cp.lua"
|
|
||||||
target="_blank"
|
|
||||||
>here</a
|
|
||||||
>.
|
|
||||||
</p>
|
|
||||||
<h2>my goals</h2>
|
|
||||||
<p>
|
|
||||||
I wanted the following features in my competitive programming (cp)
|
|
||||||
setup:
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
<u>Flexibility</u>: support various environments (codeforces,
|
|
||||||
USACO, cses, etc.) with ease
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<u>Speed</u>: instantaneous, non-blocking running/debugging;
|
|
||||||
automatic environment configuration and easy code testing
|
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<u>Editor-Agnostic</u>: while I do provide first-in-class NeoVim
|
|
||||||
integration for my setup, it should be easily portable to
|
|
||||||
<i>any</i> os/editor
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
<h2>the solution</h2>
|
|
||||||
<p>
|
|
||||||
Some (POSIX-compliant, of course) scripts and a
|
|
||||||
<code>makefile</code> are more than enough. I created the following
|
|
||||||
intuitive way to interact with my CP setup:
|
|
||||||
</p>
|
|
||||||
<ol>
|
|
||||||
<li>
|
|
||||||
<code>make setup</code>: populate the environment with
|
|
||||||
configurations in <code>~/.config/cp-template</code> for
|
|
||||||
<code>clang-format</code> and <code>clangd</code>
|
|
||||||
</li>
|
|
||||||
<li><code>make run file</code></li>
|
|
||||||
<li><code>make debug file</code></li>
|
|
||||||
<li><code>make clean</code></li>
|
|
||||||
</ol>
|
|
||||||
<p>
|
|
||||||
That's it. The <code>makefile</code> relies on some scripts that
|
|
||||||
compile code and run the corresponding executables.
|
|
||||||
</p>
|
|
||||||
<h2>neovim integration</h2>
|
|
||||||
<div style="display: flex; justify-content: center">
|
|
||||||
<img
|
|
||||||
width="80%"
|
|
||||||
src="/public/posts/my-cp-setup/cp-setup.webp"
|
|
||||||
alt="screenshot of my neovim competitive programming setup"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<p>
|
|
||||||
Leveraging
|
|
||||||
<a href="https://github.com/L3MON4D3/LuaSnip" target="_blank"
|
|
||||||
>LuaSnip</a
|
|
||||||
>, a custom <code>CP</code> user command, and some scripting for
|
|
||||||
window management and asynchronous jobs, I'm able to:
|
|
||||||
</p>
|
|
||||||
<ul>
|
|
||||||
<li>
|
|
||||||
Asynchronously format, run, and debug code (<code
|
|
||||||
>:h vim.system</code
|
|
||||||
>)
|
|
||||||
</li>
|
|
||||||
<li>Use a three-window (input, output, and code) view</li>
|
|
||||||
<li>Toggle between problems instantly (yes, the windows update)</li>
|
|
||||||
<li>
|
|
||||||
Automatically populate my coding buffers with competition-specific
|
|
||||||
templates (i.e. USACO, CSES, etc.)
|
|
||||||
</li>
|
|
||||||
<li>Run the code from the CLI in less than a second</li>
|
|
||||||
<li>
|
|
||||||
Easily tweak and change the setup—there's absolutely nothing
|
|
||||||
fancy.
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</article>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
<site-footer></site-footer>
|
|
||||||
<script src="/scripts/common.js"></script>
|
|
||||||
<script src="/scripts/post.js"></script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
1
public/d3.js
vendored
1
public/d3.js
vendored
|
|
@ -1,4 +1,3 @@
|
||||||
// https://d3js.org v7.9.0 Copyright 2010-2023 Mike Bostock
|
|
||||||
!(function (t, n) {
|
!(function (t, n) {
|
||||||
"object" == typeof exports && "undefined" != typeof module
|
"object" == typeof exports && "undefined" != typeof module
|
||||||
? n(exports)
|
? n(exports)
|
||||||
|
|
|
||||||
|
|
@ -1,132 +0,0 @@
|
||||||
/**
|
|
||||||
* Github Light theme for Prism.js
|
|
||||||
* Based on Github: https://github.com
|
|
||||||
* @author Katorly
|
|
||||||
*/
|
|
||||||
/* General */
|
|
||||||
pre[class*="language-"],
|
|
||||||
code[class*="language-"] {
|
|
||||||
color: #24292f;
|
|
||||||
font-size: 13px;
|
|
||||||
text-shadow: none;
|
|
||||||
font-family: Consolas, Monaco, "Andale Mono", "Ubuntu Mono", monospace;
|
|
||||||
direction: ltr;
|
|
||||||
text-align: left;
|
|
||||||
white-space: pre;
|
|
||||||
word-spacing: normal;
|
|
||||||
word-break: normal;
|
|
||||||
line-height: 1.5;
|
|
||||||
-moz-tab-size: 4;
|
|
||||||
-o-tab-size: 4;
|
|
||||||
tab-size: 4;
|
|
||||||
-webkit-hyphens: none;
|
|
||||||
-moz-hyphens: none;
|
|
||||||
-ms-hyphens: none;
|
|
||||||
hyphens: none;
|
|
||||||
}
|
|
||||||
pre[class*="language-"]::selection,
|
|
||||||
code[class*="language-"]::selection,
|
|
||||||
pre[class*="language-"]::mozselection,
|
|
||||||
code[class*="language-"]::mozselection {
|
|
||||||
text-shadow: none;
|
|
||||||
background: #9fc6e9;
|
|
||||||
}
|
|
||||||
@media print {
|
|
||||||
pre[class*="language-"],
|
|
||||||
code[class*="language-"] {
|
|
||||||
text-shadow: none;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pre[class*="language-"] {
|
|
||||||
padding: 1em;
|
|
||||||
margin: 0.5em 0;
|
|
||||||
overflow: auto;
|
|
||||||
background: #f4f4f4;
|
|
||||||
}
|
|
||||||
:not(pre) > code[class*="language-"] {
|
|
||||||
padding: 0.1em 0.3em;
|
|
||||||
border-radius: 0.3em;
|
|
||||||
color: #24292f;
|
|
||||||
background: #eff1f3;
|
|
||||||
}
|
|
||||||
/* Line highlighting */
|
|
||||||
pre[data-line] {
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
pre[class*="language-"] > code[class*="language-"] {
|
|
||||||
position: relative;
|
|
||||||
z-index: 1;
|
|
||||||
}
|
|
||||||
.line-highlight {
|
|
||||||
position: absolute;
|
|
||||||
left: 0;
|
|
||||||
right: 0;
|
|
||||||
padding: inherit 0;
|
|
||||||
margin-top: 1em;
|
|
||||||
background: #fff8c5;
|
|
||||||
box-shadow: inset 5px 0 0 #eed888;
|
|
||||||
z-index: 0;
|
|
||||||
pointer-events: none;
|
|
||||||
line-height: inherit;
|
|
||||||
white-space: pre;
|
|
||||||
}
|
|
||||||
/* Tokens */
|
|
||||||
.namespace {
|
|
||||||
opacity: 0.7;
|
|
||||||
}
|
|
||||||
.token.comment,
|
|
||||||
.token.prolog,
|
|
||||||
.token.doctype,
|
|
||||||
.token.cdata {
|
|
||||||
color: #6e7781;
|
|
||||||
}
|
|
||||||
.token.punctuation {
|
|
||||||
color: #24292f;
|
|
||||||
}
|
|
||||||
.token.property,
|
|
||||||
.token.tag,
|
|
||||||
.token.boolean,
|
|
||||||
.token.number,
|
|
||||||
.token.constant,
|
|
||||||
.token.symbol,
|
|
||||||
.token.deleted {
|
|
||||||
color: #0550ae;
|
|
||||||
}
|
|
||||||
.token.selector,
|
|
||||||
.token.attr-name,
|
|
||||||
.token.string,
|
|
||||||
.token.char,
|
|
||||||
.token.builtin,
|
|
||||||
.token.inserted {
|
|
||||||
color: #0a3069;
|
|
||||||
}
|
|
||||||
.token.operator,
|
|
||||||
.token.entity,
|
|
||||||
.token.url,
|
|
||||||
.language-css .token.string,
|
|
||||||
.style .token.string {
|
|
||||||
color: #0550ae;
|
|
||||||
}
|
|
||||||
.token.atrule,
|
|
||||||
.token.attr-value,
|
|
||||||
.token.keyword {
|
|
||||||
color: #cf222e;
|
|
||||||
}
|
|
||||||
.token.function {
|
|
||||||
color: #8250df;
|
|
||||||
}
|
|
||||||
.token.regex,
|
|
||||||
.token.important,
|
|
||||||
.token.variable {
|
|
||||||
color: #0a3069;
|
|
||||||
}
|
|
||||||
.token.important,
|
|
||||||
.token.bold {
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
.token.italic {
|
|
||||||
font-style: italic;
|
|
||||||
}
|
|
||||||
.token.entity {
|
|
||||||
cursor: help;
|
|
||||||
}
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
/* PrismJS 1.30.0
|
|
||||||
https://prismjs.com/download.html#themes=prism&languages=markup+css+clike+javascript+apacheconf+bash+c+cpp+editorconfig+git+latex+lua+python+systemd+typoscript+vim+yaml&plugins=line-numbers */
|
|
||||||
code[class*=language-],pre[class*=language-]{color:#000;background:0 0;text-shadow:0 1px #fff;font-family:Consolas,Monaco,'Andale Mono','Ubuntu Mono',monospace;font-size:1em;text-align:left;white-space:pre;word-spacing:normal;word-break:normal;word-wrap:normal;line-height:1.5;-moz-tab-size:4;-o-tab-size:4;tab-size:4;-webkit-hyphens:none;-moz-hyphens:none;-ms-hyphens:none;hyphens:none}code[class*=language-] ::-moz-selection,code[class*=language-]::-moz-selection,pre[class*=language-] ::-moz-selection,pre[class*=language-]::-moz-selection{text-shadow:none;background:#b3d4fc}code[class*=language-] ::selection,code[class*=language-]::selection,pre[class*=language-] ::selection,pre[class*=language-]::selection{text-shadow:none;background:#b3d4fc}@media print{code[class*=language-],pre[class*=language-]{text-shadow:none}}pre[class*=language-]{padding:1em;margin:.5em 0;overflow:auto}:not(pre)>code[class*=language-],pre[class*=language-]{background:#f5f2f0}:not(pre)>code[class*=language-]{padding:.1em;border-radius:.3em;white-space:normal}.token.cdata,.token.comment,.token.doctype,.token.prolog{color:#708090}.token.punctuation{color:#999}.token.namespace{opacity:.7}.token.boolean,.token.constant,.token.deleted,.token.number,.token.property,.token.symbol,.token.tag{color:#905}.token.attr-name,.token.builtin,.token.char,.token.inserted,.token.selector,.token.string{color:#690}.language-css .token.string,.style .token.string,.token.entity,.token.operator,.token.url{color:#9a6e3a;background:hsla(0,0%,100%,.5)}.token.atrule,.token.attr-value,.token.keyword{color:#07a}.token.class-name,.token.function{color:#dd4a68}.token.important,.token.regex,.token.variable{color:#e90}.token.bold,.token.important{font-weight:700}.token.italic{font-style:italic}.token.entity{cursor:help}
|
|
||||||
pre[class*=language-].line-numbers{position:relative;padding-left:3.8em;counter-reset:linenumber}pre[class*=language-].line-numbers>code{position:relative;white-space:inherit}.line-numbers .line-numbers-rows{position:absolute;pointer-events:none;top:0;font-size:100%;left:-3.8em;width:3em;letter-spacing:-1px;border-right:1px solid #999;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.line-numbers-rows>span{display:block;counter-increment:linenumber}.line-numbers-rows>span:before{content:counter(linenumber);color:#999;display:block;padding-right:.8em;text-align:right}
|
|
||||||
File diff suppressed because one or more lines are too long
|
|
@ -1,31 +1,10 @@
|
||||||
@font-face {
|
@font-face {
|
||||||
font-family: "Signifier";
|
font-family: "Signifier";
|
||||||
src: url("public/signifier/Signifier-Regular.ttf");
|
src: url("/signifier/Signifier-Regular.ttf");
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
font-style: normal;
|
font-style: normal;
|
||||||
}
|
}
|
||||||
|
|
||||||
@font-face {
|
|
||||||
font-family: "Signifier";
|
|
||||||
src: url("public/signifier/Signifier-Italic.ttf");
|
|
||||||
font-weight: normal;
|
|
||||||
font-style: italic;
|
|
||||||
}
|
|
||||||
|
|
||||||
@font-face {
|
|
||||||
font-family: "Signifier";
|
|
||||||
src: url("public/signifier/Signifier-Bold.ttf");
|
|
||||||
font-weight: bold;
|
|
||||||
font-style: normal;
|
|
||||||
}
|
|
||||||
|
|
||||||
@font-face {
|
|
||||||
font-family: "Signifier";
|
|
||||||
src: url("public/signifier/Signifier-BoldItalic.ttf");
|
|
||||||
font-weight: bold;
|
|
||||||
font-style: italic;
|
|
||||||
}
|
|
||||||
|
|
||||||
html,
|
html,
|
||||||
body {
|
body {
|
||||||
font-family: "Signifier", serif;
|
font-family: "Signifier", serif;
|
||||||
63
public/styles/mdx.css
Normal file
63
public/styles/mdx.css
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
article h1, article h2, article h3,
|
||||||
|
.post-article h1, .post-article h2, .post-article h3 {
|
||||||
|
font-weight: normal;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
article h1, .post-article h1 {
|
||||||
|
padding-left: 1.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
article h2, .post-article h2 {
|
||||||
|
padding-left: 2em;
|
||||||
|
}
|
||||||
|
|
||||||
|
article h3, .post-article h3 {
|
||||||
|
padding-left: 2.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
article h1::before, .post-article h1::before {
|
||||||
|
content: "#";
|
||||||
|
}
|
||||||
|
|
||||||
|
article h2::before, .post-article h2::before {
|
||||||
|
content: "##";
|
||||||
|
}
|
||||||
|
|
||||||
|
article h3::before, .post-article h3::before {
|
||||||
|
content: "###";
|
||||||
|
}
|
||||||
|
|
||||||
|
article h1::before, article h2::before, article h3::before,
|
||||||
|
.post-article h1::before, .post-article h2::before, .post-article h3::before {
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
color: var(--topic-color, #000);
|
||||||
|
margin-right: 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
article img {
|
||||||
|
display: block;
|
||||||
|
margin: 2rem auto;
|
||||||
|
max-width: 100%;
|
||||||
|
height: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
article pre {
|
||||||
|
padding: 1rem;
|
||||||
|
overflow-x: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Heading with date styling */
|
||||||
|
article h2.heading-with-date, article h3.heading-with-date {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
article h2.heading-with-date .date, article h3.heading-with-date .date {
|
||||||
|
font-size: 0.8em;
|
||||||
|
color: #000;
|
||||||
|
font-weight: normal;
|
||||||
|
margin-left: 1rem;
|
||||||
|
}
|
||||||
|
|
@ -124,6 +124,5 @@ pre * {
|
||||||
.language-py,
|
.language-py,
|
||||||
.language-cc,
|
.language-cc,
|
||||||
.language-cpp {
|
.language-cpp {
|
||||||
/* override prism.js styles */
|
|
||||||
font-size: 0.8em !important;
|
font-size: 0.8em !important;
|
||||||
}
|
}
|
||||||
|
|
@ -1,136 +0,0 @@
|
||||||
const TERMINAL_PROMPT = "barrett@ruth:~$ ";
|
|
||||||
let clearing = false;
|
|
||||||
|
|
||||||
class SiteHeader extends HTMLElement {
  // Renders the terminal-style site header. The prompt shows the current
  // page's topic path; clicking the prompt either refreshes (on the home
  // page) or animates back home.
  connectedCallback() {
    const { pathname } = window.location;
    const onHomePage = pathname === "/" || pathname === "/index.html";
    const topic = this.getTopic();

    const promptText =
      topic === "" ? "barrett@ruth:~$" : `barrett@ruth:~$ ${topic}`;
    const clickHandler = onHomePage ? "refresh(event)" : "goHome(event)";

    this.innerHTML = `
      <header>
        <a href="/" style="text-decoration: none; color: inherit" onclick="${clickHandler}">
          <div class="terminal-container">
            <span class="terminal-prompt">${promptText}</span>
            <span class="terminal-cursor"></span>
          </div>
        </a>
        <div class="header-links">
          <a target="_blank" href="/public/resume.pdf">resume</a>
          <a target="_blank" href="/public/transcript.pdf">transcript</a>
          <a href="/about.html">about</a>
        </div>
      </header>
    `;
  }

  // Derives the "/topic/post" suffix shown in the prompt from the URL path.
  // Returns "" on the home page (or any path with no usable segments).
  getTopic() {
    const segments = window.location.pathname.split("/");

    if (segments.length === 2 && segments[1].endsWith(".html")) {
      return "/" + segments[1].replace(".html", "");
    }
    if (segments.length >= 3) {
      return "/" + segments.slice(2, -1).join("/").replace(".html", "");
    }
    return "";
  }
}
|
|
||||||
|
|
||||||
class SiteFooter extends HTMLElement {
  // Renders the static site footer: a delta glyph plus external links.
  connectedCallback() {
    this.innerHTML = `
      <footer>
        <span class="greek-delta">Δ</span>
        <div class="footer-links">
          <a target="_blank" href="https://git.barrettruth.com">git</a>
          <a target="_blank" href="https://www.linkedin.com/in/barrett-ruth/">linkedin</a>
          <a target="_blank" href="mailto:br.barrettruth@gmail.com">email</a>
        </div>
      </footer>
    `;
  }
}
|
|
||||||
|
|
||||||
customElements.define("site-header", SiteHeader);
customElements.define("site-footer", SiteFooter);

// Inject the shared header/footer styles exactly once per page load.
document.addEventListener("DOMContentLoaded", function () {
  if (document.querySelector("style#dynamic-styles")) return;

  const style = document.createElement("style");
  style.id = "dynamic-styles";
  style.innerHTML = `
      footer {
        padding: 20px;
        font-size: 1.5em;
        display: flex;
        align-items: center;
        justify-content: space-between;
      }

      .greek-delta {
        font-family: "Times New Roman", Times, serif;
        font-size: 1.5em;
      }

      .header-links a,
      .footer-links a {
        margin-left: 25px;
        text-decoration: none;
      }
    `;
  document.head.appendChild(style);
});
|
|
||||||
|
|
||||||
// Backspace-animates the topic suffix off the terminal prompt over `delay`
// milliseconds, then invokes `callback` (if given). Re-entrant calls are
// ignored while an animation is already running (guarded by `clearing`).
function clearPrompt(delay, callback) {
  if (clearing) return;
  clearing = true;

  const terminalPrompt = document.querySelector(".terminal-prompt");
  // Measure with textContent, not innerHTML: the characters are removed via
  // textContent below, and innerHTML counts entity/markup escapes (e.g.
  // "&amp;") as multiple characters, which would skew the removal count.
  const topicLength = terminalPrompt.textContent.length - TERMINAL_PROMPT.length;
  let removed = 0;

  function removeChar() {
    if (removed++ < topicLength) {
      terminalPrompt.textContent = terminalPrompt.textContent.slice(0, -1);
      setTimeout(removeChar, delay / topicLength);
    } else {
      clearing = false;
      callback?.();
    }
  }

  removeChar();
}
|
|
||||||
|
|
||||||
// Intercepts the header link click, animates the prompt clear, then
// navigates to the home page.
function goHome(e) {
  e.preventDefault();

  clearPrompt(500, () => {
    window.location.href = "/";
  });
}
|
|
||||||
|
|
||||||
// Maps a topic slug to its accent color; unknown topics fall back to black.
const getTopicColor = (topicName) => {
  const palette = {
    software: "#0073e6",
    "operating-systems": "#009975",
    algorithms: "#d50032",
    meditations: "#6a0dad",
  };

  // Object.hasOwn guards against prototype keys (e.g. "toString") so only
  // the four declared topics get a non-default color.
  return Object.hasOwn(palette, topicName) ? palette[topicName] : "#000000";
};
|
|
||||||
|
|
||||||
// Extracts the topic segment (/posts/<topic>/...) from the current URL;
// returns the sentinel "DNE" when the path is too shallow to contain one.
const urlToTopic = () => {
  const segments = window.location.pathname.split("/");
  return segments.length < 3 ? "DNE" : segments[2];
};
|
|
||||||
|
|
@ -1,10 +1,3 @@
|
||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
|
|
||||||
aws s3 sync . s3://barrettruth.com --delete \
|
pnpm build && aws s3 sync ./dist/ s3://barrettruth.com --delete
|
||||||
--exclude ".git/*" \
|
|
||||||
--exclude ".github/*" \
|
|
||||||
--exclude "readme.md" \
|
|
||||||
--exclude ".DS_Store" \
|
|
||||||
--exclude ".gitignore" \
|
|
||||||
--exclude "scripts/*.sh" \
|
|
||||||
--exclude "files/*"
|
|
||||||
|
|
|
||||||
142
scripts/index.js
142
scripts/index.js
|
|
@ -1,142 +0,0 @@
|
||||||
// Topic name -> ordered list of post titles rendered on the landing page.
// Insertion order here controls display order.
const postMapping = new Map(
  Object.entries({
    software: [
      "hosting a git server",
      "my cp setup",
      "from github pages to aws",
      "designing this website",
    ],
    "operating systems": ["building an os"],
    algorithms: [
      "competitive programming log",
      "leetcode daily",
      "practice makes perfect",
      "extrema circular buffer",
      "models of production",
    ],
    meditations: ["the problem with cs curricula"],
  }),
);
|
|
||||||
|
|
||||||
// Home-page logo click handler: clears topic highlighting and the rendered
// post list, then backspaces the prompt instead of reloading the page.
function refresh(e) {
  if (window.location.pathname !== "/") e.preventDefault();

  for (const topic of document.querySelectorAll(".topic a")) {
    topic.classList.remove("active");
    topic.style.color = "";
  }

  document.getElementById("posts").innerHTML = "";

  clearPrompt(500);
}
|
|
||||||
|
|
||||||
// Fills the #posts container with one underlined link per post under
// `topic`. Logs an error and renders nothing if the topic is unknown.
function renderPosts(topic) {
  const container = document.getElementById("posts");
  container.innerHTML = "";

  // Trim in case the caller passes a topic with surrounding whitespace
  // (e.g. "operating systems" taken from link text).
  const normalizedTopic = topic.trim();
  const titles = postMapping.get(normalizedTopic);

  if (!titles) {
    console.error(`No posts found for topic: ${normalizedTopic}`);
    return;
  }

  // URL slugs replace runs of whitespace with hyphens.
  const topicSlug = normalizedTopic.toLowerCase().replace(/\s+/g, "-");

  for (const title of titles) {
    if (typeof title !== "string") continue;

    const link = document.createElement("a");
    const postSlug = title.toLowerCase().replace(/\s+/g, "-");
    link.href = `/posts/${topicSlug}/${postSlug}.html`;
    link.textContent = title;
    link.style.textDecoration = "underline";

    const wrapper = document.createElement("div");
    wrapper.classList.add("post");
    wrapper.appendChild(link);
    container.appendChild(wrapper);
  }
}
|
|
||||||
|
|
||||||
// Guards against overlapping typing animations.
let typing = false;

// Topic-link click handler: types " /<topic>" into the terminal prompt one
// character at a time, then renders that topic's post list. Clicks are
// ignored while an animation runs or when the topic is already active.
function typechars(e) {
  e.preventDefault();

  if (e.target.classList.contains("active") || typing) return;
  typing = true;

  const topic = e.target.textContent;
  const terminalText = ` /${topic.toLowerCase()}`;
  const terminalPrompt = document.querySelector(".terminal-prompt");
  // A shorter clear delay when a previous topic is still in the prompt.
  const hasTopicSuffix =
    terminalPrompt.innerHTML.length > TERMINAL_PROMPT.length;
  const delay = hasTopicSuffix ? 250 : 500;

  clearPrompt(delay, () => {
    let index = 0;

    function typechar() {
      if (index < terminalText.length) {
        terminalPrompt.innerHTML += terminalText.charAt(index++);
        setTimeout(typechar, delay / terminalText.length);
      } else {
        renderPosts(topic);
        typing = false;
      }
    }

    typechar();
  });
}
|
|
||||||
|
|
||||||
// Reset the prompt to its bare form before unload so navigating back never
// shows a stale topic suffix.
window.addEventListener("beforeunload", () => {
  document.querySelector(".terminal-prompt").innerHTML = TERMINAL_PROMPT;
});

// Wire hover and click color behavior for the landing-page topic links.
document.addEventListener("DOMContentLoaded", function () {
  const topics = document.querySelectorAll(".topic a");

  topics.forEach((topic) => {
    // The topic slug is the second class on the link's parent element.
    const topicName = topic.parentElement.className.split(" ")[1];

    topic.addEventListener("mouseenter", () => {
      topic.style.color = getTopicColor(topicName);
    });

    topic.addEventListener("mouseleave", () => {
      // Keep the accent color on the active topic only.
      if (!topic.classList.contains("active")) {
        topic.style.color = "";
      }
    });

    topic.addEventListener("click", (e) => {
      e.preventDefault();

      if (topic.classList.contains("active")) return;

      for (const other of topics) {
        other.classList.remove("active");
        other.style.color = "";
      }

      topic.classList.add("active");
      document.getElementById("posts").innerHTML = "";
      topic.style.color = getTopicColor(topicName);
    });
  });
});
|
|
||||||
|
|
@ -6,5 +6,5 @@ if [ -z "$1" ]; then
|
||||||
fi
|
fi
|
||||||
|
|
||||||
aws cloudfront create-invalidation \
|
aws cloudfront create-invalidation \
|
||||||
--distribution-id YOUR_DISTRIBUTION_ID \
|
--distribution-id "$1" \
|
||||||
--paths "/*"
|
--paths "/*"
|
||||||
|
|
|
||||||
|
|
@ -1,62 +0,0 @@
|
||||||
// Markdown-style prefix for each supported heading level.
const tagToHeader = new Map([
  ["H2", "#"],
  ["H3", "##"],
]);

// Prepend a colored markdown marker ("# " / "## ") to a heading element.
const setStyle = (h) => {
  const prefix = tagToHeader.get(h.tagName) ?? "";
  const marker = document.createElement("span");
  marker.textContent = `${prefix} `;
  marker.style.color = getTopicColor(urlToTopic());
  h.prepend(marker);
};
|
|
||||||
|
|
||||||
// On load: publish the per-topic accent color as a CSS custom property
// and decorate every post heading with its markdown marker.
document.addEventListener("DOMContentLoaded", () => {
  document.documentElement.style.setProperty(
    "--topic-color",
    getTopicColor(urlToTopic()),
  );

  for (const heading of document.querySelectorAll(".post-article h2")) {
    setStyle(heading);
  }
  for (const heading of document.querySelectorAll(".post-article h3")) {
    setStyle(heading);
  }
});
|
|
||||||
|
|
||||||
// Center every `.code` container and asynchronously load the file it
// references for syntax highlighting.
document.addEventListener("DOMContentLoaded", () => {
  for (const block of document.querySelectorAll(".code")) {
    block.style.display = "flex";
    block.style.justifyContent = "center";
    loadCode(block);
  }
});
|
|
||||||
|
|
||||||
/**
 * Fetch the source file named by a `.code` element's data-file
 * attribute and render it as a Prism-highlighted <pre><code> block
 * appended to that element. Fetch failures are logged, not rethrown.
 */
async function loadCode(e) {
  const { file } = e.dataset;
  const language = file.slice(file.lastIndexOf(".") + 1);

  // Pathname segments: ["", <section>, <topic>, <post>.<ext>].
  const segments = window.location.pathname.split("/");
  const topic = segments[2];
  const rawPost = segments[3];
  const post = rawPost.substring(0, rawPost.lastIndexOf("."));

  try {
    const path = `/public/code/${topic}/${post}/${file}`;
    const response = await fetch(path);
    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(
        `Failed to fetch ${path}: ${response.status} ${response.statusText}\n${errorText}`,
      );
    }

    const code = await response.text();

    // textContent (not innerHTML) keeps the fetched source inert.
    const codeElement = document.createElement("code");
    codeElement.className = `language-${language}`;
    codeElement.textContent = code;

    const pre = document.createElement("pre");
    pre.appendChild(codeElement);
    e.appendChild(pre);

    Prism.highlightElement(codeElement);
  } catch (error) {
    console.error(error);
  }
}
|
|
||||||
|
|
@ -1,696 +0,0 @@
|
||||||
/**
 * Wire each model slider to re-render its graph and mirror its value
 * into the adjacent output element, then return the current numeric
 * value of every parameter (parsed from the matching output elements).
 *
 * Element ids follow `slider<prefix><param>` / `output<prefix><param>`.
 */
function setUpParameters(render, parameters, modelPrefix) {
  for (const param of parameters) {
    const slider = document.getElementById(`slider${modelPrefix}${param}`);
    slider.oninput = () => {
      slider.previousElementSibling.innerText = slider.value;
      render();
    };
  }

  return parameters.map((param) =>
    parseFloat(
      document.getElementById(`output${modelPrefix}${param}`).textContent,
    ),
  );
}
|
|
||||||
|
|
||||||
// Render the Solow-model diagram into #solow-visualization: output,
// depreciation, and investment curves over capital K, plus a dashed
// marker at the steady state. Passes itself to setUpParameters so any
// slider input triggers a full redraw.
function drawSolowGraph() {
  const L = 150,
    K_MAX = 500,
    margin = { top: 20, right: 30, bottom: 20, left: 50 };

  // Current slider values: productivity A, depreciation d, savings
  // rate s, capital share alpha ("S"-prefixed slider/output ids).
  const [A, d, s, alpha] = setUpParameters(
    drawSolowGraph,
    ["A", "d", "s", "alpha"],
    "S",
  );
  // Cobb–Douglas production, linear depreciation, proportional investment.
  const solowOutput = (K) => A * Math.pow(K, alpha) * Math.pow(L, 1 - alpha);
  const solowDepreciation = (K) => d * K;
  const solowInvestment = (Y) => s * Y;

  const container = document.getElementById("solow-visualization");
  const width = container.clientWidth - margin.left - margin.right;
  const height = container.clientHeight - margin.top - margin.bottom;

  // Full redraw: wipe any previously rendered SVG.
  container.innerHTML = "";

  const svg = d3
    .select("#solow-visualization")
    .append("svg")
    .attr("width", width + margin.left + margin.right)
    .attr("height", height + margin.top + margin.bottom)
    .append("g")
    .attr("transform", `translate(${margin.left}, ${margin.top})`);

  // x axis: capital K in [0, K_MAX].
  const x = d3.scaleLinear().domain([0, K_MAX]).range([0, width]);
  svg
    .append("g")
    .attr("transform", `translate(0, ${height})`)
    .call(d3.axisBottom(x))
    .append("text")
    .attr("fill", "#000")
    .attr("x", width + 10)
    .attr("y", -10)
    .style("text-anchor", "end")
    .style("font-size", "1.5em")
    .text("K");

  // y axis: headroom of K_MAX / 10 above the output curve's maximum.
  const Y_MAX = solowOutput(K_MAX) + K_MAX / 10;
  const y = d3.scaleLinear().domain([0, Y_MAX]).range([height, 0]);
  svg
    .append("g")
    .call(d3.axisLeft(y))
    .append("text")
    .attr("fill", "#000")
    .attr("x", 0)
    .attr("y", -10)
    .style("text-anchor", "start")
    .style("font-size", "1.5em")
    .text("Y");

  // Output curve Y(K), stroked in the topic's accent color, with a
  // MathJax label at its right end.
  const outputData = Array.from({ length: K_MAX }, (_, k) => ({
    K: k,
    Y: solowOutput(k),
  }));
  svg
    .append("path")
    .datum(outputData)
    .attr("fill", "none")
    .attr("stroke", getTopicColor(urlToTopic()))
    .attr("stroke-width", 2)
    .attr(
      "d",
      d3
        .line()
        .x((d) => x(d.K))
        .y((d) => y(d.Y)),
    );
  svg
    .append("foreignObject")
    .attr("width", "2em")
    .attr("height", "2em")
    .attr("x", x(K_MAX))
    .attr("y", y(outputData[K_MAX - 1].Y))
    .append("xhtml:body")
    .style("font-size", "0.75em")
    .html(`\\(Y\\)`);

  // Depreciation line dK (red).
  const depreciationData = Array.from({ length: K_MAX }, (_, k) => ({
    K: k,
    Y: solowDepreciation(k),
  }));
  svg
    .append("path")
    .datum(depreciationData)
    .attr("fill", "none")
    .attr("stroke", "red")
    .attr("stroke-width", 2)
    .attr(
      "d",
      d3
        .line()
        .x((d) => x(d.K))
        .y((d) => y(d.Y)),
    );

  svg
    .append("foreignObject")
    .attr("width", "2em")
    .attr("height", "2em")
    .attr("x", x(K_MAX))
    .attr("y", y(depreciationData[K_MAX - 1].Y))
    .append("xhtml:body")
    .style("font-size", "0.75em")
    .append("xhtml:div")
    .html("\\(\\bar{d}K\\)");

  // Investment curve sY (purple), derived point-wise from the output curve.
  const investmentData = outputData.map((d) => ({
    K: d.K,
    Y: solowInvestment(d.Y),
  }));
  svg
    .append("path")
    .datum(investmentData)
    .attr("fill", "none")
    .attr("stroke", "purple")
    .attr("stroke-width", 2)
    .attr(
      "d",
      d3
        .line()
        .x((d) => x(d.K))
        .y((d) => y(d.Y)),
    );

  svg
    .append("foreignObject")
    .attr("width", "1em")
    .attr("height", "2em")
    .attr("x", x(K_MAX))
    .attr("y", y(investmentData[K_MAX - 1].Y))
    .append("xhtml:body")
    .style("font-size", "0.75em")
    .html("\\(I\\)");

  // Steady-state capital K*, the closed form of sY = dK under the
  // production function above.
  const k_star = L * Math.pow((s * A) / d, 1 / (1 - alpha));
  svg
    .append("line")
    .attr("x1", x(k_star))
    // dK*/s equals Y* at the steady state, so the dashed marker spans
    // from the output curve down to the axis.
    .attr("y1", y((d * k_star) / s))
    .attr("x2", x(k_star))
    .attr("y2", y(0))
    .attr("stroke", "black")
    .attr("stroke-width", 1)
    .attr("stroke-dasharray", "5,5");

  // Annotate the steady-state point with its rounded coordinates.
  const y_star = solowOutput(k_star);
  svg
    .append("foreignObject")
    .attr("width", "20em")
    .attr("height", "2em")
    .attr("x", x(k_star) - 40)
    .attr("y", y(y_star) - 40)
    .append("xhtml:body")
    .style("font-size", "0.75em")
    .html(`(${k_star.toFixed(0)}, ${y_star.toFixed(0)})`);
}
|
|
||||||
|
|
||||||
// Compact "approximately" notation: exponential form with zero
// fraction digits, prefixed with "~" (e.g. 12345 -> "~1e+4").
const formatNumber = (num) => `~${num.toExponential(0)}`;
|
|
||||||
|
|
||||||
// Inline style reused for the year header cells.
const normalFont = `style="font-weight: normal"`;

// Rebuild the Romer results table from the simulated series: one
// column for year 1 and one for every 20th year thereafter.
const updateRomerTable = (romerData) => {
  const header = document.getElementById("romer-table-header");
  const aRow = document.getElementById("row-A_t");
  const yRow = document.getElementById("row-Y_t");

  // Reset to the fixed label cells before appending data columns.
  header.innerHTML = `<th ${normalFont}>t</th>`;
  aRow.innerHTML = `<td class="romer-table-at">A_t</td>`;
  yRow.innerHTML = `<td class="romer-table-yt">Y_t</td>`;

  for (const d of romerData) {
    if (d.year !== 1 && d.year % 20 !== 0) continue;
    header.innerHTML += `<th ${normalFont}>${d.year}</th>`;
    aRow.innerHTML += `<td>${formatNumber(d.A)}</td>`;
    yRow.innerHTML += `<td>${formatNumber(d.Y)}</td>`;
  }
};
|
|
||||||
|
|
||||||
// Render the basic Romer growth model into #romer-visualization:
// simulate T_MAX periods of knowledge accumulation and plot log10 of
// output over time, then refresh the companion table. Redraws itself
// on slider input via setUpParameters.
function drawRomerGraph() {
  const T_MAX = 100,
    margin = { top: 20, right: 100, bottom: 20, left: 50 };

  // Slider values: research productivity z, labor L, research share l,
  // initial knowledge A0 ("R"-prefixed ids).
  const [z, L, l, A0] = setUpParameters(
    drawRomerGraph,
    ["z", "L", "l", "A0"],
    "R",
  );

  const container = document.getElementById("romer-visualization");
  const width = container.clientWidth - margin.left - margin.right;
  const height = container.clientHeight - margin.top - margin.bottom;

  // Full redraw: wipe any previously rendered SVG.
  container.innerHTML = "";

  const svg = d3
    .select("#romer-visualization")
    .append("svg")
    .attr("width", width + margin.left + margin.right)
    .attr("height", height + margin.top + margin.bottom)
    .append("g")
    .attr("transform", `translate(${margin.left}, ${margin.top})`);

  // Simulate: A grows by the research term each period; output uses
  // the non-research labor share. Y is stored as log10(Y_t).
  let A = A0;
  const romerData = [];

  for (let t = 1; t <= T_MAX; ++t) {
    const A_t = A * (1 + z * l * L);
    const Y_t = A_t * (1 - l) * L;
    romerData.push({ year: t, A: A_t, Y: Math.log10(Y_t) });
    A = A_t;
  }

  const x = d3.scaleLinear().domain([1, T_MAX]).range([0, width]);
  svg
    .append("g")
    .attr("transform", `translate(0, ${height})`)
    .call(d3.axisBottom(x))
    .append("text")
    .attr("fill", "#000")
    .attr("x", width + 10)
    .attr("y", -10)
    .style("text-anchor", "end")
    .style("font-size", "1.5em")
    .text("t");

  // y axis is scaled to the final (largest) log-output value.
  const y = d3
    .scaleLinear()
    .domain([0, romerData[romerData.length - 1].Y])
    .range([height, 0]);
  svg
    .append("g")
    .call(d3.axisLeft(y).ticks(10, d3.format(".1s")))
    .append("text")
    .attr("fill", "#000")
    .attr("x", 0)
    .attr("y", -10)
    .style("text-anchor", "start")
    .style("font-size", "1.5em")
    .text("log(Y)");

  svg
    .append("path")
    .datum(romerData)
    .attr("fill", "none")
    .attr("stroke", getTopicColor(urlToTopic()))
    .attr("stroke-width", 2)
    .attr(
      "d",
      d3
        .line()
        .x((d) => x(d.year))
        .y((d) => y(d.Y)),
    );

  svg
    .append("foreignObject")
    .attr("width", "4em")
    .attr("height", "2em")
    .attr("x", x(T_MAX))
    .attr("y", y(romerData[T_MAX - 1].Y))
    .append("xhtml:body")
    .style("font-size", "0.75em")
    .html(`\\(log_{10}Y\\)`);

  // NOTE(review): romerData.Y holds log10(Y_t), and updateRomerTable
  // formats it with toExponential — the table's Y_t row therefore
  // shows the *log* of output. Confirm this is intended.
  updateRomerTable(romerData);
}
|
|
||||||
|
|
||||||
// Render the Romer "change in research share" experiment into
// #romer-lchange-visualization: the economy runs with research share
// l0 = 0.1 up to year t0, then switches to the slider-chosen share,
// and log10 output is plotted with a dashed marker at the switch.
function drawRomerlGraph() {
  const T_MAX = 100,
    z = 0.01,
    L = 50,
    A0 = 50,
    margin = { top: 20, right: 100, bottom: 20, left: 50 };

  // Sliders: the post-change research share and the switch year
  // (no model prefix on these element ids).
  const [l, t0] = setUpParameters(drawRomerlGraph, ["lChange", "t0"], "");

  const container = document.getElementById("romer-lchange-visualization");
  const width = container.clientWidth - margin.left - margin.right;
  const height = container.clientHeight - margin.top - margin.bottom;

  // Full redraw: wipe any previously rendered SVG.
  container.innerHTML = "";

  const svg = d3
    .select("#romer-lchange-visualization")
    .append("svg")
    .attr("width", width + margin.left + margin.right)
    .attr("height", height + margin.top + margin.bottom)
    .append("g")
    .attr("transform", `translate(${margin.left}, ${margin.top})`);

  // Pre-change regime: fixed research share l_ = 0.1 for years 1..t0.
  let A = A0,
    l_ = 0.1;
  const romerData = [];

  for (let t = 1; t <= t0; ++t) {
    const A_t = A * (1 + z * l_ * L);
    const Y_t = A_t * (1 - l_) * L;
    romerData.push({ year: t, A: A_t, Y: Math.log10(Y_t) });
    A = A_t;
  }

  // Post-change regime: slider-chosen share l for years t0+1..T_MAX,
  // continuing from the accumulated knowledge A.
  for (let t = t0 + 1; t <= T_MAX; ++t) {
    const A_t = A * (1 + z * l * L);
    const Y_t = A_t * (1 - l) * L;
    romerData.push({ year: t, A: A_t, Y: Math.log10(Y_t) });
    A = A_t;
  }

  const x = d3.scaleLinear().domain([1, T_MAX]).range([0, width]);
  svg
    .append("g")
    .attr("transform", `translate(0, ${height})`)
    .call(d3.axisBottom(x))
    .append("text")
    .attr("fill", "#000")
    .attr("x", width + 10)
    .attr("y", -10)
    .style("text-anchor", "end")
    .style("font-size", "1.5em")
    .text("t");

  const y = d3
    .scaleLinear()
    .domain([0, romerData[romerData.length - 1].Y])
    .range([height, 0]);
  svg
    .append("g")
    .call(d3.axisLeft(y).ticks(10, d3.format(".1s")))
    .append("text")
    .attr("fill", "#000")
    .attr("x", 0)
    .attr("y", -10)
    .style("text-anchor", "start")
    .style("font-size", "1.5em")
    .text("log(Y)");

  svg
    .append("path")
    .datum(romerData)
    .attr("fill", "none")
    .attr("stroke", getTopicColor(urlToTopic()))
    .attr("stroke-width", 2)
    .attr(
      "d",
      d3
        .line()
        .x((d) => x(d.year))
        .y((d) => y(d.Y)),
    );

  // Dashed vertical marker at the switch year t0.
  svg
    .append("line")
    .attr("x1", x(t0))
    .attr("y1", y(romerData[T_MAX - 1].Y))
    .attr("x2", x(t0))
    .attr("y2", height)
    .attr("stroke", "black")
    .attr("stroke-width", 1)
    .attr("stroke-dasharray", "4");

  // Label the pre-change share near the start of the series.
  svg
    .append("foreignObject")
    .attr("width", "5em")
    .attr("height", "2em")
    .attr("x", x(0) + 15)
    .attr("y", y(romerData[0].Y))
    .append("xhtml:body")
    .style("font-size", "0.6em")
    .html(`\\(\\bar{l}_0=${l_}\\)`);

  // Label the post-change share just after the switch; index t0 is
  // the first post-change observation (years are 1-based, the array
  // is 0-based). Assumes t0 < T_MAX — TODO confirm slider bounds.
  svg
    .append("foreignObject")
    .attr("width", "5em")
    .attr("height", "2em")
    .attr("x", x(t0) + 15)
    .attr("y", y(romerData[t0].Y))
    .append("xhtml:body")
    .style("font-size", "0.6em")
    .html(`\\(\\bar{l}_1=${l}\\)`);

  svg
    .append("foreignObject")
    .attr("width", "4em")
    .attr("height", "2em")
    .attr("x", x(T_MAX))
    .attr("y", y(romerData[T_MAX - 1].Y))
    .append("xhtml:body")
    .style("font-size", "0.75em")
    .html(`\\(log_{10}Y\\)`);
}
|
|
||||||
|
|
||||||
/**
 * Simulate a combined Romer–Solow growth model for T_MAX periods.
 *
 * Each period computes output Y_t = A·K^alpha·((1-l)·L)^(1-alpha),
 * grows knowledge by the Romer term A_{t+1} = A·(1 + z·l·L), and
 * accumulates capital K_{t+1} = K + s·Y_t - d·K (starting from K = 1).
 *
 * From period t0 + 1 onward the economy switches to the alternate
 * parameters (L0, l0, alpha0, z0); with the default t0 = Infinity no
 * switch ever happens and the trailing arguments may be omitted.
 *
 * @param {number} T_MAX number of periods to simulate
 * @param {number} L labor force
 * @param {number} l research labor share
 * @param {number} A0 initial knowledge stock
 * @param {number} alpha capital share of output
 * @param {number} s savings rate
 * @param {number} d depreciation rate
 * @param {number} z research productivity
 * @param {number} [t0=Infinity] last period before the parameter switch
 * @param {number} [L0] post-switch labor force
 * @param {number} [l0] post-switch research share
 * @param {number} [alpha0] post-switch capital share
 * @param {number} [z0] post-switch research productivity
 * @returns {{year: number, A: number, K: number, Y: number}[]} one
 *   entry per period; A and K are the *next-period* stocks, Y is
 *   log10 of this period's output (the graphs plot log output).
 */
function calculateRomerSolowData(
  T_MAX,
  L,
  l,
  A0,
  alpha,
  s,
  d,
  z,
  t0 = Infinity,
  L0,
  l0,
  alpha0,
  z0,
) {
  let A = A0;
  let K_t = 1;
  const romerSolowData = [];

  for (let t = 1; t <= T_MAX; ++t) {
    // Select this period's regime instead of mutating the incoming
    // parameters in place (same values every period, clearer data flow).
    const postSwitch = t > t0;
    const curAlpha = postSwitch ? alpha0 : alpha;
    const curZ = postSwitch ? z0 : z;
    const curShare = postSwitch ? l0 : l; // research labor share (l)
    const curLabor = postSwitch ? L0 : L; // labor force (L)

    const Y_t =
      A * Math.pow(K_t, curAlpha) *
      Math.pow((1 - curShare) * curLabor, 1 - curAlpha);
    const A_t = A * (1 + curZ * curShare * curLabor);
    K_t = K_t + s * Y_t - d * K_t;
    // K is pushed *after* the update, so each entry carries the
    // next-period capital alongside this period's (log) output.
    romerSolowData.push({ year: t, A: A_t, K: K_t, Y: Math.log10(Y_t) });
    A = A_t;
  }

  return romerSolowData;
}
|
|
||||||
|
|
||||||
// Render the combined Romer–Solow model into #romer-solow-visualization:
// simulate T_MAX periods with the slider-chosen parameters (no regime
// switch) and plot log10 output over time. Redraws itself on slider
// input via setUpParameters.
function drawRomerSolowGraph() {
  const T_MAX = 100,
    margin = { top: 20, right: 100, bottom: 20, left: 50 };

  // Slider values ("RS"-prefixed ids): research productivity z,
  // research share l, labor L, initial knowledge A0, savings s,
  // depreciation d, capital share alpha.
  const [z, l, L, A0, s, d, alpha] = setUpParameters(
    drawRomerSolowGraph,
    ["z", "l", "L", "A0", "s", "d", "alpha"],
    "RS",
  );

  const container = document.getElementById("romer-solow-visualization");
  const width = container.clientWidth - margin.left - margin.right;
  const height = container.clientHeight - margin.top - margin.bottom;

  // Full redraw: wipe any previously rendered SVG.
  container.innerHTML = "";

  const svg = d3
    .select("#romer-solow-visualization")
    .append("svg")
    .attr("width", width + margin.left + margin.right)
    .attr("height", height + margin.top + margin.bottom)
    .append("g")
    .attr("transform", `translate(${margin.left}, ${margin.top})`);

  // No t0 argument: the default Infinity means no parameter switch.
  const romerSolowData = calculateRomerSolowData(
    T_MAX,
    L,
    l,
    A0,
    alpha,
    s,
    d,
    z,
  );

  const x = d3.scaleLinear().domain([1, T_MAX]).range([0, width]);
  svg
    .append("g")
    .attr("transform", `translate(0, ${height})`)
    .call(d3.axisBottom(x))
    .append("text")
    .attr("fill", "#000")
    .attr("x", width + 10)
    .attr("y", -10)
    .style("text-anchor", "end")
    .style("font-size", "1.5em")
    .text("t");

  // y axis is scaled to the final (largest) log-output value.
  const y = d3
    .scaleLinear()
    .domain([0, romerSolowData[romerSolowData.length - 1].Y])
    .range([height, 0]);
  svg
    .append("g")
    .call(d3.axisLeft(y).ticks(10, d3.format(".1s")))
    .append("text")
    .attr("fill", "#000")
    .attr("x", 0)
    .attr("y", -10)
    .style("text-anchor", "start")
    .style("font-size", "1.5em")
    .text("log(Y)");

  svg
    .append("path")
    .datum(romerSolowData)
    .attr("fill", "none")
    .attr("stroke", getTopicColor(urlToTopic()))
    .attr("stroke-width", 2)
    .attr(
      "d",
      d3
        .line()
        .x((d) => x(d.year))
        .y((d) => y(d.Y)),
    );

  svg
    .append("foreignObject")
    .attr("width", "4em")
    .attr("height", "2em")
    .attr("x", x(T_MAX))
    .attr("y", y(romerSolowData[T_MAX - 1].Y))
    .append("xhtml:body")
    .style("font-size", "0.75em")
    .html(`\\(log_{10}Y\\)`);
}
|
|
||||||
|
|
||||||
// Render the Romer–Solow parameter-change experiment into
// #romer-solow-change-visualization: a fixed baseline regime runs
// until slider-chosen year t0, then the slider-chosen alternate
// parameters take over; log10 output is plotted with a dashed marker
// at the switch. Redraws itself on slider input via setUpParameters.
function drawRomerSolowChangeGraph() {
  // Baseline (pre-switch) parameters are hard-coded.
  const T_MAX = 100,
    margin = { top: 20, right: 100, bottom: 20, left: 50 },
    s = 0.2,
    d = 0.2,
    A0 = 50,
    alpha = 0.33,
    l = 0.5,
    L = 100,
    z = 0.5;

  // Slider values ("RSC"-prefixed ids): post-switch z0, l0, L0,
  // alpha0 and the switch year t0.
  const [z0, l0, L0, alpha0, t0] = setUpParameters(
    drawRomerSolowChangeGraph,
    ["z0", "l0", "L0", "alpha0", "t0"],
    "RSC",
  );

  const container = document.getElementById("romer-solow-change-visualization");
  const width = container.clientWidth - margin.left - margin.right;
  const height = container.clientHeight - margin.top - margin.bottom;

  // Full redraw: wipe any previously rendered SVG.
  container.innerHTML = "";

  const svg = d3
    .select("#romer-solow-change-visualization")
    .append("svg")
    .attr("width", width + margin.left + margin.right)
    .attr("height", height + margin.top + margin.bottom)
    .append("g")
    .attr("transform", `translate(${margin.left}, ${margin.top})`);

  const romerSolowData = calculateRomerSolowData(
    T_MAX,
    L,
    l,
    A0,
    alpha,
    s,
    d,
    z,
    t0,
    L0,
    l0,
    alpha0,
    z0,
  );

  const x = d3.scaleLinear().domain([1, T_MAX]).range([0, width]);
  svg
    .append("g")
    .attr("transform", `translate(0, ${height})`)
    .call(d3.axisBottom(x))
    .append("text")
    .attr("fill", "#000")
    .attr("x", width + 10)
    .attr("y", -10)
    .style("text-anchor", "end")
    .style("font-size", "1.5em")
    .text("t");

  // y axis is scaled to the final (largest) log-output value.
  const y = d3
    .scaleLinear()
    .domain([0, romerSolowData[romerSolowData.length - 1].Y])
    .range([height, 0]);
  svg
    .append("g")
    .call(d3.axisLeft(y).ticks(10, d3.format(".1s")))
    .append("text")
    .attr("fill", "#000")
    .attr("x", 0)
    .attr("y", -10)
    .style("text-anchor", "start")
    .style("font-size", "1.5em")
    .text("log(Y)");

  svg
    .append("path")
    .datum(romerSolowData)
    .attr("fill", "none")
    .attr("stroke", getTopicColor(urlToTopic()))
    .attr("stroke-width", 2)
    .attr(
      "d",
      d3
        .line()
        .x((d) => x(d.year))
        .y((d) => y(d.Y)),
    );

  // Dashed vertical marker at the switch year t0.
  svg
    .append("line")
    .attr("x1", x(t0))
    .attr("y1", y(romerSolowData[T_MAX - 1].Y))
    .attr("x2", x(t0))
    .attr("y2", height)
    .attr("stroke", "black")
    .attr("stroke-width", 1)
    .attr("stroke-dasharray", "4");

  svg
    .append("foreignObject")
    .attr("width", "4em")
    .attr("height", "2em")
    .attr("x", x(T_MAX))
    .attr("y", y(romerSolowData[T_MAX - 1].Y))
    .append("xhtml:body")
    .style("font-size", "0.75em")
    .html(`\\(log_{10}Y\\)`);
}
|
|
||||||
|
|
||||||
// Draw every model graph once on load and again on every window
// resize.
document.addEventListener("DOMContentLoaded", function () {
  const renderers = [
    drawSolowGraph,
    drawRomerGraph,
    drawRomerlGraph,
    drawRomerSolowGraph,
    drawRomerSolowChangeGraph,
  ];

  for (const render of renderers) {
    render();
    // addEventListener, not `window.onresize = ...`: the original
    // assignments clobbered each other so only one graph ever
    // responded to resize — and the final assignment *invoked*
    // drawRomerSolowChangeGraph and stored its undefined return
    // value, leaving no resize handler at all.
    window.addEventListener("resize", render);
  }
});
|
|
||||||
|
|
||||||
// Typeset the page's math and wire slider-driven re-typesetting,
// deferring until MathJax has loaded if its script has not run yet.
document.addEventListener("DOMContentLoaded", function () {
  // wait for mathjax
  if (typeof MathJax !== "undefined") {
    // MathJax already loaded: typeset immediately.
    MathJax.typeset();
    initSliderEvents();
  } else {
    // Not loaded yet: install a startup hook that MathJax calls after
    // its own default page-ready work completes.
    window.MathJax = {
      startup: {
        pageReady: function () {
          return MathJax.startup.defaultPageReady().then(function () {
            initSliderEvents();
          });
        },
      },
    };
  }

  // Re-typeset the foreignObject labels inside each graph whenever one
  // of its sliders fires an input event (the graphs rebuild their SVG
  // on input, which discards previous typesetting). Function
  // declaration is hoisted, so the calls above are safe.
  function initSliderEvents() {
    document.querySelectorAll(".sliders").forEach((slidersDiv) => {
      slidersDiv.addEventListener("input", function (event) {
        // assumes the graph container immediately precedes the
        // sliders div in the DOM — TODO confirm against page markup
        const graphDiv = slidersDiv.previousElementSibling;
        if (graphDiv && graphDiv.querySelector("svg")) {
          const svg = graphDiv.querySelector("svg");
          svg.querySelectorAll("foreignObject body").forEach((body) => {
            MathJax.typesetPromise([body]);
          });
        }
      });
    });
  }
});
|
|
||||||
31
src/components/Footer.astro
Normal file
31
src/components/Footer.astro
Normal file
|
|
@ -0,0 +1,31 @@
|
||||||
|
---
|
||||||
|
---
|
||||||
|
|
||||||
|
<footer>
|
||||||
|
<span class="greek-delta">Δ</span>
|
||||||
|
<div class="footer-links">
|
||||||
|
<a target="_blank" href="https://git.barrettruth.com">git</a>
|
||||||
|
<a target="_blank" href="https://www.linkedin.com/in/barrett-ruth/">linkedin</a>
|
||||||
|
<a target="_blank" href="mailto:br.barrettruth@gmail.com">email</a>
|
||||||
|
</div>
|
||||||
|
</footer>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
footer {
|
||||||
|
padding: 20px;
|
||||||
|
font-size: 1.5em;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
|
||||||
|
.greek-delta {
|
||||||
|
font-family: "Times New Roman", Times, serif;
|
||||||
|
font-size: 1.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer-links a {
|
||||||
|
margin-left: 25px;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
113
src/components/Header.astro
Normal file
113
src/components/Header.astro
Normal file
|
|
@ -0,0 +1,113 @@
|
||||||
|
---
|
||||||
|
const path = Astro.url.pathname;
|
||||||
|
const isHome = path === "/" || path === "/index.html";
|
||||||
|
|
||||||
|
// Determine topic from path
|
||||||
|
// Derive the topic path shown in the terminal prompt from the current
// URL's path segments.
function getTopic() {
  const segments = path.split("/");

  // Top-level page such as "/about.html".
  if (segments.length === 2 && segments[1].endsWith(".html")) {
    return "/" + segments[1].replace(".html", "");
  }

  // Nested page: keep the middle segments, dropping the final one.
  if (segments.length >= 3) {
    return "/" + segments.slice(2, -1).join("/").replace(".html", "");
  }

  return "";
}
|
||||||
|
|
||||||
|
const topic = getTopic();
|
||||||
|
const promptText = topic ? `barrett@ruth:~$ ${topic}` : "barrett@ruth:~$";
|
||||||
|
---
|
||||||
|
|
||||||
|
<header>
|
||||||
|
<a href="/" style="text-decoration: none; color: inherit">
|
||||||
|
<div class="terminal-container">
|
||||||
|
<span class="terminal-prompt">{promptText}</span>
|
||||||
|
<span class="terminal-cursor"></span>
|
||||||
|
</div>
|
||||||
|
</a>
|
||||||
|
<div class="header-links">
|
||||||
|
<a target="_blank" href="/resume.pdf">resume</a>
|
||||||
|
<a target="_blank" href="/transcript.pdf">transcript</a>
|
||||||
|
<a href="/about">about</a>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
// Terminal functionality
|
||||||
|
const TERMINAL_PROMPT = "barrett@ruth:~$ ";
|
||||||
|
let clearing = false;
|
||||||
|
|
||||||
|
// Clear the terminal prompt with animation
|
||||||
|
// Animate deleting the topic suffix from the terminal prompt, one
// character per tick over roughly `delay` ms, then invoke `callback`.
// Re-entrant calls are ignored while an animation is in flight.
function clearPrompt(delay, callback) {
  if (clearing) return;
  clearing = true;

  const terminalPrompt = document.querySelector(".terminal-prompt");
  if (!terminalPrompt) {
    clearing = false;
    return;
  }

  // Everything after the fixed prompt prefix is the topic suffix.
  const topicLength = terminalPrompt.innerHTML.length - TERMINAL_PROMPT.length;
  let removed = 0;

  const removeChar = () => {
    if (removed++ < topicLength) {
      terminalPrompt.textContent = terminalPrompt.textContent.slice(0, -1);
      setTimeout(removeChar, delay / topicLength);
    } else {
      removed = 0;
      clearing = false;
      if (callback) callback();
    }
  };

  removeChar();
}
|
||||||
|
|
||||||
|
// Intercept a navigation click: play the prompt-clearing animation,
// then navigate to the home page.
function goHome(e) {
  e.preventDefault();
  clearPrompt(500, function () {
    window.location.href = "/";
  });
}
|
||||||
|
|
||||||
|
// Expose the terminal helpers globally and wire the home link: on the
// home page a click just resets the topic UI in place; elsewhere it
// animates the prompt, then navigates home.
document.addEventListener('DOMContentLoaded', () => {
  window.TERMINAL_PROMPT = TERMINAL_PROMPT;
  window.clearPrompt = clearPrompt;
  window.goHome = goHome;

  const homeLink = document.querySelector('header a[href="/"]');
  if (!homeLink) return;

  const path = window.location.pathname;
  const onHomePage = path === "/" || path === "/index.html";

  if (!onHomePage) {
    homeLink.addEventListener('click', goHome);
    return;
  }

  homeLink.addEventListener('click', (e) => {
    e.preventDefault();
    for (const topic of document.querySelectorAll(".topic a")) {
      topic.classList.remove("active");
      topic.style.color = "";
    }
    document.getElementById("posts").innerHTML = "";
    clearPrompt(500);
  });
});
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
header {
|
||||||
|
padding: 20px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-links a {
|
||||||
|
margin-left: 25px;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
15
src/content/config.ts
Normal file
15
src/content/config.ts
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
import { defineCollection, z } from "astro:content";

// Frontmatter schema for markdown/MDX posts in this content collection.
const postsCollection = defineCollection({
  type: "content",
  schema: z.object({
    title: z.string(),
    // Optional one-line summary.
    description: z.string().optional(),
    // Optional date string (posts in this repo use "YYYY-MM-DD").
    date: z.string().optional(),
    // Opt-in KaTeX rendering for math-heavy posts; off by default.
    useKatex: z.boolean().optional().default(false),
  }),
});

export const collections = {
  posts: postsCollection,
};
|
||||||
337
src/content/posts/algorithms/competitive-programming-log.mdx
Normal file
337
src/content/posts/algorithms/competitive-programming-log.mdx
Normal file
|
|
@ -0,0 +1,337 @@
|
||||||
|
---
|
||||||
|
title: "competitive programming log"
|
||||||
|
date: "2025-05-14"
|
||||||
|
useKatex: true
|
||||||
|
---
|
||||||
|
|
||||||
|
## [903 (div. 3)](https://usaco.guide/bronze/intro-sorting) 14/05/2025
|
||||||
|
|
||||||
|
First contest in a while. Implementation lacked heavily but solving harder problems made easier problems easier.
|
||||||
|
|
||||||
|
- A: rust immediately showed with the brute force. Since $n\cdot m\leq 25$, at most $\lceil\log_2(25)\rceil=5$ concatenations need to be made. **Slow down and consider constraints**.
|
||||||
|
- B: Similarly, mathematical insight and just *playing with parameters*/quantifying the effects of operations is important. **Test your conjectures, they may be right/helpful** (ie. "I must maintain the shortest length thread").
|
||||||
|
- C: implementation weak. Simplify, step back, simplify, continuously. I stopped considering altering the grid and used a pair of coordinates but deriving the others inline, _if you trust your mathematics_, is way simpler.
|
||||||
|
- D: formalize your answer better. Understand prime factorization more. Improve/memorize asymptotic bounds of factoring and prime counting. Don't overcomplicate the problem—here, I erroneously thought it was asking for minimum operations, not possibility. In reality, all this problem asks is: "Are the total number of each factor greater than one divisible by $n$?"
|
||||||
|
- E: dp rust. Simplify your thought process—look back into SRTBOT (i.e. define the subproblem). If the subproblems depend rightward, iterate right to left. That simple.
|
||||||
|
- F: coming back for you!
|
||||||
|
|
||||||
|
I must heed the advice of Colin Galen. I rush trivial problems because they're boring, then forget an edge case. I get overwhelmed by hard problems because I don't know how to think. I've been working on my discipline and thinking but I need to improve a lot.
|
||||||
|
|
||||||
|
These problems are (mostly) easy but they help me prefer convenient implementation. It is time to be done with bronze.
|
||||||
|
|
||||||
|
Implementation, loops, invariants. I still struggle to get a grasp. I struggled with [this problem](https://usaco.org/index.php?page=viewproblem2&cpid=592) just figuring out how to do the loops and invariants. I do a loop of "this works"$\rightarrow$ "let's simplify" $\rightarrow$ "that doesn't work" $\rightarrow ...$ over and over again... _even when the right solution comes across my mind_. In this case, choosing an invariant was just overwhelming. **The simplest correct solution is always the right one**.
|
||||||
|
|
||||||
|
## [usaco bronze: complete search](https://usaco.guide/bronze/intro-complete) 04/27/2025
|
||||||
|
|
||||||
|
Implementation wise, two-pointers boundary conditions trip me up. Spend more time on problems statements and go for the easiest solution.
|
||||||
|
|
||||||
|
cowntact tracing was the absolute worst. I misinterpreted the problem constraints (i.e. thought hoof shakes in last $k$ time units, not last $k$ shakes) then failed to identify _all_ parts of my code that had erroneous logic. Then I did this 3 times over.
|
||||||
|
|
||||||
|
Once a crux observation/error is realized, reexamine all parts of the code/plan and see how they're impacted.
|
||||||
|
|
||||||
|
## [usaco bronze: simulation](https://usaco.guide/bronze/simulation) 26/04/2025
|
||||||
|
|
||||||
|
These problems are relatively easy but exposed my poor implementation abilities. A lack of implementation implies a lack of understanding. Specifically, I encountered the following problems:
|
||||||
|
|
||||||
|
Base cases and calculations—initializing state and transitions along with exit conditions. Oftentimes I simply _misinterpret_ the simulation (**take your time**), fail to outline specific criteria, and don't walk through edge cases. For example, [speeding ticket](https://usaco.org/index.php?page=viewproblem2&cpid=568) took me forever (>20 minutes) to model the ideal $O(n)$ solution. Initial states and base cases took me forever, when I should've just modeled the transitions explicitly, considering start/end states separately rather than getting overwhelmed by everything at once. Overcomplicating/wasting time on solutions that aren't necessitated by lax constraints. I further get confused (although my black-box ability is improving) on problems with many moving parts.
|
||||||
|
|
||||||
|
## [1020 (div. 3)](https://codeforces.com/contest/2106) 25/04/2025
|
||||||
|
|
||||||
|
Unfortunately, the moment this contest started I'd already given up. In contest my main weakness is that I just rush and panic. If my next contest results in me wasting 2 hours I'm going to take a step back and just do intentional practice.
|
||||||
|
|
||||||
|
I learned nothing. I didn't read problem statements nor prove my answers.
|
||||||
|
|
||||||
|
## [970 (div. 3)](https://codeforces.com/contest/2008) 21/04/2025
|
||||||
|
|
||||||
|
~1450 performance. OK contest, not very exhilarating. Hedonistic treadmill at work with me solving A-F on Div. 3 when 2 weeks ago I couldn't even do that on Div. 4. Definitely need to upsolve this to improve my ability at expressing ideas simply.
|
||||||
|
|
||||||
|
From now on, prioritize actually _learning_ and problem-solving in a pressurized format (we all die eventually, everything is under pressure).
|
||||||
|
|
||||||
|
- Never report rating in these logs
|
||||||
|
- Implement all dsa from scratch (modular arithmetic, data structures, etc.)
|
||||||
|
|
||||||
|
- A: somehow got a wrong answer. unbelievable, brute forced it. math is poor. why reason/be sure when you can brute force?
|
||||||
|
- B: didn't outline a simple strategy and got cooked. For example, I didn't check that the grid was a square after redoing my implementation for the third time.
|
||||||
|
- C: trivial
|
||||||
|
- D: tried a dsu approach before realizing the graph is not directed. DP + DFS + DSU weakness all combined to confuse me. In retrospect, just think about solving the problem in one way. Model it as a graph, do tortoise and hare, or do DP. **One approach at a time**. I tangibly need to improve my reasoning on harder problems when there are many moving parts. However, I'm unsure of how to move forward here because I'm fighting an uphill battle against a) my basic understanding of algorithms and b) my actual reasoning ability. I need to improve both the basics and my ability to think when facing _new_ problems/frameworks—that's the sign of a good problem-solver.
|
||||||
|
- E: realized the greedy approach + pref/post-fix greedy on deletion but implementation absolutely annihilated me here. Further, **my exchange argument/greedy proofs are nightmarishly bad** and I resulted in just "trusting" the two-max greedy approach. This also harms my ability/intuition for dp/not doing dp. Work on this.
|
||||||
|
- F: easy, counting pairs and modular arithmetic. However, I had to copy the modular arithmetic online. My grasp of this is still not good enough because I haven't practiced enough number theory.
|
||||||
|
|
||||||
|
## [937 (div. 4)](https://codeforces.com/contest/2094) 16/04/2025
|
||||||
|
|
||||||
|
~1300 performance. Consistently finishing E, getting F occasionally.
|
||||||
|
|
||||||
|
- B: took me way too long because I just wanted to code. Wasted 5 minutes when I know better—patience and a formula got me there in 30 seconds.
|
||||||
|
|
||||||
|
> Impatience is holding me back. Lack of discipline and focus is holding me back. It doesn't just harm my ranking problem-solving ability but it _prevents me from improving_.
|
||||||
|
|
||||||
|
- C: submitted without rigorously checking all edge cases.
|
||||||
|
- D: misunderstood the problem statement twice. Polya and Zeitz both advise to fully understand exactly what the problem statement is asking, whether through direct analysis or examples. Then, I messed up the brute force. However, I did notice that raw recursion would not TLE based on the fact that each number can be divided in only a few ways.
|
||||||
|
- E: I knew a number didn't have that many divisors but the implementation took me too long. **Pay closer attention to the constraints and just solve the problem.** Spend a bit more time on implementation, _even if you know a way that works_. Here, I knew an approach to code but it was easily error-prone. Immediately came up with the idea but was unable to express it in code. This means I did not fully understand the problem, namely with some core mistakes:

1. The tree is not necessarily binary
|
||||||
|
- C's cannot have children, so tracking depth/available nodes to fill per level is crucial
|
||||||
|
Coming up with the expression $a=c+1$ (and ensuring the input conforms to that) is a lot easier than custom coding logic to ensure levels are filled out properly. The core problem is that I lack the mathematical prowess to be certain of what exactly I need to check after I make that assertion and why. In this case, it means that the number of C's is appropriate, _so I never even need to check them_—I just need to make sure that the rest of the solution is valid.
|
||||||
|
|
||||||
|
## [1017 (div. 4)](https://codeforces.com/contest/2094) 14/04/2025
|
||||||
|
|
||||||
|
Decent contest. Lost focus near the end and was not paying attention at the start (I was working on my makefile). This is a telltale sign that Div. 4 is a bit too easy. F & G should've been lightwork.
|
||||||
|
|
||||||
|
1. D: submitted what I knew to be incorrect. Can't account for laziness.
|
||||||
|
2. E: long long overflow. Laziness.
|
||||||
|
3. F: lost focus here and did not prove the correctness. Was confused by the grid—simply breaking down and _experimenting_ in terms of simple patterns would help isolate that a mutation after $m\%K==0$ is key. Then, a subsequent rigorous proof of the modulo shift would help.
4. G: gave up after a few insights and did not persevere to find the simple mathematical equation for updating the score on reversal although I got the deque intuition. **Sometimes, there's nothing to learn besides improving your discipline.**
|
||||||
|
My math intuition needs to improve. I see something hard (i.e. dividing out a number repeatedly) then think "this is impossible" rather than "this seems hard, but is it feasible? Is it computationally practical?" In this case, I know the solution rests on the fact that only a logarithmic (?) amount of numbers can end up dividing $a_i$. Time to upsolve later.
|
||||||
|
|
||||||
|
## [799 (div. 4)](https://codeforces.com/contest/1692) 10/04/2025
|
||||||
|
|
||||||
|
Improvement is marginal. My desire for rating is unquenchable (joke). Really, though, I'm improving slower than I like. 1400 performance, my best yet—I'll do one more Div. 4 then I need to upgrade to Div. {2,3}. I think the most core realization I made after this contest was:
|
||||||
|
|
||||||
|
Separate what your code does from what you think it should do. Conceptualize an approach, then put it aside. Implement exactly what the approach says to do (not what you think it should say). If the approach left something out, _step away from implementing, reconsider the approach, then resume implementing_. **Never reconsider and alter your strategy while coding.**
|
||||||
|
|
||||||
|
1. B: realized parity-based removal immediately. Still, I once again didn't initially do what the problem asked (i.e. I returned the number of operations, not array length).
|
||||||
|
|
||||||
|
> Never run code until you're confident it does what you want.
|
||||||
|
|
||||||
|
3. C: Blindly plugged in incorrect deltas to detect the "X" pattern of "#" characters before getting the right answer.
|
||||||
|
|
||||||
|
I consistently tune out when I'm bored/think I've discovered the insight/done the problem before. This results in me solving a different problem, expressing right ideas incorrectly, or expressing wrong ideas correctly—for example, thinking I find a core insight, believing it sounds right, and just _not checking its validity_. I don't know what to say besides progressively overload your discipline and focus.
|
||||||
|
|
||||||
|
4. D: my proof that the time must repeat was complete garbage and complex. I've been reading AOPS and wanted to go with a math-y approach. However, consider the following simple proof: The current time is $s$ and you look at the clock every $x$ minutes. $1440 \cdot x\equiv 0\pmod{1440}$, so the time must repeat after at most 1440 looks. This is enough to brute force. There are alternative easier approaches to brute-force, but for an easy problem like this speed comes above all else (special case).
5. E: **Submitted and hoped for correctness multiple times**. I was lazy and didn't want to implement the binary search approach. Ironically, after turning my brain on, I quickly found a $\Theta(n)$ solution. However, I rushed to code it up thinking about the time I had wasted and frustration from WA (more from myself than from WA). This caused me to forget invariants and implement what I thought was right, not the algorithm I designed (forgetting invariants). Walking through an example (as I advised myself yesterday) reminded me to iterate the right pointer past any zeroes. Good, specific improvement from yesterday, but I still should've slowed down and asked "where should the right pointer move? what are the invariants? why? what about if it goes off the end of the array?" **Only look forward**.
6. F: Came up with fancy equations because I'm taking the wrong core lessons from AOPS. The example problems there are complex, while these are simple—therefore, I should prioritize simplicity. Concretely, the following phrasing massively simplifies this problem: I'm looking for 3 numbers $a_i,a_j,a_k$, $i\neq j\neq k$, such that $a_i+a_j+a_k\equiv 3\pmod{10}$. WLOG, assume $i\lt j,k$. We only want a pair of remainders that satisfy the above inequality. Since there are only 10 unique remainders, brute force all 100 remaining modulo pairs. If such a pair exists in $a_{i+1:}$ $\forall$ $i\in\{1,...,n-2\}$, the answer is yes. This can be calculated with a postfix frequency map of remainders, with special care taken for when the remainders are equal.
7. G: good deduction and simplicity but I once again read the instructions too fast and skipped that the array is size $k+1$. Also, **my sliding window implementation ability is horrendous**. I spent nearly 10 minutes just debugging a "right" idea because I couldn't code up a basic thing.
|
||||||
|
|
||||||
|
## [849 (div. 4)](https://codeforces.com/contest/1791) 04/09/2025
|
||||||
|
|
||||||
|
Defeating. My speed is improving but I completely wasted my focus. Div. 4 is too easy for me to take seriously now. This itself, though, is a problem with discipline.
|
||||||
|
|
||||||
|
I only practice for 2 hours a day. There's no point in practicing problems and not trying—just go do something else.
|
||||||
|
|
||||||
|
- A = B = trivial
|
||||||
|
- C: lost focus on what was being asked, returning the number of removals rather than the minimal length of the string. I want to write code that works first try—**validate your code is actually doing what you're saying before running, both conceptually and in the fine-grained details**. I move too fast. Still, I justified $l\lt r$ instead of $l\leq r$, a small improvement.
|
||||||
|
- D: Instantly saw the solution but rushed the implementation. Eventually I slowed down and then considered the valid split indices. **Consider edge cases first. Go through one iteration in your head before coding.**
|
||||||
|
- E: Utter disaster. **Did not read the problem statement and answered a similar (but very different) problem I'd done in the past**. By the time I saw this after impatiently submitting ~5 WA, I had lost mental focus. Never submit and hope for a correct answer—_know it_.
|
||||||
|
- F: Then, I let my previous failure carry through to the next problem. This happened in my last contest but I also dealt with it well:
|
||||||
|
|
||||||
|
When you're done with a problem (in contest), whether because you solved it or simply gave up, forget about it. Don't obsess about a better solution or put yourself down. Ceaselessly move forward with confidence at all times.
|
||||||
|
|
||||||
|
## [1016 (div. 3)](https://codeforces.com/contest/2093) 04/08/2025
|
||||||
|
|
||||||
|
Horrendous competition but I refrain from cringing for the sake of improvement.
|
||||||
|
|
||||||
|
- A: trivial
|
||||||
|
- B: took me a while and I still don't get the proof. Good resilience in trying to formally prove it then detecting a pattern (remove all non-zero/zero digits after/before the last non-zero digit).
|
||||||
|
- C: **math skills remain weak but pattern recognition is improving**. I "guess and checked" that any $x+x$ is not prime when $x\neq1$. However, after catching 2 edge cases $x=k=1$, $k=1$, I gave up, ignoring $x=1,k=2$, causing a WA. This is the downside of lacking a formal proof/understanding the math—while in retrospect I can say "consider a few more edge cases," I can't tell when to stop investigating. Still, I should've separated out cases $k=1$, $k\neq1$ and exhaustively proved $k=1$, which is remarkably facile. I still don't know how to factorize numbers in $O(\sqrt{n})$ and copy from AI (allegedly).
|
||||||
|
- D: got the idea but it was very abstract. Rushed to implementation, wasted time—same old story. Got it after contest. **After failing to implement for a long time, abandon the approach and start from scratch—99% of the time you're not going to get it.**
|
||||||
|
- E: failed implementation. At least I saw binary search after stepping back. I tried to find an upper bound on $x$ by creating _exactly_ $k$ groups but for ease of implementation I should've went for $\geq k$ since any sequence with MEX $x$ can be extended to have MEX $\geq x$ with the addition of any number.Specifically define _everything_. What am I binary searching over (in this case, forming some $x$ with _at least_ $k$ groups)? What are the bounds? Is the search space monotonic? Why?
|
||||||
|
- F: I had seen a bitwise trie before and didn't review the problem. I didn't upsolve then, so I couldn't upsolve now, and getting this problem right would've made a massive difference in my performance. These are the consequences—**upsolving is goated**.
|
||||||
|
|
||||||
|
## [898 (div. 4)](https://codeforces.com/contest/1873/) 04/03/2025
|
||||||
|
|
||||||
|
Placed top 2000 but did not learn much. Was distracted (thinking about writing this post itself) and was not taking the problems seriously because they were not that challenging in the first place.
|
||||||
|
|
||||||
|
Take problems seriously or you're wasting your time.
|
||||||
|
|
||||||
|
I was also continually nervous/pressuring myself and thought about my own thought process. The time for analysis is after, not during.
|
||||||
|
|
||||||
|
Pressure ruins performance.
|
||||||
|
|
||||||
|
- A: couldn't come up with the extensible solution and checked all permutations. Step back and use explicit criteria, i.e. "what qualifies being able to make it in one swap?" (at most 2 characters being off).
|
||||||
|
- B: good math proof WLOG. Learning. Took a step back to architect the easiest way to count the score. However, **only after erroneously calculating the score with an approach I had already deemed incorrect**.
|
||||||
|
- C: Trivial but should've been more patient.
|
||||||
|
- D: binary search took me a second, but ok.
|
||||||
|
- E: sliding window but erroneously checked $a_{l,r-1} \% a_{l+1,r}\neq 0$ _without_ confirming it was in the sliding window first. Err on the side of caution (i.e. if checks, explicit edge cases, etc). Debugging could be improved—target the likely differences between what you _think_ you're doing and what you're _actually_ doing.
|
||||||
|
- G: tried and didn't fully invest my time. For some reason my subconscious still thinks getting lucky is a valid option.
|
||||||
|
- H: _much_ easier than G. Formulated the solution formally before coding, allowing me to implicitly catch a lot of edge cases: "whether V can get to the node closest to it in the graph's cycle strictly before M."
|
||||||
|
|
||||||
|
## [871 (div. 4)](https://codeforces.com/contest/1892/) 03/28/2025
|
||||||
|
|
||||||
|
Div. 4 to practice implementation skills + mathematical observations thanks to [Paul Zeitz](http://www.gang.umass.edu/~franz/Paul_Zeitz_The_Art_and_Craft_of_Problem_SolvingBookosorg.pdf). From now on, I will only note useful problems.
|
||||||
|
|
||||||
|
- B: typo, costing a few minutes. Go slower. Declare variables. Think consistently through approach.
|
||||||
|
- C: paused and found a general implementation as Zeitz advises (starting with a general, non-mathematical solution: "The solution is the earliest to get either both 1s at once or each over two strings"). **I still rushed**, incorrectly computing the result as `min(first, second)` instead of `first + second`.
|
||||||
|
- D: I looked up the recurrence relation $T(n)=T(n/3)+T(2n/3)$ to ensure it was sub-linear time.
|
||||||
|
|
||||||
|
Gain the mathematical skills to both analyze and derive (i.e. if you forget) recurrence relations.
|
||||||
|
|
||||||
|
- E: spent _forever_ fanagling with my Disjoint Set Union template. I made countless errors including:
|
||||||
|
|
||||||
|
1. Not inserting nodes into the data structure before joining them
|
||||||
|
2. Joining zero/non-zero nodes, corrupting the component graph
|
||||||
|
3. Erroneously updating component sums by double counting (i.e. using a mutated value).
|
||||||
|
|
||||||
|
Understand data structures before you use them. Despite what Colin Galen says, black-boxed knowledge often isn't enough. Improve knowledge of Fenwick Trees, Segment Trees, and Union Find.
|
||||||
|
|
||||||
|
- F: elegant approach and reduction, eliminating the need for a graph traversal.
|
||||||
|
- G: _intentionally_ chose the mathematical/indexing method to improve my skills. I'd also like to use `{upper,lower}_bound` more often with a lambda than manually code binary search. **Once again, I practiced in a fundamentally flawed way, choosing an approach I knew would be error prone and difficult**.
|
||||||
|
|
||||||
|
Don't be scared if you initially can't find an easy implementation. _Reconceptualize, visualize, and reframe_ until something comes up. Sometimes, this takes too long—however, my threshold for that realization is MUCH too low. **Spend a longer time developing/considering approaches even if you already know a feasible solution**. Here, I spent ~5 minutes developing a solution, then 20 minutes coding it. Instead, another 5 minute allocation of time could lead me to the prefix sum solution, likely saving ~10 minutes.
|
||||||
|
|
||||||
|
## [895 (div. 3)](https://codeforces.com/contest/1872/) 03/26/2025
|
||||||
|
|
||||||
|
Decent.
|
||||||
|
|
||||||
|
- A: math intuition building. Jumped to assuming the problem statement but it was much simpler. **Answer the problem only.**
|
||||||
|
- B: textbook simple problem that I struggle to mathematically quantify being distracted by many components. In retrospect, I should interpret the problem simply like:
|
||||||
|
|
||||||
|
Each trap has a known time I must return by. The answer is therefore the minimum of these.
|
||||||
|
|
||||||
|
I also just plug in $ceil$ and $floor$ until I find the right answer (I'm not lying). Instead, note that for $s,k\in\mathbb{Z}$, $\frac{s}{2}>k\leftrightarrow\lfloor\frac{s-1}{2}\rfloor\geq k$. This simply "edges out" the fractional term to line up cleanly with the divisor.
|
||||||
|
|
||||||
|
- C: cooked. **Practice number theory.**
|
||||||
|
- D: took me a while because I was distracted with the moving parts. Specifically, I forgot that I could choose the permutation and that the question was merely asking to pick the largest/smallest numbers on $x$/$y$ slots respectively. End solution was expressive and elegant. **Express the question and reframe the constraints in simple but accurate terms**.
- E: Black-boxed a lazy segment tree (with the help of AI, I must admit—I need to make a template).
|
||||||
|
|
||||||
|
_Everything I did here was wrong and this problem showed an embarrassingly fundamental flaw in my practice strategy._
|
||||||
|
|
||||||
|
Namely, I should divide up practice time into:
|
||||||
|
|
||||||
|
1. Contests, emphasizing speed and implementation
|
||||||
|
2. Single problems, emphasizing specific learning objectives
|
||||||
|
|
||||||
|
In this problem, I immediately saw the application of the lazy segment tree but decided to hold off on it, failing to find the simpler prefix-XOR solution. Therefore, I not only wasted my time, but also cemented in unrealistic practice (I would never do this in a real contest) and worsened my virtual contest performance. As for the prefix-XOR solution, focusing on just one/zero corresponding elements and **walking through small examples** (i.e. "what happens when $l=r$?") would've helped me pick up the pattern.
|
||||||
|
|
||||||
|
## [1013 (div. 3)](https://codeforces.com/contest/2091) 03/25/2025
|
||||||
|
|
||||||
|
Solved in a coffee shop. More locked in than before. My best performance yet. I'm changing my philosophy in these contests—I want to be able to code nearly everything (except for, for example, a lazy-propagation segment tree) from scratch. These contests should test my ability to _code_ and I mean the whole package. Lastly, my skills in math and implementation are improving bit by bit. No stopping here.
|
||||||
|
|
||||||
|
- A: rushed and panicked for no reason. Took me a few minutes to realize the triviality of the solution. **Calm down!**
|
||||||
|
- B: failed to prove solution before testing, resulting in a time waste of around 10 minutes. Collected myself and proved it, though **rigor could be improved**.
|
||||||
|
- C: noticed a pattern in the examples after rotating them and instantly submitted. **Risky decision!** The problem is, I'm unsure if I am even capable of proving the validity of the solution in the first place.
|
||||||
|
- D: afk for ~45 minutes but still heavily struggled with the solution, _even after reducing the problem_ to maximally spreading out $\lceil\frac{k}{n}\rceil$ columns. Recollected myself and came up with a solution that worked for me in minutes.
|
||||||
|
- E: played with the numbers and realized the prime reduction. **Number theory very weak**—this is most likely the hardest problem in the category I've ever solved. Still had to google sieve of eratosthenes (is this cheating?) (**contest as a test of implementation skills**).
|
||||||
|
|
||||||
|
## [900 (div. 3)](https://codeforces.com/contest/1878/) 03/22/2025
|
||||||
|
|
||||||
|
Solved in a coffee shop. Used AI for smaller things (otherwise I'd have no idea).
|
||||||
|
|
||||||
|
- A: Solved a much harder problem related to majority element paths on tree—realized the solution after a minute.
|
||||||
|
- B: was confused for about 7 minutes but realized some properties of divisibility and odd numbers. **Math is still a weakness. Take simpler approaches to complex constraints, such as considering parity.**
|
||||||
|
Went off of gut instinct that it is always possible to form the given $x$ if encompassed in the range of numbers. **Failed to prove this mathematical validity but had fair intuition** (i.e. just "take off one" if too big/small). This is acceptable, though not perfect.
|
||||||
|
- D: cooked. Solved E first and had mentally given up by this point. **If you've given up, just stop trying and take a break/do something else. You're wasting your time.**
|
||||||
|
- E: incredibly easy with segtree. Realized the lower bound/walk solution after 2-3 minutes. Binary search indexing can be improved (i.e. which pointer to return?) as well as realizing one binary search is necessary across both arrays. Good mathematical deduction to realize relationship between input arrays. Revisit sparse table + simpler solution—_don't be content with an advanced solution when a simpler/elegant idea also suffices_.
|
||||||
|
|
||||||
|
## [891 (div. 3)](https://codeforces.com/contest/1857/) 05/06/2025
|
||||||
|
|
||||||
|
Solved partially on the plane and at home. Best round in terms of acceptance rate. After bombing another codeforces contest & more CSES work, it's clear that my biggest weakness is **algorithmic correctness and certainty**. To remedy:
|
||||||
|
|
||||||
|
- Test edge cases (boundary + tricky)
|
||||||
|
- Don't submit unless you're _certain_ you're right
|
||||||
|
- In analysis, leave no stone unturned (be thorough)
|
||||||
|
- Don't rely on the given test cases as proof of correctness.
|
||||||
|
|
||||||
|
- A: ironically, although I got AC in ~1 minute, this is exactly where I go wrong. I did not check the actual correctness of my parity checks but submitted anyway.
|
||||||
|
- B: AC once again but **implementation is still a weakness**. In this case, it was due to a lack of problem understanding, i.e. rounding up sets all right digits to 0, but carries can still affect the number after that.
|
||||||
|
- C: skipped and came back later. The idea of sorting came up but I was overwhelmed. My end solution was a bit overcomplicated but still logically sound. I'm getting better at making observations, in this case explicitly identifying the fact that the largest element must have one more element greater than or equal to itself, permitting you to build the answer in reverse order. **Did not prove the correctness of this**.
|
||||||
|
- D: skipped and came back later. Given the large amount of vertices I did a good job of rephrasing the data (thanks [George Pólya](https://www.google.com/search?q=how+to+solve+it)) and **proving** the correctness with transitivity/contradiction. Initial solution complex—ponder implementation before going at it.
|
||||||
|
- E: took me a while (I believe) because I was on a plane without a notebook and visualizing was hard. Good mathematical formulation, but initially returned the answer in wrong order. **Relied on the test cases to save me—ensure you're solving the actual problem** (in this case, returning the queries in the right order).
|
||||||
|
- F: Quadratic equation flew over my head. I was unsure but should've just played with the numbers—if you plug them in, you derive the quadratic equation relevant to the problem. I then (maybe) could've gotten an answer. Instead, I was intimidated because I thought I needed some fancy DSA/dp/two-pointers/two-sum thing. Just have to build more intuition. Also, totally did not know how to count number of distinct pairs $i, j$ with $i\lt j$ for two elements. I missed the case when the elements are equal and, yes, seriously forgot it is just the product (I do not want to talk about it but thanks Deepseek). **Overflow, again. rly dude?**
|
||||||
|
- G: doing this later, got the core insight of the minimal weight edge path. Once again, for surveying path
|
||||||
|
|
||||||
|
## cses (range queries, sorting and searching) 03/01/2025
|
||||||
|
|
||||||
|
A good review and challenge of data structures. I've become even more of a fan of CSES. On codeforces, the application of fenwick trees and segment trees are usually on problems out of my reach, where I'm battling both the problem and the data structures.
|
||||||
|
|
||||||
|
1. static range minimum queries: sparse table. copy-pasted from template, should be able to derive
|
||||||
|
2. range update queries: fenwick tree difference array. **understanding of fenwick trees fundamentally flawed. "guessed and checked" on the ranges to update.**
|
||||||
|
3. forest queries: inclusion-exclusion principles. **think before implement.**
|
||||||
|
4. hotel queries: fun question, derived segment tree `{lower,upper}_bound` (a "walk", apparently).
|
||||||
|
5. list removals: overcomplicated it a lot. Note that an index can be interpreted as a relative position into an array, so an indexed set works perfectly fine. **Reinterpret problem constraints.** **Extreme lack of familiarity with PBDS/STL APIs**. I constantly confuse `find_by_order`, `order_of_key`, `erase(*it)` vs. `erase(it)`.
|
||||||
|
6. traffic lights: destroyed. I'm at the point where I thought "oh, there's no way I'd need two data structures. Too complicated, let me see the solution." **Trust your intuition a bit more, especially if you know your solution will lead to a right answer**. The offline solution also fully went over my head, in which answering queries backwards presents a 4x (performance-wise) faster solution. **Answer the question—you can do so by any means necessary**. In this case, reinterpreting the key insight—that adding a traffic light can only shrink intervals—as its reverse—that removing a traffic light can only increase them—isn't enough on its own. I knew handling the first case simply was too hard but I needed to **dig deeper into ideas**. In this case, asking "ok, well is it easier to answer the queries in reverse order? Well, yes, because removing the `i`th light can either create a larger gap around it, which I can easily find, or it's the same gap as before" would've saved me.
|
||||||
|
|
||||||
|
## [1006 (div. 3)](https://codeforces.com/contest/2072) 02/25/2025
|
||||||
|
|
||||||
|
- A: easy, messed up on the math a bit for a second
|
||||||
|
- B: for the second contest in a row, missed a B and solved E or later. Solved ~2 min after the contest ended. **Prove mathematical correctness.** Here, I did not and still don't fully understand why a configuration like: “hyphens...underscores...hyphens>” is even optimal... **Still, I was mad that I couldn't get it and submitted solutions that I had nowhere near certainty of their correctness. If you aren't sure, you're better off skipping it**. Fortunately, in this case, maximizing your codeforces ranking also coincides with optimal problem-solving: don't guess.
|
||||||
|
- C: knew a solution almost instantly but got held up nearly 30 minutes on small implementation details and edge cases. Ended up submitting something I wasn't certain was correct. Taking an explicit extra minute to consider: “what do I print for the last case? I must maximize the MEX and ensure the bitwise OR equals x. If the MEX hits x, then print it. Otherwise, print x” would've saved me upwards of 10 minutes.
|
||||||
|
- E: masterclass in throwing. I knew the algorithm pretty much immediately (manhattan distances and euclidean distances must be equals means at least one coordinate must be the same between each point). I then broke it down into building pairs column-wise, the correct approach. Then, I simply forgot to check a core constraint of the problem: place $\leq500$ staffs. I decided to brute force the last pairs rather than repeat the strategy, which actually runs in logarithmic time (I also didn't prove this). The final product was elegant, at least.
|
||||||
|
|
||||||
|
## sorting and searching 02/24/2025
|
||||||
|
|
||||||
|
A lot of these problems I'd seen before but this is good practice anyway. This really is a great problem set. After being stuck on implementation details, I took less time banging my head against the wall and just looked at the solution.
|
||||||
|
|
||||||
|
- [distinct numbers](https://cses.fi/problemset/task/1621): unordered classes are exploitable and nearly always tle. Keep it simple, use a map or PBDS.
|
||||||
|
- [apartments](https://cses.fi/problemset/task/1084): distracted working on this during class but figured it out. **prove statements and use descriptive variable names.**
|
||||||
|
- [ferris wheel](https://cses.fi/problemset/task/1090): leetcode copy from people fitting in boats. Can't say much because I already did it.
|
||||||
|
- [concert tickets](https://cses.fi/problemset/task/1091): totally used PBDS, which is most likely way overkill. **if it works, it works**.
|
||||||
|
- [restaurant customers](https://cses.fi/problemset/task/1619): already seen it (line sweep)
|
||||||
|
- [movie festival](https://cses.fi/problemset/task/1629): already seen it but **improve greedy/exchange arguments**
|
||||||
|
- [missing coin sum](https://cses.fi/problemset/task/2216): **I still don't get this. Write it out.**
|
||||||
|
- [collecting numbers ii](https://cses.fi/problemset/task/2217): I had the exactly correct idea but I thought it was too complex. Practice will improve me developing my better sense of this. Still, I didn't _completely_ understand my idea, which lowered my confidence.
|
||||||
|
|
||||||
|
## more cses 02/22/2025
|
||||||
|
|
||||||
|
- [gray code](https://cses.fi/problemset/task/2205): Missed the pattern + **gave up too _late_**
|
||||||
|
- [towers of hanoi](https://cses.fi/problemset/task/2165): **Recursive grasp is limp**—missed the idea. **Math/proof grasp too**—still don't understand how its $2^n$.
|
||||||
|
- [apple division](https://cses.fi/problemset/task/1623): I got distracted by the idea that it was NP-hard. Even when Sam Altman told me it was DP, I failed to simplify it to "add every element either to one or the other set".
|
||||||
|
- [digit queries](https://cses.fi/problemset/task/2431): got the idea + time complexity quickly, but the **math-based implementation is weak**. Jumped into the code _before_ outlining a strict plan.
|
||||||
|
|
||||||
|
## cses 02/21/2025
|
||||||
|
|
||||||
|
Everyone recommends CSES so I started with it, doing the first 8 problems.
|
||||||
|
|
||||||
|
1. [weird algorithm](https://cses.fi/problemset/task/1068): Trivial, but I forgot to print 1 at the end. **Return the exactly correct answer.**
|
||||||
|
2. [missing number](https://cses.fi/problemset/task/1083) : N/A
|
||||||
|
3. [repetitions](https://cses.fi/problemset/task/1069) : Use invariants.
|
||||||
|
4. [increasing array](https://cses.fi/problemset/task/1094) : Run through one iteration of the algorithm. Here, I erroneously added `x - last` to a quantity, _after manipulating `x`_.
|
||||||
|
5. [permutations](https://cses.fi/problemset/task/1070/): I'd seen this problem before yet struggled. **Fully understand the problem constraints**. In this case, While I understood the definition of a permissible permutation, I didn't fully internalize that you could place number _wherever_ you want. Instead, I was locked in on placing some `x` at `i, i + 2, i + 4, ...`. Further, the fact that I didn't immediately recognize this solution means I need to improve at **upsolving and reviewing problems**.
|
||||||
|
6. [permutations](https://cses.fi/problemset/task/1071): Absolutely disastrous. I continually just f\*dged with the offsets I was adding to my strategy until I happened to get the answer right. **Don't guess**. Also, **don't be lazy—if an algorithm works, focus, write it out, and enjoy being correct**.
|
||||||
|
[two knights](https://cses.fi/problemset/task/1072): Required 2 hints from Sam Altman. **git gud at combinatorics**. Use the paradigm "count good, remove bad." Lock in less on counting specifics—instead, consider what objects _mean in aggregate_. In this case, a $2\times3$ grid represents an "area" of attack, contributing 2 bad knight pairs. This is much easier to digest than attempting to remove overcounting per-knight. Fundamentally, the problem involves placing 2 knights, so breaking it down 2 knights at a time is the most intuitive take.

[two sets](https://cses.fi/problemset/task/1092): **Don't lock in on one approach**. Here, this is dp. The fact that I knew the idea of partitioning the first $n$ numbers into two groups of size $\frac{n(n+1)}{4}$ but failed to recognize the greedy approach means I didn't grasp the fundamental arithmetic of the problem, nor the greedy idea: every number must go into a set. If you add the largest number possible to set 1 to not exceed the target, this number can always be formed in the other set by choosing $1$ and $x-1$. **git gud at greedy**.
|
||||||
|
|
||||||
|
## [938 (div. 3)](https://codeforces.com/contest/1955) 02/15/2025
|
||||||
|
|
||||||
|
What would've been my best contest. Unfortunately, CodeForces decided to go down for TREE$3$ centuries, which absolutely ruined my groove in the contest and terminated my virtual. No excuses, though, as I set a timer and finished up later.
|
||||||
|
|
||||||
|
### A
|
||||||
|
|
||||||
|
Brute-forced it but it still took me a few minutes.
|
||||||
|
|
||||||
|
1. Read (and exploit) problem constraints
|
||||||
|
2. Go back and derive the linear optimization (choosing the one with better marginal utility)
|
||||||
|
3. If you have a (simple enough) solution, just go with it.
|
||||||
|
|
||||||
|
### B
|
||||||
|
|
||||||
|
Easily recognized how to form the matrix (i.e. smallest element first with positive integers $c,d$) but tripped up on the implementation.
|
||||||
|
|
||||||
|
1. Flesh out the steps before coding (i.e. walk through iterations in head, transitions, edge cases on the rows and columns, i.e. checking if `i==n-1`) _especially_ on implementation-heavy problems
|
||||||
|
|
||||||
|
### C
|
||||||
|
|
||||||
|
Did a horrific (but correct) binary search solution. Tripped up by specifics of `std::{upper,lower}_bound` regardless. Technically, generating the prefix and postfix arrays takes two passes and two binary searches to find the answer but this is still more inefficient than the trivial linear scan.
|
||||||
|
|
||||||
|
1. THE INT OVERFLOW INCIDENT
|
||||||
|
2. Deepen understanding of binary search & STL functions to the point that it is second nature
|
||||||
|
3. Consider simple solutions first.
|
||||||
|
|
||||||
|
### D
|
||||||
|
|
||||||
|
Instantly recognized sliding window but struggled with minor details (i.e. keeping track of match count) by rushing to the solution.
|
||||||
|
|
||||||
|
1. Problem statement took a long time to grasp. Look at examples and just read through slower (don't rush!)
|
||||||
|
2. Sliding window grasp isn't _rigorous_—improve this later
|
||||||
|
3. When you don't remember 100% of how an algorithm works, **mentally walk through a few iterations**
|
||||||
|
4. Improve PBDS API familiarity (practice)
|
||||||
|
|
||||||
|
### E
|
||||||
|
|
||||||
|
I had mentally tapped out by this point (I submitted a TLE $O(n^2k)$ solution without using my brain). I solved F first, then took a look at G _before_ coming back to E, robbing me of 10 minutes that could've been the difference between another solve.
|
||||||
|
|
||||||
|
1. You're not like that. Solve problems in order (most of the time, although skipping to F first was a wise decision).
|
||||||
|
2. Consider ideas _fully_ before dropping them. I considered the difference array, then _discarded_ it, erroneously believing a boolean was sufficient and completely forgetting that the concept of ranges complicates flipping.
|
||||||
|
3. Formalize constraints more clearly to help form a solution. For example, the idea that flipping things twice makes no difference, permitting the use of a boolean difference array.
|
||||||
|
4. Didn't get it, still don't get it, don't know why. Way easier than D.
|
||||||
|
5. Prove correctness. I didn't prove that iterating left to right, toggling a range of k actually would always give a correct answer.
|
||||||
|
|
||||||
|
### F
|
||||||
|
|
||||||
|
Had the solution quickly but overcomplicated the implementation. Walked through the examples and took my time.
|
||||||
|
|
||||||
|
1. Failed to formalize the answer to the problem. I noticed patterns but should've strictly defined the following rule: "Every even count of a number contributes one to the score. Further, one triple of 1, 2, 3 also contributes one." Ultimately, I ended up submitting something I wasn't certain would be correct.
|
||||||
|
|
||||||
|
### G
|
||||||
|
|
||||||
|
Wasted time believing this was primitive DP, when it totally wasn't.
|
||||||
|
|
||||||
|
1. You're not that guy (yet >:))
|
||||||
|
2. Prove optimal substructure and overlapping subproblems before using DP & walk through the test cases. In this case, test case 3 immediately disproves dp.
|
||||||
|
|
||||||
|
## the beginning 02/12/2025
|
||||||
|
|
||||||
|
This marks the (true) beginning of my competitive programming journey. By "true" I mean intentional, focused, daily practice. Driven by my admiration for competitive programmers, love of challenge, and desire for a decent new-grad job, I'm excited to start putting in the work.
|
||||||
|
|
||||||
|
This webpage will be an archive of everything related to this process, including my practice strategies, setup, shortcomings, logs, and more. For now, I'll be practicing on [CodeForces](https://codeforces.com) (account [frozenpipe](https://codeforces.com/profile/frozenpipe)) and [CSES](https://cses.fi), using the [CP Handbook](https://cses.fi/book/book.pdf) and browsing by related problem tags with ever-increasing difficulty.
|
||||||
224
src/content/posts/algorithms/extrema-circular-buffer.mdx
Normal file
224
src/content/posts/algorithms/extrema-circular-buffer.mdx
Normal file
|
|
@ -0,0 +1,224 @@
|
||||||
|
---
|
||||||
|
title: "extrema circular buffer"
|
||||||
|
date: "2024-07-30"
|
||||||
|
useKatex: true
|
||||||
|
---
|
||||||
|
|
||||||
|
## context
|
||||||
|
|
||||||
|
While working for [TRB Capital Management](https://trbcap.com/), certain strategies necessitated finding the minimum and maximum of a moving window of prices.
|
||||||
|
|
||||||
|
## problem statement
|
||||||
|
|
||||||
|
Design a data structure supporting the following operations:
|
||||||
|
|
||||||
|
- `build(size_t capacity)` : initialize the data structure with capacity/window size `capacity`
|
||||||
|
The data structure must always hold $\leq$ `capacity` prices.
|
||||||
|
- `void push_back(double value)`
|
||||||
|
- `void pop_front()` : remove the price from the front of the window
|
||||||
|
- `size_t size()` : return the number of prices in the data structure
|
||||||
|
- `double get()` : return the extrema (min or max)
|
||||||
|
|
||||||
|
## solution
|
||||||
|
|
||||||
|
Try to solve it yourself first. The point of this exercise is to create the most theoretically optimal solution you can, not brute-force and move on.
|
||||||
|
|
||||||
|
### naïve solution
|
||||||
|
|
||||||
|
One can design a data structure meeting these requirements through simulating the operations directly with a [`std::deque<double>`](https://en.cppreference.com/w/cpp/container/deque).
|
||||||
|
|
||||||
|
On the upside, this approach is simple to understand. Further, operations are all $O(1)$ time—that is, nearly all operations. The minimum/maximum element must be found via a linear scan in $O(n)$ time, certainly far from optimal.
|
||||||
|
|
||||||
|
```cpp
|
||||||
|
#include <algorithm>
|
||||||
|
#include <deque>
|
||||||
|
#include <stdexcept>
|
||||||
|
|
||||||
|
// Naïve implementation: a fixed-capacity window of prices backed by a
// std::deque. All updates are O(1); get() pays an O(n) linear scan to
// locate the maximum.
class ExtremaCircularBuffer {
public:
  // Construct a buffer that retains at most `capacity` prices.
  ExtremaCircularBuffer(size_t capacity) : capacity(capacity) {}

  // Append a price; once the window is full, the oldest price is evicted.
  void push_back(double value) {
    if (window.size() == capacity) {
      window.pop_front();
    }
    window.push_back(value);
  }

  // Drop the oldest price. Throws std::out_of_range when the buffer is empty.
  void pop_front() {
    if (window.empty()) {
      throw std::out_of_range("Cannot pop_front() from empty buffer");
    }
    window.pop_front();
  }

  // Number of prices currently held (always <= capacity).
  size_t size() const { return window.size(); }

  // Current extremum (maximum), found via a full O(n) scan.
  // Throws std::out_of_range when the buffer is empty.
  double get() const {
    if (window.empty()) {
      throw std::out_of_range("Cannot find max() of empty buffer");
    }
    auto max_it = std::max_element(window.begin(), window.end());
    return *max_it;
  }

private:
  std::deque<double> window;
  size_t capacity;
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### optimizing the approach
|
||||||
|
|
||||||
|
Rather than bear the brunt of the work finding extrema in calls to `get()`, we can distribute it across the data structure as it is built.
|
||||||
|
|
||||||
|
Maintaining the prices in a sorted order seems to suffice, and gives access to both max _and_ min in $O(1)$ time. However, all of the problem constraints have not been addressed. Adhering to the interface of a circular buffer is another challenge.
|
||||||
|
|
||||||
|
Fortunately, pairing each element with a count allows intelligent removal/insertion of elements—if an element has a count of $0$, remove it from the list of sorted prices. A [std::map](https://en.cppreference.com/w/cpp/container/map) allows us to do all of this.
|
||||||
|
|
||||||
|
Now, we can access extrema instantly. Insertion and deletion take $O(log(n))$ time thanks to the map—but we can do better.
|
||||||
|
|
||||||
|
```cpp
|
||||||
|
#include <deque>
|
||||||
|
#include <map>
|
||||||
|
#include <stdexcept>
|
||||||
|
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
### monotonic deques
|
||||||
|
|
||||||
|
Thinking a bit deeper about the problem constraints, it is clear that:
|
||||||
|
|
||||||
|
- If an extrema is pushed onto the data structure, all previously pushed elements are irrelevant to any further operations.
|
||||||
|
|
||||||
|
Elements are processed in FIFO order, enabling this observation to be exploited. This is the foundational idea of the [monotone priority queue](https://www.wikiwand.com/en/Monotone_priority_queue) data structure. So, for maintaining a minimum/maximum, the data structure will store a monotonically increasing/decreasing double-ended queue.
|
||||||
|
|
||||||
|
This solution does not satisfy a circular buffer inherently. If an arbitrary number of elements are removed from the data structure when an extrema is added, it is certainly not possible to maintain a window of fixed size.
|
||||||
|
|
||||||
|
Thus, we make one more observation to meet this criterion:
|
||||||
|
|
||||||
|
- If each price (extrema) on the monotonic double-ended queue also maintains a count of _previously popped elements_, we can deduce the proper action to take when the data structure reaches capacity.
|
||||||
|
|
||||||
|
1. If elements were previously popped before this extrema was added to the data structure, decrement the price's count of popped elements and do nothing.
|
||||||
|
2. Otherwise, either no elements were pushed before this extrema or they've all been popped. Remove (pop) this element from the deque.
|
||||||
|
|
||||||
|
This approach supports all operations in amortized $O(1)$ time (with a monotonic sequence, elements are added or removed at least once; across a sequence of $n$ operations, $n$ total $O(1)$ operations will be executed).
|
||||||
|
|
||||||
|
```cpp
|
||||||
|
#include <deque>
|
||||||
|
#include <stdexcept>
|
||||||
|
#include <utility>
|
||||||
|
|
||||||
|
class ExtremaCircularBuffer {
|
||||||
|
public:
|
||||||
|
void push_back(double value) {
|
||||||
|
if (prices.size() == capacity) {
|
||||||
|
double front_value = prices.front();
|
||||||
|
pop_max(front_value);
|
||||||
|
prices.pop_front();
|
||||||
|
}
|
||||||
|
|
||||||
|
prices.push_back(value);
|
||||||
|
push_max(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
void pop_front() {
|
||||||
|
if (prices.empty()) {
|
||||||
|
throw std::out_of_range("Cannot pop_front() from empty buffer");
|
||||||
|
}
|
||||||
|
|
||||||
|
double front_value = prices.front();
|
||||||
|
pop_max(front_value);
|
||||||
|
prices.pop_front();
|
||||||
|
}
|
||||||
|
|
||||||
|
double get_max() const {
|
||||||
|
if (prices.empty()) {
|
||||||
|
throw std::out_of_range("Cannot find max() of empty buffer");
|
||||||
|
}
|
||||||
|
|
||||||
|
return maxs.front().first;
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
void push_max(double value) {
|
||||||
|
size_t popped = 0;
|
||||||
|
|
||||||
|
while (!maxs.empty() && maxs.back().first < value) {
|
||||||
|
popped += maxs.back().second + 1;
|
||||||
|
maxs.pop_back();
|
||||||
|
}
|
||||||
|
|
||||||
|
maxs.emplace_back(value, popped);
|
||||||
|
}
|
||||||
|
|
||||||
|
void pop_max(double value) {
|
||||||
|
size_t popped = maxs.front().second;
|
||||||
|
|
||||||
|
if (popped == 0) {
|
||||||
|
maxs.pop_front();
|
||||||
|
} else {
|
||||||
|
--maxs.front().second;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* methods & fields omitted for brevity */
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### further improvements
|
||||||
|
|
||||||
|
The final implementation used at TRB includes the following features:
|
||||||
|
|
||||||
|
1. A ringbuffer backed by a statically-allocated `std::array`, as any fixed-size queue can be supplanted with one
|
||||||
|
2. A templatized value type and comparator for flexibility
|
||||||
|
3. C++ specific optimizations (rule of 5, smart pointers, and an STL-compliant API)
|
||||||
185
src/content/posts/algorithms/leetcode-daily.mdx
Normal file
185
src/content/posts/algorithms/leetcode-daily.mdx
Normal file
|
|
@ -0,0 +1,185 @@
|
||||||
|
---
|
||||||
|
title: "leetcode daily"
|
||||||
|
date: "2024-04-13"
|
||||||
|
useKatex: true
|
||||||
|
---
|
||||||
|
|
||||||
|
## [count good numbers](https://leetcode.com/problems/count-good-numbers/submissions/1605647445/?envType=daily-question&envId=2025-04-13) 04/13/2024
|
||||||
|
|
||||||
|
### understanding the problem
|
||||||
|
|
||||||
|
This is a combinatoric problem at heart. You have some slots for evens and some for primes, with a limited number of choices for each. Leverage the multiplication rule, which states that if you have $n$ slots with $x$ choices, you get $x^n$ possible outcomes.
|
||||||
|
|
||||||
|
### doing it
|
||||||
|
|
||||||
|
So, what's the answer? If we know which slots we have and the number of choices for them, we're done. Since this is leetcode, they don't let you think—they just give you the answer. You have 2 types of slots (even and odd indices) with 5 ($\{0,2,4,6,8\}$) and 4 ($\{2,3,5,7\}$) choices respectively. Therefore, the answer is: $5^{\text{# even slots}}\cdot4^{\text{# odd slots}}$ By counting or with small cases, we have $\lceil\frac{n}{2}\rceil$ even slots and $\lfloor\frac{n}{2}\rfloor$ odd slots. Let's submit it! And... TLE. Check _everything_ before you submit your code—in this case, constraint $n\leq 10^{16}$ informs us of something suspect. In the worst case, $\frac{n}{2}\approx 5\times10^{15}$. This is far too many multiplications, so we can leverage binary exponentiation instead (and probably should've been the whole time!). Don't forget the mod.
|
||||||
|
|
||||||
|
## [minimum number of operations to make array distinct](https://leetcode.com/problems/minimum-number-of-operations-to-make-elements-in-array-distinc) 04/09/2024
|
||||||
|
|
||||||
|
### understanding the problem
|
||||||
|
|
||||||
|
You can remove elements in groups of 3 _solely_ from the beginning of the array. Perform this operation until there are no more duplicates left, returning the number of times you had to perform the operation.
|
||||||
|
|
||||||
|
### solution: rephrase the question
|
||||||
|
|
||||||
|
Definitionally, you remove the _last_ duplicate. If such duplicate is at 0-indexed `i`, it belongs to the $\lceil \frac{i + 1}{3}\rceil$th chunk of 3 (i.e. operation). Find the last duplicate by leveraging a frequency map and iterating backwards through the input.
|
||||||
|
|
||||||
|
### asymptotic complexity
|
||||||
|
|
||||||
|
The solution is optimal, considering the least amount of elements possible in:
|
||||||
|
|
||||||
|
Time Complexity: $O(n)$ Space Complexity: $\Theta(1)$
|
||||||
|
|
||||||
|
## [count the number of fair pairs](https://leetcode.com/problems/count-the-number-of-fair-pairs/) 09/13/2024
|
||||||
|
|
||||||
|
### problem statement
|
||||||
|
|
||||||
|
Given an array `nums` of integers and upper/lower integer bounds `upper`/`lower` respectively, return the number of unique valid index pairs such that: $i\neq j,\ lower\leq nums_i+nums_j\leq upper$
|
||||||
|
|
||||||
|
### understanding the problem
|
||||||
|
|
||||||
|
This is another sleeper daily in which a bit of thinking in the beginning pays dividends. Intuitively, I think it makes sense to reduce the “dimensionality” of the problem. Choosing both `i` and `j` concurrently seems tricky, so let's assume we've found a valid `i`. What must be true? Well: $i\neq j,lower-nums_i\leq nums_j\leq upper-nums_i$
|
||||||
|
|
||||||
|
It doesn't seem like we've made much progress. If nums is a sequence of random integers, _there's truly no way to find all `j` satisfying this condition efficiently_.
|
||||||
|
|
||||||
|
The following question naturally arises: can we modify our input to find such `j` efficiently? Recall our goal: find the smallest/largest j to fit within our altered bounds—in other words, find the smallest $x$ less/greater than or equal to a number. If binary search bells aren't clanging in your head right now, I'm not sure what to say besides keep practicing.
|
||||||
|
|
||||||
|
So, it would be nice to sort `nums` to find such `j` relatively quickly. However: _are we actually allowed to do this?_ This is the core question I think everyone skips over. Maybe it is trivial but it is important to emphasize:
|
||||||
|
|
||||||
|
- _Yes, we are allowed to sort the input_. Re-frame the problem: what we are actually doing is choosing distinct `i`, `j` to satisfy some condition. The order of `nums` does not matter—rather, its contents do. Any input to this algorithm with `nums` with the same contents will yield the same result. If we were to modify `nums` instead of rearrange it, this would be invalid because we could be introducing/taking away valid index combinations.
|
||||||
|
|
||||||
|
Let's consider our solution a bit more before implementing it:
|
||||||
|
|
||||||
|
Is the approach feasible? We're sorting `nums` then binary searching over it considering all `i`, which will take around $O(nlg(n))$ time. `len(nums)`$\leq10^5$, so this is fine. How do we avoid double-counting? The logic so far makes no effort. If we consider making all pairs with indices *less than* `i` for all `i` left-to-right, we'll be considering all valid pairs with no overlap. This is a common pattern—take a moment to justify it to yourself.
|
||||||
|
*Exactly* how many elements do we count? Okay, we're considering some rightmost index `i` and we've found upper and lower index bounds `j` and `k` respectively. We can pair `nums[j]` with all elements up to and including `nums[k]` (besides `nums[j]`). There are exactly $k-j$ of these. If the indexing confuses you, draw it out and prove it to yourself. How do we get our final answer? Accumulate all `k-j` for all `i`.
|
||||||
|
|
||||||
|
### carrying out the plan
|
||||||
|
|
||||||
|
The following approach implements our logic quite elegantly and directly. The third and fourth arguments to the `bisect` calls specify `lo` (inclusive) and `hi` (exclusive) bounds for our search space, mirroring the criteria that we search across all indices $\lt i$.
|
||||||
|
|
||||||
|
### optimizing the approach
|
||||||
|
|
||||||
|
If we interpret the criteria this way, the above approach is relatively efficient. To improve this approach, we'll need to reinterpret the constraints. Forget about the indexing and consider the constraint in aggregate. We want to find all $i,j$ with $x=nums_i+nums_j$ such that $i\neq j,\ lower\leq x\leq upper$.
|
||||||
|
|
||||||
|
We _still_ need to reduce the “dimensionality” of the problem—there are just too many moving parts to consider at once. This seems challenging. Let's simplify the problem to identify helpful ideas: pretend `lower` does not exist (and, of course, that `nums` is sorted).
|
||||||
|
|
||||||
|
We're looking for all index pairs with sum $\leq upper$. And behold: (almost) two sum in the wild. This can be accomplished with a two-pointers approach—this post is getting quite long so we'll skip over why this is the case—but the main win here is that we can solve this simplified version of our problem in $O(n)$. Are we any closer to actually solving the problem? Now, we have the count of index pairs $\leq upper$. Is this our answer? No—some may be too small, namely, with sum $\lt lower$. Let's exclude those by running our two-pointer approach with an upper bound of $lower-1$ (we want to include $lower$). Now, our count reflects the total number of index pairs with a sum in our interval bound.
|
||||||
|
|
||||||
|
Note that this really is just running a prefix sum/using the “inclusion-exclusion” principle/however you want to phrase it.
|
||||||
|
|
||||||
|
### some more considerations
|
||||||
|
|
||||||
|
The second approach is _asymptotically_ equivalent. However, it's still worth considering for two reasons:
|
||||||
|
|
||||||
|
1. If an interviewer says “assume `nums` is sorted” or “how can we do better?”—you're cooked.
|
||||||
|
2. (Much) more importantly, it's extremely valuable to be able to _reconceptualize_ a problem and look at it from different angles. Not being locked in on a solution shows perseverance, curiosity, and strong problem-solving abilities.
|
||||||
|
|
||||||
|
### asymptotic complexity
|
||||||
|
|
||||||
|
Time Complexity: $O(nlg(n))$ for both—$O(n)$ if `nums` is sorted with respect to the second approach. Space Complexity: $\Theta(1)$ for both.
|
||||||
|
|
||||||
|
## [most beautiful item for each query](https://leetcode.com/problems/most-beautiful-item-for-each-query/description/) 09/12/2024
|
||||||
|
|
||||||
|
### problem statement
|
||||||
|
|
||||||
|
Given an array `items` of $(price, beauty)$ tuples, answer each integer query of $queries$. The answer to some `query[i]` is the maximum beauty of an item with $price\leq$`items[i][0]`.
|
||||||
|
|
||||||
|
### understanding the problem
|
||||||
|
|
||||||
|
Focus on one aspect of the problem at a time. To answer a query, we need to have considered:
|
||||||
|
|
||||||
|
1. Items with a non-greater price
|
||||||
|
2. The beauty of all such items
|
||||||
|
|
||||||
|
Given some query, how can we _efficiently_ identify the “last” item with an acceptable price? Leverage the most common pre-processing algorithm: sorting. Subsequently, we can binary search `items` (keyed by price, of course) to identify all considerable items in $O(lg(n))$. Great. Now we need to find the item with the largest beauty. Naïvely considering all the elements is a _correct_ approach—but is it efficient? Considering our binary search $O(lg(n))$ and beauty search $O(n)$ across $\Theta(n)$ queries with `len(items)<=len(queries)`$\leq10^5$, an $O(n^2lg(n))$ approach is certainly unacceptable.
|
||||||
|
|
||||||
|
Consider alternative approaches to responding to our queries. It is clear that answering them in-order yields no benefit (i.e. we have to consider each item all over again, per query)—could we answer them in another order to save computations?
|
||||||
|
|
||||||
|
Visualizing our items from left-to-right, we're interested in both increasing beauty and prices. If we can scan our items left to right, we can certainly “accumulate” a running maximal beauty. We can leverage sorting once again to answer our queries left-to-right, then re-order them appropriately before returning a final answer. Sorting both `queries` and `items` with a linear scan will take $O(nlg(n))$ time, meeting the constraints.
|
||||||
|
|
||||||
|
### carrying out the plan
|
||||||
|
|
||||||
|
A few specifics need to be understood before coding up the approach:
|
||||||
|
|
||||||
|
- Re-ordering the queries: couple `query[i]` with `i`, then sort. When responding to queries in sorted order, we know where to place them in an output container—index `i`.
|
||||||
|
- The linear scan: accumulate a running maximal beauty, starting at index `0`. For some query `query`, we want to consider all items with price less than or equal to `query`. Therefore, loop until this condition is _violated_— the previous index will represent the last considered item.
|
||||||
|
- Edge cases: it's perfectly possible the last considered item is invalid (consider a query cheaper than the cheapest item). Return `0` as specified by the problem constraints.
|
||||||
|
|
||||||
|
### asymptotic complexity
|
||||||
|
|
||||||
|
Let `n=len(items)` and `m=len(queries)`. There may be more items than queries, or vice versa. Note that a “looser” upper bound can be found by analyzing the runtime in terms of $max\{n,m\}$. Time Complexity: $O(nlg(n)+mlg(m)+m)\in O(nlg(n)+mlg(m))$. An argument can be made that because `queries[i],items[i][{0,1}]`$\leq10^9$, radix sort can be leveraged to achieve a time complexity of $O(d \cdot (n + k + m + k))\in O(9\cdot (n + m))\in O(n+m)$. Space Complexity: $\Theta(1)$ auxiliary, setting aside the $O(m)$ space that must be allocated for the output. If `queries`/`items` cannot be modified in-place, increase the space complexity by $m$/$n$ respectively.
|
||||||
|
|
||||||
|
## [shortest subarray with or at least k ii](https://leetcode.com/problems/shortest-subarray-with-or-at-least-k-ii/description/) 09/11/2024
|
||||||
|
|
||||||
|
### problem statement
|
||||||
|
|
||||||
|
Given an array of non-negative integers $num$ and some $k$, find the length of the shortest non-empty subarray of nums such that its element-wise bitwise OR is greater than or equal to $k$—return -1 if no such array exists.
|
||||||
|
|
||||||
|
### developing an approach
|
||||||
|
|
||||||
|
Another convoluted, uninspired bitwise-oriented daily.
|
||||||
|
|
||||||
|
Anyway, we're looking for a subarray that satisfies a condition. Considering all subarrays with `len(nums)`$\leq2\times10^5$ is impractical according to the common rule of $\approx10^8$ computations per second on modern CPUs.
|
||||||
|
|
||||||
|
Say we're building some array `xs`. Adding another element `x` to this sequence can only increase our element-wise bitwise OR. Of course, it makes sense to do this. However, consider `xs` after—it is certainly possible that including `x` finally got us to at least `k`. However, not all of the elements in the array are useful now; we should remove some.
|
||||||
|
|
||||||
|
Which do we remove? Certainly not any from the middle—we'd no longer be considering a subarray. We can only remove from the beginning.
|
||||||
|
|
||||||
|
Now, how many times do we remove? While the element-wise bitwise OR of `xs` is $\geq k$, we can naïvely remove from the start of `xs` to find the smallest subarray. Lastly, what's the state of `xs` after these removals? Now, we (may) have an answer and the element-wise bitwise OR of `xs` is guaranteed to be $\lt k$. Inductively, expand the array to search for a better answer.
|
||||||
|
|
||||||
|
This approach is generally called a variable-sized “sliding window”. Every element of `nums` is only added (considered in the element-wise bitwise OR) or removed (discard) one time, yielding an asymptotically linear time complexity. In other words, this is a realistic approach for our constraints.
|
||||||
|
|
||||||
|
### carrying out the plan
|
||||||
|
|
||||||
|
Plugging in our algorithm to my sliding window framework:
|
||||||
|
|
||||||
|
Done, right? No. TLE.
|
||||||
|
|
||||||
|
If you thought this solution would work, you move too fast. Consider _every_ aspect of an algorithm before implementing it. In this case, we (I) overlooked one core question:
|
||||||
|
|
||||||
|
1. _How do we maintain our element-wise bitwise OR_?
|
||||||
|
|
||||||
|
Calculating it by directly maintaining a window of length $n$ takes $n$ time—with a maximum window size of $n$, this solution is $O(n^2)$. Let's try again. Adding an element is simple—OR it to some cumulative value. Removing an element, not so much. Considering some $x$ to remove, we only unset one of its bits from our aggregated OR if it's the “last” one of these bits set across all numbers contributing to our aggregated value. Thus, to maintain our aggregate OR, we want to map bit “indices” to counts. A hashmap (dictionary) or static array will do just fine. Adding/removing some $x$ will increment/decrement the counter's bit count at its respective position. I like to be uselessly specific sometimes—choosing the latter approach, how big should our array be? As many bits as represented by the largest of $nums$ (or $k$ itself): $\lfloor \lg(\max\{nums,k\})\rfloor+1$
|
||||||
|
|
||||||
|
Note that:
|
||||||
|
|
||||||
|
Below we use the [change of base formula for logarithms](https://artofproblemsolving.com/wiki/index.php/Change_of_base_formula) because $log_2(x)$ is not available in python.It's certainly possible that $max\{nums, k\}=0$. To avoid the invalid calculation $log(0)$, take the larger of $1$ and this calculation. The number of digits will then (correctly) be $1$ in this special case.
|
||||||
|
|
||||||
|
### asymptotic complexity
|
||||||
|
|
||||||
|
Note that the size of the frequency map is bounded by $\lg_{2}(10^9)\approx30$. Space Complexity: thus, the window uses $O(1)$ space. Time Complexity: $\Theta($`len(nums)`$)$—every element of `nums` is considered at least once and takes $O(1)$ work each to find the element-wise bitwise OR.
|
||||||
|
|
||||||
|
## [minimum array end](https://leetcode.com/problems/minimum-array-end/) 09/10/2024
|
||||||
|
|
||||||
|
### problem statement
|
||||||
|
|
||||||
|
Given some $x$ and $n$, construct a strictly increasing array (say `nums` ) of length $n$ such that `nums[0] & nums[1] ... & nums[n - 1] == x` , where `&` denotes the bitwise AND operator.
|
||||||
|
|
||||||
|
Finally, return the minimum possible value of `nums[n - 1]`.
|
||||||
|
|
||||||
|
### understanding the problem
|
||||||
|
|
||||||
|
The main difficulty in this problem lies in understanding what is being asked (intentionally or not, the phrasing is terrible). Some initial notes:
|
||||||
|
|
||||||
|
- The final array need not be constructed
|
||||||
|
- The element-wise bitwise AND of an array equals `x` if and only if each element has `x`'s bits set—and no other bit is set by all elements
|
||||||
|
- It makes sense to set `nums[0] == x` to ensure `nums[n - 1]` is minimal
|
||||||
|
|
||||||
|
### developing an approach
|
||||||
|
|
||||||
|
An inductive approach is helpful. Consider the natural question: “If I had correctly generated `nums[:i]`”, how could I find `nums[i]`? In other words, how can I find the next smallest number such that `nums` 's element-wise bitwise AND is still $x$?Hmm... this is tricky. Let's think of a similar problem to glean some insight: “Given some $x$, how can I find the next smallest number?”. The answer is, of course, add one (bear with me here).We also know that all of `nums[i]` must have at least $x$'s bits set. Therefore, we need to alter the unset bits of `nums[i]`.
|
||||||
|
|
||||||
|
The key insight of this problem is combining these two ideas to answer our question: _Just “add one” to `nums[i - 1]`'s unset bits_. Repeat this to find `nums[n - 1]`.
|
||||||
|
|
||||||
|
One last piece is missing—how do we know the element-wise bitwise AND is _exactly_ $x$? Because `nums[i > 0]` only sets $x$'s unset bits, every number in `nums` will have at least $x$'s bits set. Further, no other bits will be set because $x$ has them unset.
|
||||||
|
|
||||||
|
### carrying out the plan
|
||||||
|
|
||||||
|
Let's flesh out the remaining parts of the algorithm:
|
||||||
|
|
||||||
|
- `len(nums) == n` and we initialize `nums[0] == x`. So, we need to “add one” `n - 1` times
|
||||||
|
How do we carry out the additions? We could iterate $n - 1$ times and simulate them. However, we already know how we want to alter the unset bits of `nums[0]` inductively— (add one) _and_ how many times we want to do this ($n - 1$). Because we're adding one $n-1$ times to $x$'s unset bits (right to left, of course), we simply set its unset bits to those of $n - 1$. The implementation is relatively straightforward. Traverse $x$ from least-to-most significant bit, setting its $i$th unset bit to $n - 1$'s $i$th bit. Use a bitwise mask `mask` to traverse $x$.
|
||||||
|
|
||||||
|
### asymptotic complexity
|
||||||
|
|
||||||
|
Space Complexity: $\Theta(1)$—a constant amount of numeric variables are allocated regardless of $n$ and $x$. Time Complexity: in the worst case, we may need to traverse the entirety of $x$ to distribute every bit of $n - 1$ to $x$. This occurs if and only if $x$ is all ones ($\exists k\gt 0 : 2^k-1=x$). $x$ and $n$ have $lg(x)$ and $lg(n)$ bits respectively, so the solution is $O(lg(x) + lg(n))\in O(log(xn))$. $1\leq x,n\leq 1e8$, so this runtime is bounded by $O(log(1e8^2))\in O(1)$.
|
||||||
227
src/content/posts/algorithms/models-of-production.mdx
Normal file
227
src/content/posts/algorithms/models-of-production.mdx
Normal file
|
|
@ -0,0 +1,227 @@
|
||||||
|
---
|
||||||
|
title: "models of production"
|
||||||
|
date: "2024-06-22"
|
||||||
|
useKatex: true
|
||||||
|
---
|
||||||
|
|
||||||
|
This post offers a basic introduction to the Solow, Romer, and Romer-Solow economic models, as taught by [Vladimir Smirnyagin](https://www.vladimirsmirnyagin.com/) and assisted by [Donghyun Suh](https://www.donghyunsuh.com/) in Intermediate Macroeconomics (ECON 3020) during the Spring semester of 2024 at the University of Virginia.
|
||||||
|
|
||||||
|
## solow
|
||||||
|
|
||||||
|
### introduction
|
||||||
|
|
||||||
|
The Solow Model is an economic model of production that incorporates the idea of capital accumulation. Based on the [Cobb-Douglas production function](https://en.wikipedia.org/wiki/Cobb%E2%80%93Douglas_production_function), the Solow Model describes production as follows:
|
||||||
|
|
||||||
|
$$Y_t=F(K_t,L_t)=\bar{A}K_t^\alpha L_t^{1-\alpha}$$
|
||||||
|
|
||||||
|
With:
|
||||||
|
|
||||||
|
- $\bar{A}$: total factor productivity (TFP)
|
||||||
|
- $\alpha$: capital's share of output—usually $1/3$ based on [empirical data](https://arxiv.org/pdf/1105.2123)
|
||||||
|
|
||||||
|
In this simple model, the following statements describe the economy:
|
||||||
|
|
||||||
|
1. Output is either saved or consumed; in other words, savings equals investment
|
||||||
|
2. Capital accumulates according to investment $I_t$ and depreciation $\bar{d}$, beginning with $K_0$ (often called the <u>Law of Capital Motion</u>)
|
||||||
|
3. Labor $L_t$ is time-independent
|
||||||
|
4. A savings rate $\bar{s}$ describes the invested portion of total output
|
||||||
|
|
||||||
|
Including the production function, these four ideas encapsulate the Solow Model:
|
||||||
|
|
||||||
|
1. $C_t + I_t = Y_t$
|
||||||
|
2. $\Delta K_{t+1} = I_t - \bar{d} K_t$
|
||||||
|
3. $L_t = \bar{L}$
|
||||||
|
4. $I_t = \bar{s} Y_t$
|
||||||
|
|
||||||
|
### solving the model
|
||||||
|
|
||||||
|
Visualizing the model, namely output as a function of capital, provides helpful intuition before solving it.
|
||||||
|
|
||||||
|
Letting $(L_t,\alpha)=(\bar{L}, \frac{1}{3})$, it follows that:
|
||||||
|
|
||||||
|
$$Y_t=F(K_t,L_t)=\bar{A}K_t^{\frac{1}{3}} \bar{L}^{\frac{2}{3}}$$
|
||||||
|
|
||||||
|
Utilizing this simplification and its graphical representation below, output is clearly characterized by the cube root of capital:
|
||||||
|
|
||||||
|
$\bar{A}:$ 1.00 $\bar{d}:$ 0.50 $\bar{s}:$ 0.50 $\alpha:$ 0.33
|
||||||
|
|
||||||
|
When investment is completely disincentivized by depreciation (in other words, $sY_t=\bar{d}K_t$), the economy equilibrates at a so-called "steady-state" with equilibrium $(K_t,Y_t)=(K_t^*,Y_t^*)$.
|
||||||
|
|
||||||
|
Using this equilibrium condition, it follows that:
|
||||||
|
|
||||||
|
$$Y_t^*=\bar{A}{K_t^*}^\alpha\bar{L}^{1-\alpha}$$ $$\rightarrow \bar{d}K_t^*=\bar{s}\bar{A}{K_t^*}^\alpha\bar{L}^{1-\alpha}$$ $$\rightarrow K^*=\bar{L}(\frac{\bar{s}\bar{A}}{\bar{d}})^\frac{1}{1-\alpha}$$ $$\rightarrow Y^*=\bar{A}^\frac{1}{1-\alpha}(\frac{\bar{s}}{\bar{d}})^\frac{\alpha}{1-\alpha}\bar{L}$$
|
||||||
|
|
||||||
|
Thus, the equilibrium intensive form (output per worker) of both capital and output are summarized as follows:
|
||||||
|
|
||||||
|
$$(k^*,y^*)=(\frac{K^*}{\bar{L}},\frac{Y^*}{\bar{L}}) =((\frac{\bar{s}\bar{A}}{\bar{d}})^\frac{1}{1-\alpha}, \bar{A}^\frac{1}{1-\alpha}(\frac{\bar{s}}{\bar{d}})^\frac{\alpha}{1-\alpha})$$
|
||||||
|
|
||||||
|
### analysis
|
||||||
|
|
||||||
|
Using both mathematical intuition and manipulating the visualization above, we find that:
|
||||||
|
|
||||||
|
- $\bar{A}$ has a positive relationship with steady-state output
|
||||||
|
- Capital is influenced by workforce size, TFP, and savings rate
|
||||||
|
- Capital output share's $\alpha$ impact on output is twofold:
|
||||||
|
|
||||||
|
1. Directly through capital quantity
|
||||||
|
2. Indirectly through TFP
|
||||||
|
|
||||||
|
- Large deviations in capital from steady-state $K^*$ induce net investments of larger magnitude, leading to an accelerated reversion to the steady-state
|
||||||
|
- Economies stagnate at the steady-state $(K^*,Y^*)$—this model provides no avenues for long-run growth.
|
||||||
|
|
||||||
|
Lastly (and perhaps most importantly), exogenous parameters $\bar{s}, \bar{d}$, and $\bar{A}$ all have immense ramifications on economic status. For example, comparing the difference in country $C_1$'s output versus $C_2$'s using the Solow Model, we find that a difference in economic performance can only be explained by these factors:
|
||||||
|
|
||||||
|
$$\frac{Y_1}{Y_2}=\frac{\bar{A_1}}{\bar{A_2}}(\frac{\bar{s_1}}{\bar{s_2}})^\frac{\alpha}{1-\alpha}$$
|
||||||
|
|
||||||
|
We see that TFP is more important in explaining the differences in per-capita output ($\frac{1}{1-\alpha}>\frac{\alpha}{1-\alpha},\alpha\in[0,1)$). Notably, the Solow Model does not give any insights into how to alter the most important predictor of output, TFP.
|
||||||
|
|
||||||
|
## romer
|
||||||
|
|
||||||
|
### introduction
|
||||||
|
|
||||||
|
How, then, can we address these shortcomings?
|
||||||
|
|
||||||
|
The Romer Model provides an answer by both modeling ideas $A_t$ (analogous to TFP in the Solow model) endogenously and utilizing them to provide a justification for sustained long-run growth.
|
||||||
|
|
||||||
|
The Model divides the world into two parts:
|
||||||
|
|
||||||
|
- <u>Objects</u>: finite resources, like capital and labor in the Solow Model
|
||||||
|
- <u>Ideas</u>: infinite, [non-rivalrous](https://en.wikipedia.org/wiki/Rivalry_%28economics%29) items leveraged in production (note that ideas may be [excludable](https://en.wikipedia.org/wiki/Excludability), though)
|
||||||
|
|
||||||
|
The Romer Models' production function can be modelled as:
|
||||||
|
|
||||||
|
$$Y_t=F(A_t,L_{yt})=A_tL_{yt}$$
|
||||||
|
|
||||||
|
With:
|
||||||
|
|
||||||
|
- $A_t$: the amount of ideas $A$ in period $t$
|
||||||
|
- $L_{yt}$: the population working on production-facing (output-driving) tasks
|
||||||
|
|
||||||
|
Assuming $L_t=\bar{L}$ people work in the economy, a proportion $\bar{l}$ of the population focuses on making ideas: $L_{at}=\bar{l}\bar{L}\rightarrow L_{yt}=(1-\bar{l})\bar{L}$.
|
||||||
|
|
||||||
|
Further, this economy garners ideas with time at rate $\bar{z}$: the "speed of ideas". Now, we can describe the quantity of ideas tomorrow as function of those of today: <u>the Law of Ideal Motion</u> (I made that up).
|
||||||
|
|
||||||
|
$$A_{t+1}=A_t+\bar{z}A_tL_{at}\leftrightarrow\Delta A_{t+1}=\bar{z}A_tL_{at}$$
|
||||||
|
|
||||||
|
Analogously to capital in the Solow Model, ideas begin in the economy with some $\bar{A}_0\gt0$ and grow at an _exponential_ rate. At its core, this is because ideas are non-rivalrous; more ideas bring about more ideas.
|
||||||
|
|
||||||
|
Finally, we have a model:
|
||||||
|
|
||||||
|
1. $Y^*_t=A_tL_{yt}$
|
||||||
|
2. $\Delta A_{t+1} = \bar{z}A_tL_{at}$
|
||||||
|
3. $L_{yt}+L_{at}=\bar{L}$
|
||||||
|
4. $L_{at}=\bar{l}\bar{L}$
|
||||||
|
|
||||||
|
A visualization of the Romer Model shows that the economy grows exponentially—production knows no bounds ([_ceteris paribus_](https://en.wikipedia.org/wiki/Ceteris_paribus), of course). A graph of $log_{10}(Y_t)$ can be seen below:
|
||||||
|
|
||||||
|
$\bar{z}:$ 0.50 $\bar{L}:$ 505 $\bar{l}:$ 0.50 $\bar{A}_0:$ 500
|
||||||
|
|
||||||
|
Playing with the sliders, this graph may seem underwhelming in comparison to the Solow Model. However, on a logarithmic scale, small changes in the parameters lead to massive changes in the growth rate of ideas and economies:
|
||||||
|
|
||||||
|
### solving the model
|
||||||
|
|
||||||
|
To find the output in terms of exogenous parameters, first note that
|
||||||
|
|
||||||
|
$$L_t=\bar{L}\rightarrow L_{yt}=(1-\bar{l})\bar{L}$$
|
||||||
|
|
||||||
|
Now, all that remains is to find ideas $A_t$. It is assumed that ideas grow at some rate $g_A$:
|
||||||
|
|
||||||
|
$$A_t=A_0(1+g_A)^t$$
|
||||||
|
|
||||||
|
Using the growth rate formula, we find:
|
||||||
|
|
||||||
|
$$g_A=\frac{\Delta A_{t+1}-A_t}{A_t}=\frac{A_t+\bar{z}A_tL_{at}-A_t}{A_t}=\bar{z}L_{at}=\bar{z}\bar{l}\bar{L}$$
|
||||||
|
|
||||||
|
Thus, ideas $A_t=A_0(1+\bar{z}\bar{l}\bar{L})^t$. Finally, output can be solved using the production function:
|
||||||
|
|
||||||
|
$$Y_t=A_t L_{yt}=A_0(1+\bar{z}\bar{l}\bar{L})^t(1-\bar{l})\bar{L}$$
|
||||||
|
|
||||||
|
### analysis
|
||||||
|
|
||||||
|
We see the Romer model exhibits long-run growth because ideas have non-diminishing returns due to their nonrival nature. In this model, capital and income eventually slow but ideas continue to yield increasing, unrestricted returns.
|
||||||
|
|
||||||
|
Further, all economies continually and perpetually grow along a constant "Balanced Growth Path" as previously defined by $Y_t$ as a function of the endogenous variables. This directly contrasts the Solow model, in which an economy converges to a steady-state via transition dynamics.
|
||||||
|
|
||||||
|
Changes in the growth rate of ideas, then, alter the growth rate of output itself—in this case, parameters $\bar{l}, \bar{z}$, and $\bar{L}$. This is best exemplified by comparing the growth rate before and after a parameter changes. In the below example, a larger $\bar{l}$ initially drops output due to fewer workers being allocated to production. Soon after, though, output recovers along a "higher" Balanced Growth Path.
|
||||||
|
|
||||||
|
$\bar{l}_1:$ 0.50 $t_0:$ 50
|
||||||
|
|
||||||
|
Notably, while both the Romer and Solow Models help to analyze growth across countries, they both are unable to resolve one question: why can and do investment rates and TFP differ across countries? This is a more fundamental economic question involving culture, institutions, and social dynamics—one day I hope we'll have an answer.
|
||||||
|
|
||||||
|
## romer-solow
|
||||||
|
|
||||||
|
### introduction
|
||||||
|
|
||||||
|
While the Romer Model provides an avenue for long-run economic growth, it is anything but realistic—surely economies do not grow at an ever-increasing blistering rate into perpetuity. A model in which:
|
||||||
|
|
||||||
|
- Economies grow _faster_ the further _below_ they are from their balanced growth path
|
||||||
|
- Economies grow _slower_ the further _above_ they are from their balanced growth path
|
||||||
|
|
||||||
|
would certainly be more pragmatic. The Solow Model's capital dynamics do, in some sense, mirror part of this behavior with respect to the steady-state (output converges quicker/slower to the steady state the further/closer it is from equilibrium).
|
||||||
|
|
||||||
|
Combining the dynamics of the Romer Model's ideas and the Solow Model's capital stock could yield the desired result. Intuitively, incorporating capital into output via the Solow Model's production function, as well as including the Law of Capital Motion seems like one way to legitimately create this so-called "Romer-Solow" model:
|
||||||
|
|
||||||
|
1. $Y_t=A_t K_t^\alpha L_{yt}^{1-\alpha}$
|
||||||
|
2. $\Delta K_{t+1}=\bar{s}Y_t-\bar{d}K_t$
|
||||||
|
3. $\Delta A_{t+1} = \bar{z}A_tL_{at}$
|
||||||
|
4. $L_{yt}+L_{at}=\bar{L}$
|
||||||
|
5. $L_{at}=\bar{l}\bar{L}$
|
||||||
|
|
||||||
|
### solving the model
|
||||||
|
|
||||||
|
Based on the the motivations for creating this model, it is more useful to first analyze the growth rates of equilibrium long run output $Y_t^*$.
|
||||||
|
|
||||||
|
According to the production function, $g_Y=g_A+\alpha g_K+(1-\alpha)g_{L_y}$
|
||||||
|
|
||||||
|
From previous analysis it was found that $g_A=\bar{z}\bar{l}\bar{L}$.
|
||||||
|
|
||||||
|
Based on the Law of Capital Motion,
|
||||||
|
|
||||||
|
$$g_K=\frac{\Delta K_{t+1}}{K_t}=\bar{s}\frac{Y_t}{K_t}-\bar{d}$$
|
||||||
|
|
||||||
|
Because growth rates are constant on the Balanced Growth Path, $g_K$ must be constant as well. Thus, so is $\bar{s}\frac{Y_t}{K_t}-\bar{d}$; it must be that $g_K^*=g_Y^*$.
|
||||||
|
|
||||||
|
The model assumes population is constant, so $g_{\bar{L}}=0\rightarrow g_{L_{yt}}=0$ as well.
|
||||||
|
|
||||||
|
Combining these terms, we find:
|
||||||
|
|
||||||
|
$$g_Y^*=\bar{z}\bar{l}\bar{L}+\alpha g_Y^*+(1-\alpha)\cdot 0\rightarrow$$
|
||||||
|
$$g_Y^*=\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}$$
|
||||||
|
|
||||||
|
Solving for $Y_t^*$ is trivial after discovering $g_K=g_Y$ must hold on a balanced growth path.
|
||||||
|
|
||||||
|
Invoking the <u>Law of Capital Motion</u> with magic chants,
|
||||||
|
|
||||||
|
$$g_K^*=\bar{s}\frac{Y_t^*}{K_t^*}-\bar{d}=g_Y^*\rightarrow K_t^*=\frac{\bar{s}Y_t^*}{g_Y^*+\bar{d}}$$
|
||||||
|
|
||||||
|
Isolating $Y_t^*$,
|
||||||
|
|
||||||
|
$$Y_t^*=A_t^* (\frac{\bar{s}Y_t^*}{g_Y^*+\bar{d}})^\alpha ({(1-\bar{l})\bar{L}})^{1-\alpha}$$
|
||||||
|
|
||||||
|
$$\rightarrow {Y_t^*}^{1-\alpha}=A_t^*(\frac{\bar{s}}{g_Y^*+\bar{d}})^\alpha({(1-\bar{l})\bar{L}})^{1-\alpha}$$
|
||||||
|
|
||||||
|
Plugging in the known expressions for $A_t^*$ and $g_Y^*$, a final expression for the Balanced Growth Path output as a function of the endogenous parameters and time is obtained:
|
||||||
|
|
||||||
|
$$Y_t^*={(A_0(1+\bar{z}\bar{l}\bar{L})^t})^\frac{1}{1-\alpha}(\frac{\bar{s}}{\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}+\bar{d}})^\frac{\alpha}{1-\alpha}(1-\bar{l})\bar{L}$$
|
||||||
|
|
||||||
|
### analysis
|
||||||
|
|
||||||
|
First looking at the growth rate of output, $g_Y^*=\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}$, idea-driving factors and an increased allocation of labor to ideas increase the equilibrium Balanced Growth Path—the _level_ of long-run growth. Thus, this model captures the influences of both capital and ideas on economic growth.
|
||||||
|
|
||||||
|
Looking at $Y_t^*$, ideas have both a direct and indirect effect on output. Firstly, ideas raise output because they increase productivity (directly); second, with the introduction of capital stock, ideas also increase capital, in turn increasing output further (indirectly). Mathematically, this is evident in both instances of $g_A^*$ in the formula for output $Y_t^*$—note that $\frac{1}{1-\alpha},\frac{\alpha}{1-\alpha}>0$ for any $\alpha\in(0,1)$, so $\frac{d}{dg_A^*}Y_t^*>0$.
|
||||||
|
|
||||||
|
Expectedly, output has a positive relationship with the savings rate and a negative relationship with the depreciation rate.
|
||||||
|
|
||||||
|
Using the visualization below, we see a growth pattern similar to that of the Romer Model. However, the Romer-Solow economy indeed grows at a faster rate than the Romer model—I had to cap $\bar{L}$ at $400$ and $\alpha$ at $0.4$ because output would be _too large_ for JavaScript to contain in a number (the graph would disappear).
|
||||||
|
|
||||||
|
$\bar{z}:$ 0.50 $A_0:$ 500 $\bar{d}:$ 0.50 $\bar{s}:$ 0.50 $\alpha:$ 0.33 $\bar{l}:$ 0.50 $\bar{L}:$ 200
|
||||||
|
|
||||||
|
Playing with the parameters, the previous mathematical findings are validated. For example, because $g_Y^*=\frac{\bar{z}\bar{l}\bar{L}}{1-\alpha}$, only changes in parameters $\alpha,\bar{z},\bar{l}$, and $\bar{L}$ affect the growth rate of output, manifesting as the y-axis scaling up/down on a ratio scale.
|
||||||
|
|
||||||
|
However, do economies grow _faster_/_slower_ the further _below_/_above_ they are from their Balanced Growth Path, as initially desired? While this can be mathematically proven (of course), sometimes a visualization helps.
|
||||||
|
|
||||||
|
The graph below illustrates the transition dynamics of Romer-Solow Model. Namely, $(\bar{z}, \bar{l}, \bar{L}, \alpha)=(0.5, 0.5, 100, 0.33)\forall t<t_0$, then update to the slider values when $t>t_0$.
|
||||||
|
|
||||||
|
$\bar{z}_0:$ 0.50 $\alpha_0:$ 0.33 $\bar{L}_0:$ 100 $\bar{l}_0:$ 0.50 $t_0:$ 50
|
||||||
|
|
||||||
|
Finally, it is clear that economies converge to their Balanced Growth Path as desired—something slightly more convoluted to prove from the complex expression for $Y^*$ derived earlier. For example, with an increase in $\alpha_0$, output grows at an increasing rate after the change, then increases at a decreasing rate as it converges to the new higher Balanced Growth Path. Increasing parameters $\bar{z},\bar{l},\bar{L}$ yield similar results, although the changes are visually less obvious.
|
||||||
25
src/content/posts/algorithms/practice-makes-perfect.mdx
Normal file
25
src/content/posts/algorithms/practice-makes-perfect.mdx
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
---
|
||||||
|
title: "practice makes perfect"
|
||||||
|
date: "2025-05-07"
|
||||||
|
useKatex: true
|
||||||
|
---
|
||||||
|
|
||||||
|
Today I improved my implementation skills with [Codeforces Round 874 Div. 3 Problem G](https://codeforces.com/contest/1833/problem/G). Despite not solving the problem after a full 45 minutes, I came to the following realizations:
|
||||||
|
|
||||||
|
1. Don't jump into coding. _Fully_ flesh out your implementation in your head before you begin. This is tempting to do, especially in a "competitive" environment. I tend to do this to avoid thinking about troublesome aspects of the problem that I _know_ I'll have to face later. Going into problems with a plan makes things much easier when coding but much harder up front. It is easy (for me) to get lost in the black-boxing four layers deep. Write it out, visualize it, and practice practice practice.
|
||||||
|
|
||||||
|
> Considering my solution would've led to me uncover my core misinterpretation of the problem: **the tree does not have to be binary**. I developed a solution for binary trees but the greedy logic cannot be extended to general trees.
|
||||||
|
|
||||||
|
2. Complex problems are, well, hard. You _have_ to practice to internalize patterns so you can focus on the _crux_ of the problem.
|
||||||
|
|
||||||
|
> I spent 10 minutes debugging retrieving the leaves of a tree before even beginning to code the actual algorithm. **1800 is out of my skill range** (for now!).
|
||||||
|
|
||||||
|
3. **Do not let a single thought/assertion/fact go unturned**. I made a litany of erroneous assertions in my time thinking about this problem, some of which include:
|
||||||
|
|
||||||
|
- The tree has to be binary (it does not).
|
||||||
|
- I can gather the leaves in arbitrary order (once again, this doesn't generalize to trees).
|
||||||
|
- Ignore all cuts between identical nodes—it's fine! (I didn't know why this was the case)
|
||||||
|
- A set shouldn't be needed to track visited nodes in a tree— slap it on anyway (this was superfluous and should've immediately set off red flags that my parent-ignoring policy in my BFS was wrong).
|
||||||
|
- When processing a node in the "child-parent-child" pattern, just pop off the next node from the queue (within binary/n-ary trees, this is wrong—the leaves are gathered by _level_, so the next node in the queue is not guaranteed to be the current's sibling).
|
||||||
|
|
||||||
|
4. Just because the solution passes the test cases does not mean it is right. This specifically applies to problems near/outside your skill range—create your own test cases.
|
||||||
|
|
@ -0,0 +1,21 @@
|
||||||
|
---
|
||||||
|
title: "the problem with cs curricula"
|
||||||
|
useKatex: true
|
||||||
|
---
|
||||||
|
|
||||||
|
Edsger Wybe Dijkstra's ["On the cruelty of really teaching computing science"](https://www.cs.utexas.edu/~EWD/transcriptions/EWD10xx/EWD1036.html) perfectly sums up my gripes with how Computer Science is taught at a university level (at my school, at least).
|
||||||
|
|
||||||
|
Succinctly put, my time learning computer science at my unnamed college exemplified nearly everything he (and I) believe a CS curriculum should _not do_:
|
||||||
|
|
||||||
|
- Ignore the existential questions about computer programs (what are they? why do they exist? can they want? what should they be used for?)
|
||||||
|
- Ignore the notion of program behavior, i.e. provability (this is set aside as an advanced core class, counterintuitively reserved for a third or fourth year).
|
||||||
|
- Excessively simplify and frame new technologies with analogy, effectively instilling maladaptive thinking patterns that fail to extend to more novel problems
|
||||||
|
- Give up on doing the inverse of the above because it is too hard for young students.
|
||||||
|
|
||||||
|
Walking out of my third year, I left with the sad realization that I got by the majority of my classes by only understanding things as they pertained to assignments and exams. **And by "got by", I mean straight A's**.
|
||||||
|
|
||||||
|
I always knew something was wrong with how my school taught computer science (despite it being the biggest major as of 2025). As of late, though, I realized the gargantuan amount of damage it caused to my reasoning abilities. Damage that I have to reverse by, essentially, doing everything all over again.
|
||||||
|
|
||||||
|
My [competitive programming journey](https://barrettruth.com/posts/algorithms/competitive-programming-log.html) epitomizes this point: to this day I struggle with reasoning, argumentation, and understanding program behavior. I know how a segment tree works but can't formalize the constraints of a problem. I can do dynamic programming on trees but I can barely manipulate and work with primitive mathematical concepts such as the \$gcd\$ function. I cannot think of a more useless skillset.
|
||||||
|
|
||||||
|
Nearly all of this is my fault. However, _it should not be possible for this to happen in a computer science curriculum_. In other words, Djikstra is right.
|
||||||
8
src/content/posts/operating-systems/building-an-os.mdx
Normal file
8
src/content/posts/operating-systems/building-an-os.mdx
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
---
|
||||||
|
title: "building an os"
|
||||||
|
date: "2025-04-15"
|
||||||
|
---
|
||||||
|
|
||||||
|
## introduction
|
||||||
|
|
||||||
|
wip
|
||||||
30
src/content/posts/software/designing-this-website.mdx
Normal file
30
src/content/posts/software/designing-this-website.mdx
Normal file
|
|
@ -0,0 +1,30 @@
|
||||||
|
---
|
||||||
|
title: "designing this website"
|
||||||
|
date: "2024-06-18"
|
||||||
|
---
|
||||||
|
|
||||||
|
## HTML, JavaScript, and CSS
|
||||||
|
|
||||||
|
That's all there is to it.
|
||||||
|
|
||||||
|
I thought about using the following frameworks:
|
||||||
|
|
||||||
|
1. [React.js](https://react.dev/)
|
||||||
|
2. [Next.js](https://nextjs.org/)
|
||||||
|
3. [Hugo](https://gohugo.io/)
|
||||||
|
4. [Astro](https://astro.build/)
|
||||||
|
|
||||||
|
But I did not actually _need_ any of them to make this site look decent.
|
||||||
|
|
||||||
|
## What I've Learned
|
||||||
|
|
||||||
|
Of course, most people build simple websites like these to learn a new technology or framework, not to use an optimal tool. That's actually why I [hosted this website on AWS](/posts/software/from-github-pages-to-aws.html).
|
||||||
|
|
||||||
|
Building this website with truly bare-bones technologies has made me appreciate _why_ these web frameworks have emerged.
|
||||||
|
|
||||||
|
- Writing JavaScript to manipulate the DOM works just fine but lacks the readability and composability that many JavaScript frameworks bring to the table.
|
||||||
|
- Re-using code is odd. For example, I created a "common.js" with general utilities—there is zero indication (both to me and my language servers) that these functions are exposed to other scripts included by the same HTML file.
|
||||||
|
- JSX is great. Dynamically inserting HTML as raw strings or writing them line by line with the DOM is a pain, and a verbose one at that.
|
||||||
|
- Similarly, CSS styling (inline/stylesheet) works at the small scale. However, with styles being completely divorced from the HTML itself, much is left to be desired.
|
||||||
|
- Reusing HTML, styles, and JavaScript feels extremely fragile. Innovative type-safe, optimized, and composable solutions definitely have their place in the web.
|
||||||
|
- **You can be efficient with HTML, JS, and CSS.** My iteration speed on this site versus other React.js/MDX blogs I have worked on is the same if not faster. While this may be a testament to my lack of JavaScript experience, I think people conclude too early that their task is beyond the technologies that form the foundation of the web today.
|
||||||
53
src/content/posts/software/from-github-pages-to-aws.mdx
Normal file
53
src/content/posts/software/from-github-pages-to-aws.mdx
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
---
|
||||||
|
title: "from github pages to AWS"
|
||||||
|
date: "2024-06-15"
|
||||||
|
---
|
||||||
|
|
||||||
|
## pages begone
|
||||||
|
|
||||||
|
Though GitHub Pages may work for hosting your small, internal, static site, I don't think Pages is the right choice for _anyone_:
|
||||||
|
|
||||||
|
1. Bandwidth caps: scale your software by default
|
||||||
|
2. Limited SEO control: not a downside if you don't want traffic...
|
||||||
|
3. Static & client-side only: keep your options open
|
||||||
|
|
||||||
|
## why aws?
|
||||||
|
|
||||||
|
I used pages before because I had little knowledge of cloud computing.
|
||||||
|
|
||||||
|
This is not a justification—if you are a software developer, learn it.
|
||||||
|
|
||||||
|
Prior to hosting this site, I developed and hosted an internal application with Google Cloud while working at [nth Venture](https://nthventure.com). Getting a single Compute Engine up and running made me step away from cloud for the entire next year.
|
||||||
|
|
||||||
|
AWS is:
|
||||||
|
|
||||||
|
1. Industry standard: not an actual reason but it convinced me nonetheless
|
||||||
|
2. Secure: soon to be used by VISA, which holds security to a nearly stupid extent (seriously, I can't even clone a repository)
|
||||||
|
3. Well-documented: everything in the documentation worked _first try_. This is extremely rare in software, so compliments to Chef Bezos.
|
||||||
|
|
||||||
|
## the setup
|
||||||
|
|
||||||
|
This website is pure HTML, CSS, and JavaScript.
|
||||||
|
|
||||||
|
AWS-wise, I use:
|
||||||
|
|
||||||
|
- S3, to host the content (static for now)
|
||||||
|
- CloudFront, to serve and cache said content
|
||||||
|
- Route53, to manage routing
|
||||||
|
- GoDaddy, to reserve [barrettruth.com](https://barrettruth.com)
|
||||||
|
|
||||||
|
A user request can be modelled as follows:
|
||||||
|
|
||||||
|
1. A user accesses the website by typing barrettruth.com in their browser.
|
||||||
|
2. GoDaddy's DNS servers are queried, translating the domain name to my Route53 IP address.
|
||||||
|
3. Route53 then routes the request to my CloudFront distribution associated with my S3 bucket.
|
||||||
|
4. CloudFront checks its edge caches for the requested content. If the content is stale or not cached, CloudFront fetches the content from S3. Otherwise, it uses the cached content from an edge server.
|
||||||
|
5. CloudFront returns the content to the user's browser.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
## difficulties
|
||||||
|
|
||||||
|
The hardest part of hosting this website was interfacing with GoDaddy.
|
||||||
|
|
||||||
|
For example, configuring SSL certificates with GoDaddy is needlessly challenging. Follow [AWS's guide](https://docs.aws.amazon.com/amplify/latest/userguide/to-add-a-custom-domain-managed-by-godaddy.html) if you really want to. Otherwise, [configure your GoDaddy nameservers](https://www.godaddy.com/help/edit-my-domain-nameservers-664) and point them to your own DNS service (like Route53) instead.
|
||||||
38
src/content/posts/software/hosting-a-git-server.mdx
Normal file
38
src/content/posts/software/hosting-a-git-server.mdx
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
---
|
||||||
|
title: "hosting a git server"
|
||||||
|
date: "2025-05-07"
|
||||||
|
---
|
||||||
|
|
||||||
|
## why
|
||||||
|
|
||||||
|
No reason. Perhaps to host personal files in the future. AWS's micro free tier is great, too.
|
||||||
|
|
||||||
|
## what
|
||||||
|
|
||||||
|
- Write my own git web ui
|
||||||
|
- Support clones from my own website
|
||||||
|
- Host private files on my git ui
|
||||||
|
|
||||||
|
## the process
|
||||||
|
|
||||||
|
I detail self-hosting a git server on an AWS t2.micro instance ("free" for 1 year) as of May 2025. [Git's instructions](https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols) were vastly outdated so hopefully this saves a lucky reader some time.
|
||||||
|
|
||||||
|
2. Create the ec2 instance with setup wizard and add \{in,out\}bound rules for \{SSH,HTTP,HTTPS,your ip\} in the wizard security group.
|
||||||
|
3. Use an elastic ip (free) to address public ip reassigning—this is a bother when ssh'ing (new verb?) into the box locally and/or configuring an Apache HTTP server.
|
||||||
|
4. Understand bare git repositories and the ssh protocol.
|
||||||
|
5. Configure a keypair and ssh in (the official instructions are fine for this). I moved it to `~/.ssh` and added an alias in `~/.ssh/config` for convenience. Clone a repo on the server to test.
|
||||||
|
6. Set up a git daemon for `git://` protocol cloning at your own risk.
|
||||||
|
7. Set up an Apache HTTPD server.
|
||||||
|
8. Configure file permissions for the new user:
|
||||||
|
1. `sudo chown -R git:git /srv/git`
|
||||||
|
2. `sudo chgrp -R apache /srv/git`
|
||||||
|
9. To deal with "dubious ownership" issues when cloning with HTTPS, I needed to add **exactly** the following configuration to `/etc/gitconfig`. _No group permission finagling will work_! Git only allows cloning repositories that are owned by the user. If you wish to clone via SSH with, say, user A, this same user must also be employed by your HTTP server to clone the files (customize HTTPD/whatever you're using accordingly).
|
||||||
|
|
||||||
|
10. Security-wise, set up TLS/HTTPS with [Let's Encrypt](https://letsencrypt.org/). Further, only allow authorized people to actually _push_ to the server. The following is my HTTPD configuration file `/etc/apache/conf.d/git-server.conf` hosting the web ui at the root and clone urls at `/git`:
|
||||||
|
|
||||||
|
11. There are a variety of choices for web ui, including [cgit](https://git.zx2c4.com/cgit/), [gitweb](https://git-scm.com/docs/gitweb) (I do not recommend this—the scripts are ancient and require manual tuning), and some even heavier options that allow for further customization. I am not a fan of viewing code on the web, so you cannot in [my custom ui](https://git.barrettruth.com). I spin up a simple python server to walk the projects in `/srv/git` and configured a systemd service to run it in the ec2 box:
|
||||||
|
|
||||||
|
## lessons
|
||||||
|
|
||||||
|
- **It feels great to do things yourself**: I used GPT-4o for linux server command help, that was about it
|
||||||
|
- **Always ask "what is this?" before using something**: this would've saved me hours of realizing a 12 year old perl script should not have been running my git ui.
|
||||||
38
src/content/posts/software/my-cp-setup.mdx
Normal file
38
src/content/posts/software/my-cp-setup.mdx
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
---
|
||||||
|
title: "my cp setup"
|
||||||
|
date: "2025-04-15"
|
||||||
|
---
|
||||||
|
|
||||||
|
Source code [here](https://github.com/barrett-ruth/dots/blob/main/nvim/lua/cp.lua).
|
||||||
|
|
||||||
|
## my goals
|
||||||
|
|
||||||
|
I wanted the following features in my competitive programming (cp) setup:
|
||||||
|
|
||||||
|
- Flexibility: support various environments (codeforces, USACO, cses, etc.) with ease
|
||||||
|
- Speed: instantaneous, non-blocking running/debugging; automatic environment configuration and easy code testing
|
||||||
|
- Editor-Agnostic: while I do provide first-in-class NeoVim integration for my setup, it should be easily portable to _any_ os/editor
|
||||||
|
|
||||||
|
## the solution
|
||||||
|
|
||||||
|
Some (POSIX-compliant, of course) scripts and a `makefile` are more than enough. I created the following intuitive way to interact with my CP setup:
|
||||||
|
|
||||||
|
1. `make setup`: populate the environment with configurations in `~/.config/cp-template` for `clang-format` and `clangd`
|
||||||
|
2. `make run file`
|
||||||
|
3. `make debug file`
|
||||||
|
4. `make clean`
|
||||||
|
|
||||||
|
That's it. The `makefile` relies on some scripts that compile code and run the corresponding executables.
|
||||||
|
|
||||||
|
## neovim integration
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
Leveraging [LuaSnip](https://github.com/L3MON4D3/LuaSnip), a custom `CP` user command, and some scripting for window management and asynchronous jobs, I'm able to:
|
||||||
|
|
||||||
|
- Asynchronously format, run, and debug code (`:h vim.system`)
|
||||||
|
- Use a three-window (input, output, and code) view
|
||||||
|
- Toggle between problems instantly (yes, the windows update)
|
||||||
|
- Automatically populate my coding buffers with competition-specific templates (i.e. USACO, CSES, etc.)
|
||||||
|
- Run the code from the CLI in less than a second
|
||||||
|
- Easily tweak and change the setup—there's absolutely nothing fancy.
|
||||||
40
src/layouts/BaseLayout.astro
Normal file
40
src/layouts/BaseLayout.astro
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
---
// Shared page shell: common <head> tags, site header/footer, and named
// slots ("head", "scripts") for per-page additions.
import Header from '../components/Header.astro';
import Footer from '../components/Footer.astro';

interface Props {
  title: string;
  description?: string;
  // NOTE(review): useKatex is accepted but never read in this template —
  // KaTeX CSS is injected through the "head" slot by PostLayout instead.
  // Confirm whether this prop is still needed.
  useKatex?: boolean;
  bodyClass?: string;
}

const {
  title,
  description = "Barrett Ruth's personal website",
  useKatex = false,
  bodyClass = "graph-background"
} = Astro.props;
---

<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <meta name="description" content={description} />
    <link rel="icon" type="image/webp" href="/logo.webp" />
    <link rel="stylesheet" href="/styles/common.css" />
    <title>{title}</title>
    <!-- Per-page extra head tags (stylesheets, meta) -->
    <slot name="head" />
  </head>
  <body class={bodyClass}>
    <Header />
    <main class="main">
      <slot />
    </main>
    <Footer />
    <!-- Per-page scripts, injected after the footer -->
    <slot name="scripts" />
  </body>
</html>
|
||||||
62
src/layouts/PostLayout.astro
Normal file
62
src/layouts/PostLayout.astro
Normal file
|
|
@ -0,0 +1,62 @@
|
||||||
|
---
// Layout for a single blog post: wraps rendered MDX in BaseLayout, loads
// post stylesheets (and KaTeX when requested), and sets a per-category
// accent colour via the --topic-color CSS variable.
import BaseLayout from './BaseLayout.astro';

interface Props {
  frontmatter: {
    title: string;
    description?: string;
    date?: string;
    useKatex?: boolean;
  };
  // The content-collection entry; its id is "<category>/<file>".
  post?: { id: string };
}

const { frontmatter, post } = Astro.props;
const { title, description, useKatex = false } = frontmatter;

// The first path segment of the entry id is the post's category.
const filePath = post?.id || '';
const category = filePath.split('/')[0];

// Accent colour per category; keep in sync with src/utils/colors.js.
function getTopicColor(category: string) {
  switch (category) {
    case 'algorithms':
      return '#d50032';
    case 'software':
      return '#0073e6';
    case 'operating-systems':
      return '#009975';
    case 'meditations':
      return '#6a0dad';
    default:
      return '#000000';
  }
}

const topicColor = getTopicColor(category);
---

<BaseLayout title={title} description={description} useKatex={useKatex}>
  <slot name="head" slot="head">
    <link rel="stylesheet" href="/styles/post.css" />
    <link rel="stylesheet" href="/styles/mdx.css" />
    {useKatex && (
      <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/katex@0.16.22/dist/katex.min.css" integrity="sha384-5TcZemv2l/9On385z///+d7MSYlvIEw9FuZTIdZ14vJLqWphw7e7ZPuOiCHJcFCP" crossorigin="anonymous" />
    )}
  </slot>

  <div class="post-container" style={`--topic-color: ${topicColor};`}>
    <header class="post-header">
      <h1 class="post-title">{title}</h1>
    </header>

    <article class="post-article">
      <slot />
    </article>
  </div>
</BaseLayout>
|
||||||
100
src/pages/[category].astro
Normal file
100
src/pages/[category].astro
Normal file
|
|
@ -0,0 +1,100 @@
|
||||||
|
---
|
||||||
|
import { getCollection } from 'astro:content';
|
||||||
|
import BaseLayout from '../layouts/BaseLayout.astro';
|
||||||
|
|
||||||
|
// One statically generated listing page per post category.
export async function getStaticPaths() {
  const categories = ['algorithms', 'software', 'operating-systems', 'meditations'];

  // The category doubles as both the route param and the page prop.
  return categories.map((category) => ({
    params: { category },
    props: { category },
  }));
}
|
||||||
|
|
||||||
|
const { category } = Astro.props;
// NOTE(review): this filters on a `category` frontmatter field, while
// index.astro derives the category from post.id's first path segment —
// confirm the collection schema actually defines `data.category`.
const posts = await getCollection('posts', (post) => post.data.category === category);

// Newest first; undated posts fall back to the epoch so they sort last.
posts.sort((a, b) => {
  const dateA = a.data.date ? new Date(a.data.date) : new Date(0);
  const dateB = b.data.date ? new Date(b.data.date) : new Date(0);
  return dateB.getTime() - dateA.getTime();
});

// Display form of the category for the page heading/title.
const capitalizedCategory = category.charAt(0).toUpperCase() + category.slice(1);
|
||||||
|
---
|
||||||
|
|
||||||
|
<BaseLayout title={capitalizedCategory}>
  <div class="content">
    <h1>{capitalizedCategory}</h1>
    <!-- NOTE(review): `post.slug` is the legacy content-collections field;
         newer Astro versions expose only `post.id` — confirm before upgrading. -->
    <div class="posts">
      {posts.map(post => (
        <div class="post">
          <a href={`/posts/${category}/${post.slug}`}>
            {post.data.title}
          </a>
          {post.data.date && (
            <time datetime={post.data.date}>
              {new Date(post.data.date).toLocaleDateString('en-US', {
                year: 'numeric',
                month: 'long',
                day: 'numeric'
              })}
            </time>
          )}
        </div>
      ))}
    </div>
  </div>
</BaseLayout>

<style>
  /* Scoped styles for the category listing page. */
  .content {
    max-width: 800px;
    margin: 0 auto;
    padding: 20px;
  }

  h1 {
    margin-bottom: 30px;
  }

  .posts {
    list-style-type: none;
    padding: 0;
  }

  .post {
    margin-bottom: 20px;
    padding-bottom: 15px;
    border-bottom: 1px solid #eee;
  }

  /* Post links pick up the per-topic accent set on :root by the inline script. */
  .post a {
    display: block;
    font-size: 1.2em;
    text-decoration: underline;
    color: var(--topic-color, inherit);
  }

  time {
    display: block;
    font-size: 0.9em;
    color: #555;
    margin-top: 5px;
  }
</style>
|
||||||
|
|
||||||
|
<script define:vars={{ category }}>
  // NOTE: `define:vars` implies `is:inline` — Astro renders this script
  // verbatim into the page without bundling, so an ESM import of
  // '../utils/colors.js' would not resolve in the browser. The palette is
  // therefore defined inline; keep it in sync with src/utils/colors.js.
  function getTopicColor(topicName) {
    switch (topicName) {
      case 'software':
        return '#0073e6';
      case 'operating-systems':
        return '#009975';
      case 'algorithms':
        return '#d50032';
      case 'meditations':
        return '#6a0dad';
      default:
        return '#000000';
    }
  }

  document.addEventListener('DOMContentLoaded', function () {
    // Drive the page's accent colour (used by the scoped CSS) per category.
    document.documentElement.style.setProperty(
      '--topic-color',
      getTopicColor(category)
    );

    // Exposed for other inline scripts on the page — TODO confirm still needed.
    window.getTopicColor = getTopicColor;
  });
</script>
|
||||||
45
src/pages/about.astro
Normal file
45
src/pages/about.astro
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
---
// About page: short bio with external links.
import BaseLayout from "../layouts/BaseLayout.astro";
---

<BaseLayout title="Barrett Ruth">
  <div class="post-container">
    <header class="post-header">
      <h1 class="post-title">Barrett Ruth</h1>
    </header>
    <article class="post-article">
      <p>
        I am a software developer studying computer science at the
        University of Virginia.
      </p>
      <p>
        I began working as a software engineer part-time with
        <!-- Fixed: target="blank" opened a window *named* "blank";
             "_blank" opens a new tab. rel guards against reverse tabnabbing. -->
        <a target="_blank" rel="noopener noreferrer" href="https://gotransverse.com/">GoTransverse</a>
        in high school. After developing an interest in the
        financial/venture capital world, I transitioned to
        <a target="_blank" rel="noopener noreferrer" href="https://www.nthventure.com/">Nth Venture</a>
        in the spring of my second year. I worked at
        <a target="_blank" rel="noopener noreferrer" href="https://usa.visa.com/">VISA</a> and
        <a href="https://trbcap.com" target="_blank" rel="noopener noreferrer">TRB Capital Management</a>
        during the summer of 2024. Luckily enough, I'll be joining
        <a href="https://drw.com" target="_blank" rel="noopener noreferrer">DRW</a> and
        <a href="https://ramp.com" target="_blank" rel="noopener noreferrer">Ramp</a> in the summer
        and spring of 2025.
      </p>
      <p>
        I've a developing interest in high-performance computing,
        quantitative finance, and open-source software. I am also a
        passionate contributor to the (Neo)Vim ecosystem and beyond.
      </p>
      <p>
        You can see my related contributions on
        <a target="_blank" rel="noopener noreferrer" href="https://github.com/barrett-ruth">GitHub</a>.
      </p>
    </article>
  </div>

  <slot name="head" slot="head">
    <link rel="stylesheet" href="/styles/post.css" />
  </slot>
</BaseLayout>
|
||||||
|
|
||||||
183
src/pages/index.astro
Normal file
183
src/pages/index.astro
Normal file
|
|
@ -0,0 +1,183 @@
|
||||||
|
---
|
||||||
|
import BaseLayout from "../layouts/BaseLayout.astro";
|
||||||
|
import { getCollection } from "astro:content";
|
||||||
|
|
||||||
|
const title = "Barrett Ruth";

// Group every post under the first segment of its id ("<category>/<file>").
const allPosts = await getCollection("posts");
const postsByCategory = allPosts.reduce((acc, post) => {
  const category = post.id.split('/')[0];
  if (!acc[category]) acc[category] = [];
  acc[category].push(post);
  return acc;
}, {});

// Newest first within each category; undated posts fall back to the epoch
// so they sort last.
Object.keys(postsByCategory).forEach(category => {
  postsByCategory[category].sort((a, b) => {
    const dateA = a.data.date ? new Date(a.data.date) : new Date(0);
    const dateB = b.data.date ? new Date(b.data.date) : new Date(0);
    return dateB.getTime() - dateA.getTime();
  });
});
|
||||||
|
---
|
||||||
|
|
||||||
|
<BaseLayout title={title}>
  <slot name="head" slot="head">
    <link rel="stylesheet" href="/styles/index.css" />
  </slot>
  <div class="content">
    <!-- Topic nav: clicking a link "types" the topic into the terminal
         prompt and renders its posts below (see the inline script).
         NOTE(review): no operating-systems entry here although the palette
         defines a colour for it — confirm whether that topic should appear. -->
    <ul class="topics">
      <li class="topic algorithms">
        <a href="#algorithms" data-topic="algorithms">algorithms</a>
      </li>
      <li class="topic software">
        <a href="#software" data-topic="software">software</a>
      </li>
      <li class="topic meditations">
        <a href="#meditations" data-topic="meditations">meditations</a>
      </li>
    </ul>
    <!-- Populated client-side by renderPosts() -->
    <div class="posts" id="posts"></div>
  </div>
|
||||||
|
|
||||||
|
<script slot="scripts" define:vars={{ postsByCategory }}>
|
||||||
|
// Accent colour per topic; mirrors src/utils/colors.js (duplicated here
// because define:vars scripts are inlined and cannot use ESM imports).
function getTopicColor(topicName) {
  const palette = {
    software: "#0073e6",
    "operating-systems": "#009975",
    algorithms: "#d50032",
    meditations: "#6a0dad",
  };
  // hasOwn guard keeps prototype keys (e.g. "toString") on the default.
  return Object.hasOwn(palette, topicName) ? palette[topicName] : "#000000";
}
|
||||||
|
|
||||||
|
// Static shell-prompt prefix shown in the fake terminal.
const TERMINAL_PROMPT = "barrett@ruth:~$ ";
// Re-entrancy guards: block a new animation while one is in flight.
let typing = false;
let clearing = false;

// Erase everything typed after the prompt prefix, one character every
// `delay / topicLength` ms, then invoke `callback` (if given).
function clearPrompt(delay, callback) {
  if (clearing) return;
  clearing = true;

  const terminalPrompt = document.querySelector(".terminal-prompt");
  if (!terminalPrompt) {
    clearing = false;
    return;
  }

  // Characters typed beyond the fixed prefix (i.e. the " /<topic>" suffix).
  const topicLength = terminalPrompt.innerHTML.length - TERMINAL_PROMPT.length;
  let i = 0;

  function removeChar() {
    if (i++ < topicLength) {
      // Drop the last character and schedule the next erase tick.
      terminalPrompt.textContent = terminalPrompt.textContent.slice(0, -1);
      setTimeout(removeChar, delay / topicLength);
    } else {
      i = 0;
      clearing = false;
      callback && callback();
    }
  }

  removeChar();
}
|
||||||
|
|
||||||
|
// Click handler for a topic link: highlight the clicked topic, animate
// typing " /<topic>" into the terminal prompt, then render its posts.
function typechars(e) {
  e.preventDefault();

  const topicElement = e.target;
  // Ignore re-clicks on the active topic and clicks mid-animation.
  if (topicElement.classList.contains("active")) return;
  if (typing) return;
  typing = true;

  const topic = topicElement.dataset.topic;
  const terminalText = ` /${topic.toLowerCase()}`;
  const terminalPrompt = document.querySelector(".terminal-prompt");
  // Total animation budget: shorter when a previous topic is already typed
  // (the clear + retype then reads as one quick swap).
  const delay =
    terminalPrompt.innerHTML.length > TERMINAL_PROMPT.length ? 250 : 500;

  // Reset highlight state on every topic link before marking the new one.
  const topics = document.querySelectorAll(".topic a");
  topics.forEach((t) => {
    t.classList.remove("active");
    t.style.color = "";
  });

  topicElement.classList.add("active");
  topicElement.style.color = getTopicColor(topic);

  // Clear the old suffix, then type the new one character by character.
  clearPrompt(delay, () => {
    let i = 0;
    function typechar() {
      if (i < terminalText.length) {
        terminalPrompt.innerHTML += terminalText.charAt(i++);
        setTimeout(typechar, delay / terminalText.length);
      } else {
        renderPosts(topic);
        typing = false;
      }
    }
    typechar();
  });
}
|
||||||
|
|
||||||
|
// Replace the #posts container's contents with a link for every post in
// `topic`, using the grouped data passed in via define:vars.
function renderPosts(topic) {
  const container = document.getElementById("posts");
  container.innerHTML = "";

  const categoryPosts = postsByCategory[topic];
  if (!categoryPosts) {
    console.error(`No posts found for topic: ${topic}`);
    return;
  }

  for (const post of categoryPosts) {
    // Strip the category directory and the .md/.mdx extension to get the slug.
    const slug = post.id.split("/").pop().replace(/\.mdx?$/, "");

    const link = document.createElement("a");
    link.href = `/posts/${topic}/${slug}`;
    link.textContent = post.data.title;
    link.style.textDecoration = "underline";

    const wrapper = document.createElement("div");
    wrapper.classList.add("post");
    wrapper.appendChild(link);
    container.appendChild(wrapper);
  }
}
|
||||||
|
|
||||||
|
// Wire up the topic links once the DOM is ready.
document.addEventListener('DOMContentLoaded', function() {
  const topics = document.querySelectorAll('.topic a');

  topics.forEach(topic => {
    topic.addEventListener('click', typechars);

    const topicName = topic.dataset.topic;

    // Preview the topic colour on hover; restore on leave unless selected.
    topic.addEventListener('mouseenter', () => {
      const color = getTopicColor(topicName);
      topic.style.color = color;
    });

    topic.addEventListener('mouseleave', () => {
      if (!topic.classList.contains('active')) {
        topic.style.color = "";
      }
    });
  });

  // Reset the prompt so a cached page restore doesn't show a stale path.
  // NOTE(review): registered inside DOMContentLoaded rather than at top
  // level — presumably intentional; confirm.
  window.addEventListener('beforeunload', () => {
    const terminalPrompt = document.querySelector('.terminal-prompt');
    if (terminalPrompt) {
      terminalPrompt.innerHTML = TERMINAL_PROMPT;
    }
  });
});
|
||||||
|
</script>
|
||||||
|
</BaseLayout>
|
||||||
35
src/pages/posts/[category]/[slug].astro
Normal file
35
src/pages/posts/[category]/[slug].astro
Normal file
|
|
@ -0,0 +1,35 @@
|
||||||
|
---
import { getCollection } from 'astro:content';
import PostLayout from '../../../layouts/PostLayout.astro';
import path from 'path';

// Build one static route per post: /posts/<category>/<slug>, where the
// category is the first path segment of the entry id and the slug is the
// file name without its extension.
export async function getStaticPaths() {
  const allPosts = await getCollection('posts');

  const routes = [];

  for (const post of allPosts) {
    const filePath = post.id;

    const pathParts = filePath.split('/');
    const category = pathParts[0];

    const slug = path.basename(post.id, path.extname(post.id));

    routes.push({
      params: { category, slug },
      props: { post },
    });
  }

  return routes;
}

const { post } = Astro.props;

// NOTE(review): entry.render() is the pre-v5 content-collections API —
// confirm against the installed Astro version before upgrading.
const { Content } = await post.render();
---

<PostLayout frontmatter={post.data} post={post}>
  <Content />
</PostLayout>
|
||||||
14
src/utils/colors.js
Normal file
14
src/utils/colors.js
Normal file
|
|
@ -0,0 +1,14 @@
|
||||||
|
// Shared accent-colour palette for post topics; unknown topics get black.
const TOPIC_COLORS = Object.freeze({
  software: "#0073e6",
  "operating-systems": "#009975",
  algorithms: "#d50032",
  meditations: "#6a0dad",
});

export function getTopicColor(topicName) {
  // hasOwn guard keeps prototype keys (e.g. "toString") on the default.
  return Object.hasOwn(TOPIC_COLORS, topicName)
    ? TOPIC_COLORS[topicName]
    : "#000000";
}
|
||||||
5
tsconfig.json
Normal file
5
tsconfig.json
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
{
|
||||||
|
"extends": "astro/tsconfigs/strict",
|
||||||
|
"include": [".astro/types.d.ts", "**/*"],
|
||||||
|
"exclude": ["dist"]
|
||||||
|
}
|
||||||
Loading…
Add table
Add a link
Reference in a new issue