refactor(scrapers): centralize cookie storage into shared file

Problem: each platform scraper managed its own cookie file path and
load/save/clear logic, duplicating boilerplate across kattis, usaco,
codeforces, and codechef.

Solution: add `load_platform_cookies`, `save_platform_cookies`, and
`clear_platform_cookies` to `base.py`, backed by a single
`~/.cache/cp-nvim/cookies.json` keyed by platform name. Update all
scrapers to use these helpers.
This commit is contained in:
Barrett Ruth 2026-03-07 03:46:28 -05:00
parent eb0dea777e
commit cb58062464
Signed by: barrett
GPG key ID: A6C96C9349D2FC81
4 changed files with 55 additions and 43 deletions

View file

@ -8,7 +8,7 @@ from typing import Any, cast
import httpx
from .base import BaseScraper, extract_precision
from .base import BaseScraper, extract_precision, load_platform_cookies, save_platform_cookies
from .timeouts import HTTP_TIMEOUT
from .models import (
ContestListResult,
@ -27,7 +27,6 @@ HEADERS = {
}
CONNECTIONS = 4
_COOKIE_PATH = Path.home() / ".cache" / "cp-nvim" / "usaco-cookies.json"
_LOGIN_PATH = "/current/tpcm/login-session.php"
_SUBMIT_PATH = "/current/tpcm/submit-solution.php"
@ -202,20 +201,16 @@ def _parse_submit_form(
async def _load_usaco_cookies(client: httpx.AsyncClient) -> None:
if not _COOKIE_PATH.exists():
return
try:
for k, v in json.loads(_COOKIE_PATH.read_text()).items():
data = load_platform_cookies("usaco")
if isinstance(data, dict):
for k, v in data.items():
client.cookies.set(k, v)
except Exception:
pass
async def _save_usaco_cookies(client: httpx.AsyncClient) -> None:
cookies = {k: v for k, v in client.cookies.items()}
cookies = dict(client.cookies.items())
if cookies:
_COOKIE_PATH.parent.mkdir(parents=True, exist_ok=True)
_COOKIE_PATH.write_text(json.dumps(cookies))
save_platform_cookies("usaco", cookies)
async def _check_usaco_login(client: httpx.AsyncClient, username: str) -> bool: