From 6c8c32268db6c243023bbcd8922a8b4fca0e70d4 Mon Sep 17 00:00:00 2001
From: Barrett Ruth
Date: Thu, 5 Mar 2026 14:25:39 -0500
Subject: [PATCH] fix(scrapers): align submit signatures with base class
 `file_path` param

Problem: `BaseScraper.submit` was updated to take `file_path` instead of
`source_code`, but cses, codechef, kattis, and usaco still used the old
parameter name, causing basedpyright override errors.

Solution: rename the parameter in all four scrapers. CSES reads the file
content from the path before use. Fix `codechef.fetch_json` missing type
args and `usaco._parse_results_page` narrowing for basedpyright.
---
 scrapers/codechef.py | 4 ++--
 scrapers/cses.py     | 5 ++++-
 scrapers/kattis.py   | 2 +-
 scrapers/usaco.py    | 9 ++++++---
 4 files changed, 13 insertions(+), 7 deletions(-)

diff --git a/scrapers/codechef.py b/scrapers/codechef.py
index c4b9d37..b7f4ec7 100644
--- a/scrapers/codechef.py
+++ b/scrapers/codechef.py
@@ -33,7 +33,7 @@ MEMORY_LIMIT_RE = re.compile(
 )
 
 
-async def fetch_json(client: httpx.AsyncClient, path: str) -> dict:
+async def fetch_json(client: httpx.AsyncClient, path: str) -> dict[str, Any]:
     r = await client.get(BASE_URL + path, headers=HEADERS, timeout=HTTP_TIMEOUT)
     r.raise_for_status()
     return r.json()
@@ -256,7 +256,7 @@ class CodeChefScraper(BaseScraper):
         self,
         contest_id: str,
         problem_id: str,
-        source_code: str,
+        file_path: str,
         language_id: str,
         credentials: dict[str, str],
     ) -> SubmitResult:
diff --git a/scrapers/cses.py b/scrapers/cses.py
index 7d9f4f0..4df3fcc 100644
--- a/scrapers/cses.py
+++ b/scrapers/cses.py
@@ -357,10 +357,13 @@ class CSESScraper(BaseScraper):
         self,
         contest_id: str,
         problem_id: str,
-        source_code: str,
+        file_path: str,
         language_id: str,
         credentials: dict[str, str],
     ) -> SubmitResult:
+        from pathlib import Path
+
+        source_code = Path(file_path).read_text()
         username = credentials.get("username", "")
         password = credentials.get("password", "")
         if not username or not password:
diff --git a/scrapers/kattis.py b/scrapers/kattis.py
index 2bfd2d6..566473c 100644
--- a/scrapers/kattis.py
+++ b/scrapers/kattis.py
@@ -273,7 +273,7 @@ class KattisScraper(BaseScraper):
         self,
         contest_id: str,
         problem_id: str,
-        source_code: str,
+        file_path: str,
         language_id: str,
         credentials: dict[str, str],
     ) -> SubmitResult:
diff --git a/scrapers/usaco.py b/scrapers/usaco.py
index 9e4d7da..099f102 100644
--- a/scrapers/usaco.py
+++ b/scrapers/usaco.py
@@ -73,8 +73,11 @@ def _parse_results_page(html: str) -> dict[str, list[tuple[str, str]]]:
     for part in parts:
         heading_m = DIVISION_HEADING_RE.search(part)
         if heading_m:
-            current_div = heading_m.group(3).lower()
-            sections.setdefault(current_div, [])
+            div = heading_m.group(3)
+            if div:
+                key = div.lower()
+                current_div = key
+                sections.setdefault(key, [])
             continue
         if current_div is not None:
             for m in PROBLEM_BLOCK_RE.finditer(part):
@@ -285,7 +288,7 @@ class USACOScraper(BaseScraper):
         self,
         contest_id: str,
         problem_id: str,
-        source_code: str,
+        file_path: str,
         language_id: str,
         credentials: dict[str, str],
     ) -> SubmitResult: