fix(scrapers): align submit signatures with base class file_path param

Problem: `BaseScraper.submit` was updated to take `file_path` instead of
`source_code`, but cses, codechef, kattis, and usaco still used the old
parameter name, causing basedpyright override errors.

Solution: rename the parameter in all four scrapers. CSES reads the file
content from the path before use. Fix `codechef.fetch_json` missing type
args and `usaco._parse_results_page` narrowing for basedpyright.
This commit is contained in:
Barrett Ruth 2026-03-05 14:25:39 -05:00
parent 2a373b72dd
commit 6c8c32268d
Signed by: barrett
GPG key ID: A6C96C9349D2FC81
4 changed files with 13 additions and 7 deletions

View file

@@ -33,7 +33,7 @@ MEMORY_LIMIT_RE = re.compile(
)
async def fetch_json(client: httpx.AsyncClient, path: str) -> dict:
async def fetch_json(client: httpx.AsyncClient, path: str) -> dict[str, Any]:
    """GET ``BASE_URL + path`` and return the response body parsed as JSON.

    The request is sent with the module-level ``HEADERS`` and
    ``HTTP_TIMEOUT`` settings.

    Raises:
        httpx.HTTPStatusError: if the response has an error status (4xx/5xx).
    """
    response = await client.get(
        BASE_URL + path, headers=HEADERS, timeout=HTTP_TIMEOUT
    )
    response.raise_for_status()
    return response.json()
@@ -256,7 +256,7 @@ class CodeChefScraper(BaseScraper):
self,
contest_id: str,
problem_id: str,
source_code: str,
file_path: str,
language_id: str,
credentials: dict[str, str],
) -> SubmitResult:

View file

@@ -357,10 +357,13 @@ class CSESScraper(BaseScraper):
self,
contest_id: str,
problem_id: str,
source_code: str,
file_path: str,
language_id: str,
credentials: dict[str, str],
) -> SubmitResult:
from pathlib import Path
source_code = Path(file_path).read_text()
username = credentials.get("username", "")
password = credentials.get("password", "")
if not username or not password:

View file

@@ -273,7 +273,7 @@ class KattisScraper(BaseScraper):
self,
contest_id: str,
problem_id: str,
source_code: str,
file_path: str,
language_id: str,
credentials: dict[str, str],
) -> SubmitResult:

View file

@@ -73,8 +73,11 @@ def _parse_results_page(html: str) -> dict[str, list[tuple[str, str]]]:
for part in parts:
heading_m = DIVISION_HEADING_RE.search(part)
if heading_m:
current_div = heading_m.group(3).lower()
sections.setdefault(current_div, [])
div = heading_m.group(3)
if div:
key = div.lower()
current_div = key
sections.setdefault(key, [])
continue
if current_div is not None:
for m in PROBLEM_BLOCK_RE.finditer(part):
@@ -285,7 +288,7 @@ class USACOScraper(BaseScraper):
self,
contest_id: str,
problem_id: str,
source_code: str,
file_path: str,
language_id: str,
credentials: dict[str, str],
) -> SubmitResult: