diff --git a/scrapers/codechef.py b/scrapers/codechef.py
index c4b9d37..b7f4ec7 100644
--- a/scrapers/codechef.py
+++ b/scrapers/codechef.py
@@ -33,7 +33,7 @@ MEMORY_LIMIT_RE = re.compile(
 )
 
 
-async def fetch_json(client: httpx.AsyncClient, path: str) -> dict:
+async def fetch_json(client: httpx.AsyncClient, path: str) -> dict[str, Any]:
     r = await client.get(BASE_URL + path, headers=HEADERS, timeout=HTTP_TIMEOUT)
     r.raise_for_status()
     return r.json()
@@ -256,7 +256,7 @@ class CodeChefScraper(BaseScraper):
         self,
         contest_id: str,
         problem_id: str,
-        source_code: str,
+        file_path: str,
         language_id: str,
         credentials: dict[str, str],
     ) -> SubmitResult:
diff --git a/scrapers/cses.py b/scrapers/cses.py
index 7d9f4f0..4df3fcc 100644
--- a/scrapers/cses.py
+++ b/scrapers/cses.py
@@ -357,10 +357,13 @@ class CSESScraper(BaseScraper):
         self,
         contest_id: str,
         problem_id: str,
-        source_code: str,
+        file_path: str,
         language_id: str,
         credentials: dict[str, str],
     ) -> SubmitResult:
+        from pathlib import Path
+
+        source_code = Path(file_path).read_text()
         username = credentials.get("username", "")
         password = credentials.get("password", "")
         if not username or not password:
diff --git a/scrapers/kattis.py b/scrapers/kattis.py
index 2bfd2d6..566473c 100644
--- a/scrapers/kattis.py
+++ b/scrapers/kattis.py
@@ -273,7 +273,7 @@ class KattisScraper(BaseScraper):
         self,
         contest_id: str,
         problem_id: str,
-        source_code: str,
+        file_path: str,
         language_id: str,
         credentials: dict[str, str],
     ) -> SubmitResult:
diff --git a/scrapers/usaco.py b/scrapers/usaco.py
index 9e4d7da..099f102 100644
--- a/scrapers/usaco.py
+++ b/scrapers/usaco.py
@@ -73,8 +73,11 @@ def _parse_results_page(html: str) -> dict[str, list[tuple[str, str]]]:
     for part in parts:
         heading_m = DIVISION_HEADING_RE.search(part)
         if heading_m:
-            current_div = heading_m.group(3).lower()
-            sections.setdefault(current_div, [])
+            div = heading_m.group(3)
+            if div:
+                key = div.lower()
+                current_div = key
+                sections.setdefault(key, [])
             continue
         if current_div is not None:
             for m in PROBLEM_BLOCK_RE.finditer(part):
@@ -285,7 +288,7 @@ class USACOScraper(BaseScraper):
         self,
         contest_id: str,
         problem_id: str,
-        source_code: str,
+        file_path: str,
         language_id: str,
         credentials: dict[str, str],
     ) -> SubmitResult: