diff --git a/scrapers/kattis.py b/scrapers/kattis.py
index 2c5c1fc..7741847 100644
--- a/scrapers/kattis.py
+++ b/scrapers/kattis.py
@@ -10,7 +10,7 @@
 from pathlib import Path
 
 import httpx
-from .base import BaseScraper, extract_precision
+from .base import BaseScraper
 from .timeouts import HTTP_TIMEOUT
 from .models import (
     ContestListResult,
@@ -173,7 +173,6 @@ async def _stream_single_problem(client: httpx.AsyncClient, slug: str) -> None:
 
     timeout_ms, memory_mb = _parse_limits(html)
     interactive = _is_interactive(html)
-    precision = extract_precision(html)
 
     tests: list[TestCase] = []
     try:
@@ -201,7 +200,6 @@ async def _stream_single_problem(client: httpx.AsyncClient, slug: str) -> None:
                 "memory_mb": memory_mb,
                 "interactive": interactive,
                 "multi_test": False,
-                "precision": precision,
             }
         ),
         flush=True,
@@ -256,8 +254,6 @@ class KattisScraper(BaseScraper):
                     ProblemSummary(id=slug, name=name) for slug, name in slugs
                 ],
                 url=f"{BASE_URL}/problems/%s",
-                contest_url=f"{BASE_URL}/contests/{contest_id}",
-                standings_url=f"{BASE_URL}/contests/{contest_id}/standings",
             )
         try:
             html = await _fetch_text(
@@ -277,8 +273,6 @@ class KattisScraper(BaseScraper):
                 contest_id=contest_id,
                 problems=[ProblemSummary(id=contest_id, name=name)],
                 url=f"{BASE_URL}/problems/%s",
-                contest_url=f"{BASE_URL}/problems/{contest_id}",
-                standings_url="",
             )
         except Exception as e:
             return self._metadata_error(str(e))
@@ -379,15 +373,9 @@ class KattisScraper(BaseScraper):
             return self._submit_error(f"Submit request failed: {e}")
 
         sid_m = re.search(r"Submission ID:\s*(\d+)", r.text, re.IGNORECASE)
-        if not sid_m:
-            return self._submit_error(
-                r.text.strip() or "Submit failed (no submission ID)"
-            )
+        sid = sid_m.group(1) if sid_m else ""
         return SubmitResult(
-            success=True,
-            error="",
-            submission_id=sid_m.group(1),
-            verdict="submitted",
+            success=True, error="", submission_id=sid, verdict="submitted"
         )
 
     async def login(self, credentials: dict[str, str]) -> LoginResult:
diff --git a/scrapers/usaco.py b/scrapers/usaco.py
index 5ab89f7..074cbf9 100644
--- a/scrapers/usaco.py
+++ b/scrapers/usaco.py
@@ -8,7 +8,7 @@
 from typing import Any, cast
 
 import httpx
-from .base import BaseScraper, extract_precision
+from .base import BaseScraper
 from .timeouts import HTTP_TIMEOUT
 from .models import (
     ContestListResult,
@@ -130,14 +130,12 @@ def _parse_problem_page(html: str) -> dict[str, Any]:
 
     memory_mb = int(mm.group(1)) if mm else 256
 
     interactive = "interactive problem" in html.lower()
-    precision = extract_precision(html)
 
     return {
         "tests": tests,
         "timeout_ms": timeout_ms,
         "memory_mb": memory_mb,
         "interactive": interactive,
-        "precision": precision,
     }
@@ -377,7 +375,6 @@ class USACOScraper(BaseScraper):
                 "timeout_ms": 4000,
                 "memory_mb": 256,
                 "interactive": False,
-                "precision": None,
             }
 
         tests = cast(list[TestCase], info["tests"])
@@ -399,7 +396,6 @@ class USACOScraper(BaseScraper):
                 "memory_mb": info["memory_mb"],
                 "interactive": info["interactive"],
                 "multi_test": False,
-                "precision": info["precision"],
             }
 
         tasks = [run_one(cpid) for cpid, _ in problems_raw]