fix: resolve typecheck errors in cache, atcoder, cses, and usaco

Problem: the Lua typechecker flagged a missing start_time field on
ContestSummary; ty flagged the BeautifulSoup Tag/NavigableString union
on csrf_input.get(), a 3-tuple unpack in cses.py where
_extract_problem_info now returns 4 values, and an untyped list
assignment in usaco.py.

Solution: add start_time? to the ContestSummary LuaDoc, guard
csrf_input with a hasattr() check plus a type: ignore comment, unpack
the new precision value from _extract_problem_info at its cses.py call
sites, and use cast() in usaco.py.
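
For reference, a minimal standalone sketch of the union problem ty flags here (illustrative only, not code from this repo; assumes bs4 is installed): find() is annotated as returning Tag | NavigableString | None, and only Tag defines .get(), so a plain truthiness check does not narrow the union.

    from bs4 import BeautifulSoup

    html = '<form><input name="csrf_token" value="abc"></form>'
    soup = BeautifulSoup(html, "html.parser")
    csrf_input = soup.find("input", {"name": "csrf_token"})
    # find() may return Tag, NavigableString, or None; only Tag has .get(),
    # hence the hasattr() guard plus the suppression in the diff below.
    if not csrf_input or not hasattr(csrf_input, "get"):
        raise SystemExit("no CSRF token")
    csrf_token = csrf_input.get("value", "") or ""  # type: ignore[union-attr]
    print(csrf_token)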
Barrett Ruth authored on 2026-03-03 15:01:59 -05:00, committed by Barrett Ruth
parent bad219e578
commit de5a20c567
4 changed files with 17 additions and 8 deletions


@@ -15,6 +15,7 @@
 ---@field display_name string
 ---@field name string
 ---@field id string
+---@field start_time? integer
 
 ---@class CombinedTest
 ---@field input string


@@ -385,11 +385,11 @@ class AtcoderScraper(BaseScraper):
         login_page.raise_for_status()
         soup = BeautifulSoup(login_page.text, "html.parser")
         csrf_input = soup.find("input", {"name": "csrf_token"})
-        if not csrf_input:
+        if not csrf_input or not hasattr(csrf_input, "get"):
             return SubmitResult(
                 success=False, error="Could not find CSRF token on login page"
             )
-        csrf_token = csrf_input.get("value", "")
+        csrf_token = csrf_input.get("value", "") or ""  # type: ignore[union-attr]
 
         login_resp = _session.post(
             f"{BASE_URL}/login",
@@ -411,11 +411,11 @@ class AtcoderScraper(BaseScraper):
         submit_page.raise_for_status()
         soup = BeautifulSoup(submit_page.text, "html.parser")
         csrf_input = soup.find("input", {"name": "csrf_token"})
-        if not csrf_input:
+        if not csrf_input or not hasattr(csrf_input, "get"):
             return SubmitResult(
                 success=False, error="Could not find CSRF token on submit page"
             )
-        csrf_token = csrf_input.get("value", "")
+        csrf_token = csrf_input.get("value", "") or ""  # type: ignore[union-attr]
 
         task_screen_name = f"{contest_id}_{problem_id}"
         submit_resp = _session.post(

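An isinstance() check against bs4's Tag would narrow the same union without a suppression comment; a sketch of that alternative (not what this commit does):

    from bs4 import BeautifulSoup, Tag

    soup = BeautifulSoup('<input name="csrf_token" value="abc">', "html.parser")
    csrf_input = soup.find("input", {"name": "csrf_token"})
    # isinstance() narrows Tag | NavigableString | None to Tag, so .get()
    # typechecks with no type: ignore needed.
    if not isinstance(csrf_input, Tag):
        raise SystemExit("no CSRF token")
    csrf_token = csrf_input.get("value", "") or ""
    print(csrf_token)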

@@ -232,10 +232,17 @@ class CSESScraper(BaseScraper):
             try:
                 html = await fetch_text(client, task_path(pid))
                 tests = parse_tests(html)
-                timeout_ms, memory_mb, interactive = _extract_problem_info(html)
+                timeout_ms, memory_mb, interactive, precision = (
+                    _extract_problem_info(html)
+                )
             except Exception:
                 tests = []
-                timeout_ms, memory_mb, interactive = 0, 0, False
+                timeout_ms, memory_mb, interactive, precision = (
+                    0,
+                    0,
+                    False,
+                    None,
+                )
 
             combined_input = "\n".join(t.input for t in tests) if tests else ""
             combined_expected = (
@@ -255,6 +262,7 @@ class CSESScraper(BaseScraper):
                 "memory_mb": memory_mb,
                 "interactive": interactive,
                 "multi_test": False,
+                "precision": precision,
             }
 
         tasks = [run_one(p.id) for p in problems]

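The cses.py callers above now unpack a 4-tuple ending in an optional precision. For illustration only, a hypothetical signature consistent with those call sites (the real definition lives in cses.py and is not part of this diff; the float type for precision is an assumption):

    def _extract_problem_info(html: str) -> tuple[int, int, bool, float | None]:
        # hypothetical sketch: (timeout_ms, memory_mb, interactive, precision)
        ...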

@@ -3,7 +3,7 @@
 import asyncio
 import json
 import re
-from typing import Any
+from typing import Any, cast
 
 import httpx
 
@@ -253,7 +253,7 @@ class USACOScraper(BaseScraper):
             "interactive": False,
         }
 
-        tests: list[TestCase] = info["tests"]
+        tests = cast(list[TestCase], info["tests"])
         combined_input = "\n".join(t.input for t in tests) if tests else ""
         combined_expected = (
             "\n".join(t.expected for t in tests) if tests else ""