fix: resolve typecheck errors in cache, atcoder, cses, and usaco
Problem: the Lua typechecker flagged a missing start_time field on ContestSummary; ty flagged the BeautifulSoup Tag/NavigableString union returned by csrf_input.get(), a 3-tuple unpack in cses.py where _extract_problem_info now returns 4 values, and an untyped list assignment in usaco.py. Solution: add an optional start_time? field to the ContestSummary LuaDoc, guard csrf_input with a hasattr check plus a targeted type: ignore, unpack the new precision value from _extract_problem_info in the cses.py callers, and use cast() in usaco.py.
This commit is contained in:
parent
bad219e578
commit
de5a20c567
4 changed files with 17 additions and 8 deletions
|
|
@ -15,6 +15,7 @@
|
||||||
---@field display_name string
|
---@field display_name string
|
||||||
---@field name string
|
---@field name string
|
||||||
---@field id string
|
---@field id string
|
||||||
|
---@field start_time? integer
|
||||||
|
|
||||||
---@class CombinedTest
|
---@class CombinedTest
|
||||||
---@field input string
|
---@field input string
|
||||||
|
|
|
||||||
|
|
@ -385,11 +385,11 @@ class AtcoderScraper(BaseScraper):
|
||||||
login_page.raise_for_status()
|
login_page.raise_for_status()
|
||||||
soup = BeautifulSoup(login_page.text, "html.parser")
|
soup = BeautifulSoup(login_page.text, "html.parser")
|
||||||
csrf_input = soup.find("input", {"name": "csrf_token"})
|
csrf_input = soup.find("input", {"name": "csrf_token"})
|
||||||
if not csrf_input:
|
if not csrf_input or not hasattr(csrf_input, "get"):
|
||||||
return SubmitResult(
|
return SubmitResult(
|
||||||
success=False, error="Could not find CSRF token on login page"
|
success=False, error="Could not find CSRF token on login page"
|
||||||
)
|
)
|
||||||
csrf_token = csrf_input.get("value", "")
|
csrf_token = csrf_input.get("value", "") or "" # type: ignore[union-attr]
|
||||||
|
|
||||||
login_resp = _session.post(
|
login_resp = _session.post(
|
||||||
f"{BASE_URL}/login",
|
f"{BASE_URL}/login",
|
||||||
|
|
@ -411,11 +411,11 @@ class AtcoderScraper(BaseScraper):
|
||||||
submit_page.raise_for_status()
|
submit_page.raise_for_status()
|
||||||
soup = BeautifulSoup(submit_page.text, "html.parser")
|
soup = BeautifulSoup(submit_page.text, "html.parser")
|
||||||
csrf_input = soup.find("input", {"name": "csrf_token"})
|
csrf_input = soup.find("input", {"name": "csrf_token"})
|
||||||
if not csrf_input:
|
if not csrf_input or not hasattr(csrf_input, "get"):
|
||||||
return SubmitResult(
|
return SubmitResult(
|
||||||
success=False, error="Could not find CSRF token on submit page"
|
success=False, error="Could not find CSRF token on submit page"
|
||||||
)
|
)
|
||||||
csrf_token = csrf_input.get("value", "")
|
csrf_token = csrf_input.get("value", "") or "" # type: ignore[union-attr]
|
||||||
|
|
||||||
task_screen_name = f"{contest_id}_{problem_id}"
|
task_screen_name = f"{contest_id}_{problem_id}"
|
||||||
submit_resp = _session.post(
|
submit_resp = _session.post(
|
||||||
|
|
|
||||||
|
|
@ -232,10 +232,17 @@ class CSESScraper(BaseScraper):
|
||||||
try:
|
try:
|
||||||
html = await fetch_text(client, task_path(pid))
|
html = await fetch_text(client, task_path(pid))
|
||||||
tests = parse_tests(html)
|
tests = parse_tests(html)
|
||||||
timeout_ms, memory_mb, interactive = _extract_problem_info(html)
|
timeout_ms, memory_mb, interactive, precision = (
|
||||||
|
_extract_problem_info(html)
|
||||||
|
)
|
||||||
except Exception:
|
except Exception:
|
||||||
tests = []
|
tests = []
|
||||||
timeout_ms, memory_mb, interactive = 0, 0, False
|
timeout_ms, memory_mb, interactive, precision = (
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
False,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
|
||||||
combined_input = "\n".join(t.input for t in tests) if tests else ""
|
combined_input = "\n".join(t.input for t in tests) if tests else ""
|
||||||
combined_expected = (
|
combined_expected = (
|
||||||
|
|
@ -255,6 +262,7 @@ class CSESScraper(BaseScraper):
|
||||||
"memory_mb": memory_mb,
|
"memory_mb": memory_mb,
|
||||||
"interactive": interactive,
|
"interactive": interactive,
|
||||||
"multi_test": False,
|
"multi_test": False,
|
||||||
|
"precision": precision,
|
||||||
}
|
}
|
||||||
|
|
||||||
tasks = [run_one(p.id) for p in problems]
|
tasks = [run_one(p.id) for p in problems]
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@
|
||||||
import asyncio
|
import asyncio
|
||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
from typing import Any
|
from typing import Any, cast
|
||||||
|
|
||||||
import httpx
|
import httpx
|
||||||
|
|
||||||
|
|
@ -253,7 +253,7 @@ class USACOScraper(BaseScraper):
|
||||||
"interactive": False,
|
"interactive": False,
|
||||||
}
|
}
|
||||||
|
|
||||||
tests: list[TestCase] = info["tests"]
|
tests = cast(list[TestCase], info["tests"])
|
||||||
combined_input = "\n".join(t.input for t in tests) if tests else ""
|
combined_input = "\n".join(t.input for t in tests) if tests else ""
|
||||||
combined_expected = (
|
combined_expected = (
|
||||||
"\n".join(t.expected for t in tests) if tests else ""
|
"\n".join(t.expected for t in tests) if tests else ""
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue