test: add offline fixture coverage for Kattis and USACO (#342)

## Problem

Kattis and USACO had zero offline test coverage — no fixtures, no
conftest
routers, and no entries in the test matrix. The `precision` field and
error
paths were also unverified across all platforms.

## Solution

Add HTML fixtures for both platforms and wire up `httpx.AsyncClient.get`
routers in `conftest.py` following the existing CSES/CodeChef pattern.
Extend the test matrix from 12 to 23 parametrized cases. Add a dedicated
test for the Kattis contest-vs-slug fallback path (verifying
`contest_url`
and `standings_url`), three parametrized metadata error cases, and a
targeted assertion that `extract_precision` returns a non-`None` float
for
problems with floating-point tolerance hints.

Closes #281.
This commit is contained in:
Barrett Ruth 2026-03-06 16:49:49 -05:00 committed by GitHub
parent 9727dccc6f
commit 8465e70772
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 216 additions and 1 deletions

View file

@@ -27,6 +27,16 @@ MATRIX = {
"tests": ("START209D",),
"contests": tuple(),
},
"kattis": {
"metadata": ("hello",),
"tests": ("hello",),
"contests": tuple(),
},
"usaco": {
"metadata": ("dec24_gold",),
"tests": ("dec24_gold",),
"contests": tuple(),
},
}
@@ -85,5 +95,45 @@ def test_scraper_offline_fixture_matrix(run_scraper_offline, scraper, mode):
)
assert "multi_test" in obj, "Missing multi_test field in raw JSON"
assert isinstance(obj["multi_test"], bool), "multi_test not boolean"
assert "precision" in obj, "Missing precision field in raw JSON"
assert obj["precision"] is None or isinstance(
obj["precision"], float
), "precision must be None or float"
validated_any = True
assert validated_any, "No valid tests payloads validated"
def test_kattis_contest_metadata(run_scraper_offline):
    """Kattis contest ids resolve through the contest path and fill both URLs.

    Validates the final payload against MetadataResult and checks that the
    fixture contest exposes exactly two problems.
    """
    exit_code, payloads = run_scraper_offline("kattis", "metadata", "open2024")
    assert exit_code == 0
    assert payloads
    result = MetadataResult.model_validate(payloads[-1])
    assert result.success is True
    assert len(result.problems) == 2
    # NOTE(review): presumably the plain slug fallback leaves these blank —
    # non-empty URLs here distinguish the contest path. Confirm in scraper.
    assert result.contest_url != ""
    assert result.standings_url != ""
def test_usaco_precision_extracted(run_scraper_offline):
    """At least one dec24_gold problem record carries a non-None precision."""
    exit_code, payloads = run_scraper_offline("usaco", "tests", "dec24_gold")
    assert exit_code == 0
    # Only per-problem records (those tagged with "problem_id") are inspected.
    found = any(
        record["precision"] is not None
        for record in payloads
        if "problem_id" in record
    )
    assert found, "Expected at least one problem with precision"
@pytest.mark.parametrize(
    "scraper,contest_id",
    [
        ("cses", "nonexistent_category_xyz"),
        ("usaco", "badformat"),
        ("kattis", "nonexistent_problem_xyz"),
    ],
)
def test_scraper_metadata_error(run_scraper_offline, scraper, contest_id):
    """Bad contest/problem ids exit with rc 1 and a structured error payload."""
    exit_code, payloads = run_scraper_offline(scraper, "metadata", contest_id)
    assert exit_code == 1
    assert payloads
    # The last emitted object is the terminal status record.
    final = payloads[-1]
    assert final.get("success") is False
    assert final.get("error")