ci: format

This commit is contained in:
Barrett Ruth 2026-03-04 12:46:47 -05:00
parent a37e7f2e4a
commit ff03b932b1
Signed by: barrett
GPG key ID: A6C96C9349D2FC81
8 changed files with 28 additions and 79 deletions

View file

@@ -17,16 +17,9 @@ from urllib3.util.retry import Retry
from .base import BaseScraper, extract_precision
from .language_ids import get_language_id
from .models import (
CombinedTest,
ContestListResult,
ContestSummary,
MetadataResult,
ProblemSummary,
SubmitResult,
TestCase,
TestsResult,
)
from .models import (CombinedTest, ContestListResult, ContestSummary,
MetadataResult, ProblemSummary, SubmitResult, TestCase,
TestsResult)
MIB_TO_MB = 1.048576
BASE_URL = "https://atcoder.jp"
@@ -385,9 +378,7 @@ class AtcoderScraper(BaseScraper):
try:
session = curl_requests.Session(impersonate="chrome")
login_page = session.get(
f"{BASE_URL}/login", timeout=TIMEOUT_SECONDS
)
login_page = session.get(f"{BASE_URL}/login", timeout=TIMEOUT_SECONDS)
login_page.raise_for_status()
soup = BeautifulSoup(login_page.text, "html.parser")
csrf_input = soup.find("input", {"name": "csrf_token"})
@@ -414,9 +405,7 @@ class AtcoderScraper(BaseScraper):
success=False,
error="Login failed: incorrect username or password",
)
session.get(
BASE_URL + location, timeout=TIMEOUT_SECONDS
)
session.get(BASE_URL + location, timeout=TIMEOUT_SECONDS)
else:
login_resp.raise_for_status()

View file

@@ -6,13 +6,8 @@ import sys
from abc import ABC, abstractmethod
from .language_ids import get_language_id
from .models import (
CombinedTest,
ContestListResult,
MetadataResult,
SubmitResult,
TestsResult,
)
from .models import (CombinedTest, ContestListResult, MetadataResult,
SubmitResult, TestsResult)
_PRECISION_ABS_REL_RE = re.compile(
r"(?:absolute|relative)\s+error[^.]*?10\s*[\^{]\s*\{?\s*[-\u2212]\s*(\d+)\s*\}?",

View file

@@ -9,14 +9,8 @@ import httpx
from curl_cffi import requests as curl_requests
from .base import BaseScraper, extract_precision
from .models import (
ContestListResult,
ContestSummary,
MetadataResult,
ProblemSummary,
SubmitResult,
TestCase,
)
from .models import (ContestListResult, ContestSummary, MetadataResult,
ProblemSummary, SubmitResult, TestCase)
BASE_URL = "https://www.codechef.com"
API_CONTESTS_ALL = "/api/list/contests/all"

View file

@@ -10,14 +10,8 @@ from bs4 import BeautifulSoup, Tag
from curl_cffi import requests as curl_requests
from .base import BaseScraper, extract_precision
from .models import (
ContestListResult,
ContestSummary,
MetadataResult,
ProblemSummary,
SubmitResult,
TestCase,
)
from .models import (ContestListResult, ContestSummary, MetadataResult,
ProblemSummary, SubmitResult, TestCase)
BASE_URL = "https://codeforces.com"
API_CONTEST_LIST_URL = f"{BASE_URL}/api/contest.list"

View file

@@ -8,14 +8,8 @@ from typing import Any
import httpx
from .base import BaseScraper, extract_precision
from .models import (
ContestListResult,
ContestSummary,
MetadataResult,
ProblemSummary,
SubmitResult,
TestCase,
)
from .models import (ContestListResult, ContestSummary, MetadataResult,
ProblemSummary, SubmitResult, TestCase)
BASE_URL = "https://cses.fi"
INDEX_PATH = "/problemset"

View file

@@ -10,14 +10,8 @@ from datetime import datetime
import httpx
from .base import BaseScraper
from .models import (
ContestListResult,
ContestSummary,
MetadataResult,
ProblemSummary,
SubmitResult,
TestCase,
)
from .models import (ContestListResult, ContestSummary, MetadataResult,
ProblemSummary, SubmitResult, TestCase)
BASE_URL = "https://open.kattis.com"
HEADERS = {

View file

@@ -8,14 +8,8 @@ from typing import Any, cast
import httpx
from .base import BaseScraper
from .models import (
ContestListResult,
ContestSummary,
MetadataResult,
ProblemSummary,
SubmitResult,
TestCase,
)
from .models import (ContestListResult, ContestSummary, MetadataResult,
ProblemSummary, SubmitResult, TestCase)
BASE_URL = "http://www.usaco.org"
HEADERS = {
@@ -37,8 +31,7 @@ DIVISION_HEADING_RE = re.compile(
re.IGNORECASE,
)
PROBLEM_BLOCK_RE = re.compile(
r"<b>([^<]+)</b>\s*<br\s*/?>.*?"
r"viewproblem2&cpid=(\d+)",
r"<b>([^<]+)</b>\s*<br\s*/?>.*?" r"viewproblem2&cpid=(\d+)",
re.DOTALL,
)
SAMPLE_IN_RE = re.compile(r"<pre\s+class=['\"]in['\"]>(.*?)</pre>", re.DOTALL)

View file

@@ -1,10 +1,6 @@
import pytest
from scrapers.models import (
ContestListResult,
MetadataResult,
TestsResult,
)
from scrapers.models import ContestListResult, MetadataResult, TestsResult
MATRIX = {
"cses": {
@@ -61,9 +57,9 @@ def test_scraper_offline_fixture_matrix(run_scraper_offline, scraper, mode):
assert hasattr(tr.combined, "input"), "combined missing input"
assert hasattr(tr.combined, "expected"), "combined missing expected"
assert isinstance(tr.combined.input, str), "combined.input not string"
assert isinstance(tr.combined.expected, str), (
"combined.expected not string"
)
assert isinstance(
tr.combined.expected, str
), "combined.expected not string"
assert hasattr(tr, "multi_test"), "Missing multi_test field"
assert isinstance(tr.multi_test, bool), "multi_test not boolean"
validated_any = True
@@ -77,12 +73,12 @@ def test_scraper_offline_fixture_matrix(run_scraper_offline, scraper, mode):
assert isinstance(obj["combined"], dict), "combined not a dict"
assert "input" in obj["combined"], "combined missing input key"
assert "expected" in obj["combined"], "combined missing expected key"
assert isinstance(obj["combined"]["input"], str), (
"combined.input not string"
)
assert isinstance(obj["combined"]["expected"], str), (
"combined.expected not string"
)
assert isinstance(
obj["combined"]["input"], str
), "combined.input not string"
assert isinstance(
obj["combined"]["expected"], str
), "combined.expected not string"
assert "multi_test" in obj, "Missing multi_test field in raw JSON"
assert isinstance(obj["multi_test"], bool), "multi_test not boolean"
validated_any = True