Merge pull request #200 from barrett-ruth/fix/miscl

update uv pks
Barrett Ruth 2025-12-08 19:16:37 -06:00 committed by GitHub
commit 9926965677
12 changed files with 1208 additions and 988 deletions

.envrc (new file, 3 additions)
View file

@@ -0,0 +1,3 @@
VIRTUAL_ENV="$PWD/.venv"
PATH_add "$VIRTUAL_ENV/bin"
export VIRTUAL_ENV
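The new .envrc lets direnv activate the uv-managed .venv automatically on entering the directory (PATH_add prepends its bin to PATH). A quick way to confirm the activation took effect is to compare the exported VIRTUAL_ENV against the running interpreter; a minimal sketch, not a file from this repo:

# check_venv.py (hypothetical helper): confirms the direnv-exported .venv
# is the interpreter actually in use.
import os
import sys

venv = os.environ.get("VIRTUAL_ENV", "")
print("VIRTUAL_ENV:", venv or "<not set>")
print("sys.prefix :", sys.prefix)
print("activated  :", bool(venv) and sys.prefix.startswith(venv))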

View file

@@ -115,10 +115,10 @@ jobs:
- uses: actions/checkout@v4
- name: Install uv
uses: astral-sh/setup-uv@v4
- name: Install mypy
run: uv tool install mypy
- name: Type check Python files with mypy
run: mypy .
- name: Install dependencies with uv
run: uv sync --dev
- name: Type check Python files with ty
run: uvx ty check .
markdown-format:
name: Markdown Format Check
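CI type checking now installs the project with uv sync --dev and runs uvx ty check . instead of a mypy tool install. For orientation only, a hypothetical snippet (not taken from the repo) of the kind of annotation mismatch a static checker such as ty or mypy reports:

# Hypothetical example: a return-type mismatch that a type checker flags.
def parse_limit(raw: str) -> int:
    if not raw:
        return None  # flagged: None is not compatible with the declared int return type
    return int(raw)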

View file

@@ -20,17 +20,16 @@ repos:
args: ['--fix', '--select=I']
files: \.py$
- repo: local
hooks:
- id: mypy
name: mypy (type check)
entry: uv run mypy
language: system
args: ['.']
pass_filenames: false
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v4.0.0-alpha.8
hooks:
- id: prettier
name: prettier
- repo: local
hooks:
- id: ty-type-check
name: ty (Python type checker)
language: system
entry: uv run ty check
types: [python]

View file

@@ -5,3 +5,4 @@ build/
dist/
*.pyc
__pycache__/
tests/fixtures/

View file

@@ -1,5 +1,8 @@
# cp.nvim
> NOTE: [scrapling] is currently down, so Codeforces scraping will not work for
> now. Track the issue [here](https://github.com/D4Vinci/Scrapling/issues/118).
**The definitive competitive programming environment for Neovim**
Scrape problems, run tests, and debug solutions across multiple platforms with

View file

@@ -13,11 +13,11 @@ dependencies = [
"pydantic>=2.11.10",
"requests>=2.32.5",
"scrapling[fetchers]>=0.3.5",
"types-requests>=2.32.4.20250913",
]
[dependency-groups]
dev = [
"mypy>=1.18.2",
"types-beautifulsoup4>=4.12.0.20250516",
"types-requests>=2.32.4.20250913",
"pytest>=8.0.0",
@@ -25,6 +25,7 @@ dev = [
"pre-commit>=4.3.0",
"basedpyright>=1.31.6",
"ruff>=0.14.2",
"ty>=0.0.1a32",
]
[tool.pytest.ini_options]

View file

@@ -71,7 +71,7 @@ def _retry_after_requests(details):
on_backoff=_retry_after_requests,
)
def _fetch(url: str) -> str:
r = _session.get(url, headers=HEADERS, timeout=TIMEOUT_SECONDS, verify=False)
r = _session.get(url, headers=HEADERS, timeout=TIMEOUT_SECONDS)
if r.status_code in RETRY_STATUS:
raise requests.HTTPError(response=r)
r.raise_for_status()
@@ -244,7 +244,6 @@ def _to_problem_summaries(rows: list[dict[str, str]]) -> list[ProblemSummary]:
async def _fetch_all_contests_async() -> list[ContestSummary]:
async with httpx.AsyncClient(
limits=httpx.Limits(max_connections=100, max_keepalive_connections=100),
verify=False,
) as client:
first_html = await _get_async(client, ARCHIVE_URL)
last = _parse_last_page(first_html)
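Both hunks drop verify=False, so the requests session and the httpx AsyncClient go back to verifying TLS certificates by default. A minimal sketch of the retry-wrapped fetch pattern the first hunk shows, assuming the backoff library implied by the decorator arguments; HEADERS, TIMEOUT_SECONDS, and RETRY_STATUS below are stand-ins for the module's real constants:

# Sketch only: stand-in constants and max_tries; the real module defines its own.
import backoff
import requests

HEADERS = {"User-Agent": "cp.nvim scraper"}
TIMEOUT_SECONDS = 15.0
RETRY_STATUS = {429, 500, 502, 503, 504}
_session = requests.Session()

@backoff.on_exception(backoff.expo, requests.HTTPError, max_tries=5)
def _fetch(url: str) -> str:
    # TLS verification is left at the requests default (enabled).
    r = _session.get(url, headers=HEADERS, timeout=TIMEOUT_SECONDS)
    if r.status_code in RETRY_STATUS:
        raise requests.HTTPError(response=r)  # surfaced so backoff retries it
    r.raise_for_status()
    return r.text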

View file

@@ -1,5 +1,4 @@
#!/usr/bin/env python3
import asyncio
import json
import re
@@ -25,13 +24,11 @@ API_CONTESTS_ALL = "/api/list/contests/all"
API_CONTEST = "/api/contests/{contest_id}"
API_PROBLEM = "/api/contests/{contest_id}/problems/{problem_id}"
PROBLEM_URL = "https://www.codechef.com/problems/{problem_id}"
HEADERS = {
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
}
TIMEOUT_S = 15.0
CONNECTIONS = 8
MEMORY_LIMIT_RE = re.compile(
r"Memory\s+[Ll]imit.*?([0-9.]+)\s*(MB|GB)", re.IGNORECASE | re.DOTALL
)
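MEMORY_LIMIT_RE pulls a number plus an MB/GB unit out of the problem page HTML. The diff does not show _extract_memory_limit itself, so the following is only a plausible sketch of such a helper, using the 256.0 MB fallback that appears later in the file:

# Hypothetical sketch; the repo's actual _extract_memory_limit is not shown here.
import re

MEMORY_LIMIT_RE = re.compile(
    r"Memory\s+[Ll]imit.*?([0-9.]+)\s*(MB|GB)", re.IGNORECASE | re.DOTALL
)

def extract_memory_limit_mb(html: str, default: float = 256.0) -> float:
    match = MEMORY_LIMIT_RE.search(html)
    if not match:
        return default
    value = float(match.group(1))
    unit = match.group(2).upper()
    return value * 1024 if unit == "GB" else value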
@@ -74,12 +71,10 @@ class CodeChefScraper(BaseScraper):
return self._create_metadata_error(
f"Failed to fetch contest {contest_id}: {e}", contest_id
)
if not data.get("problems"):
return self._create_metadata_error(
f"No problems found for contest {contest_id}", contest_id
)
problems = []
for problem_code, problem_data in data["problems"].items():
if problem_data.get("category_name") == "main":
@@ -89,7 +84,6 @@ class CodeChefScraper(BaseScraper):
name=problem_data.get("name", problem_code),
)
)
return MetadataResult(
success=True,
error="",
@@ -104,11 +98,9 @@ class CodeChefScraper(BaseScraper):
data = await fetch_json(client, API_CONTESTS_ALL)
except httpx.HTTPStatusError as e:
return self._create_contests_error(f"Failed to fetch contests: {e}")
all_contests = data.get("future_contests", []) + data.get(
"past_contests", []
)
max_num = 0
for contest in all_contests:
contest_code = contest.get("contest_code", "")
@@ -117,10 +109,8 @@
if match:
num = int(match.group(1))
max_num = max(max_num, num)
if max_num == 0:
return self._create_contests_error("No Starters contests found")
contests = []
sem = asyncio.Semaphore(CONNECTIONS)
@@ -136,14 +126,11 @@
print(f"Error fetching {parent_id}: {e}", file=sys.stderr)
return []
child_contests = parent_data.get("child_contests", {})
if not child_contests:
return []
base_name = f"Starters {i}"
divisions = []
for div_key, div_data in child_contests.items():
div_code = div_data.get("contest_code", "")
div_num = div_data.get("div", {}).get("div_number", "")
@@ -155,37 +142,52 @@
display_name=f"{base_name} (Div. {div_num})",
)
)
return divisions
tasks = [fetch_divisions(i) for i in range(1, max_num + 1)]
for coro in asyncio.as_completed(tasks):
divisions = await coro
contests.extend(divisions)
return ContestListResult(success=True, error="", contests=contests)
async def stream_tests_for_category_async(self, contest_id: str) -> None:
async def stream_tests_for_category_async(self, category_id: str) -> None:
async with httpx.AsyncClient(
limits=httpx.Limits(max_connections=CONNECTIONS)
) as client:
try:
contest_data = await fetch_json(
client, API_CONTEST.format(contest_id=contest_id)
client, API_CONTEST.format(contest_id=category_id)
)
except Exception as e:
print(
json.dumps(
{"error": f"Failed to fetch contest {category_id}: {str(e)}"}
),
flush=True,
)
except Exception:
return
all_problems = contest_data.get("problems", {})
if not all_problems:
print(
json.dumps(
{"error": f"No problems found for contest {category_id}"}
),
flush=True,
)
return
problems = {
code: data
for code, data in all_problems.items()
if data.get("category_name") == "main"
}
if not problems:
print(
json.dumps(
{"error": f"No main problems found for contest {category_id}"}
),
flush=True,
)
return
sem = asyncio.Semaphore(CONNECTIONS)
async def run_one(problem_code: str) -> dict[str, Any]:
@@ -194,10 +196,9 @@ class CodeChefScraper(BaseScraper):
problem_data = await fetch_json(
client,
API_PROBLEM.format(
contest_id=contest_id, problem_id=problem_code
contest_id=category_id, problem_id=problem_code
),
)
sample_tests = (
problem_data.get("problemComponents", {}).get(
"sampleTestCases", []
@@ -212,30 +213,24 @@
for t in sample_tests
if not t.get("isDeleted", False)
]
time_limit_str = problem_data.get("max_timelimit", "1")
timeout_ms = int(float(time_limit_str) * 1000)
problem_url = PROBLEM_URL.format(problem_id=problem_code)
loop = asyncio.get_event_loop()
html = await loop.run_in_executor(
None, _fetch_html_sync, problem_url
)
memory_mb = _extract_memory_limit(html)
interactive = False
except Exception:
tests = []
timeout_ms = 1000
memory_mb = 256.0
interactive = False
combined_input = "\n".join(t.input for t in tests) if tests else ""
combined_expected = (
"\n".join(t.expected for t in tests) if tests else ""
)
return {
"problem_id": problem_code,
"combined": {
@@ -266,10 +261,8 @@ async def main_async() -> int:
)
print(result.model_dump_json())
return 1
mode: str = sys.argv[1]
scraper = CodeChefScraper()
if mode == "metadata":
if len(sys.argv) != 3:
result = MetadataResult(
@@ -283,7 +276,6 @@ async def main_async() -> int:
result = await scraper.scrape_contest_metadata(contest_id)
print(result.model_dump_json())
return 0 if result.success else 1
if mode == "tests":
if len(sys.argv) != 3:
tests_result = TestsResult(
@@ -300,7 +292,6 @@ async def main_async() -> int:
contest_id = sys.argv[2]
await scraper.stream_tests_for_category_async(contest_id)
return 0
if mode == "contests":
if len(sys.argv) != 2:
contest_result = ContestListResult(
@@ -311,7 +302,6 @@ async def main_async() -> int:
contest_result = await scraper.scrape_contest_list()
print(contest_result.model_dump_json())
return 0 if contest_result.success else 1
result = MetadataResult(
success=False,
error=f"Unknown mode: {mode}. Use 'metadata <contest_id>', 'tests <contest_id>', or 'contests'",

View file

@@ -27,7 +27,7 @@
<a href="/" class="logo"><img src="/logo.png?1" alt="CSES" /></a>
<a
class="menu-toggle"
onclick="document.body.classList.toggle('menu-open');"
onclick="document.body.classList.toggle('menu-open')"
>
<i class="fas fa-bars"></i>
</a>

View file

@@ -27,7 +27,7 @@
<a href="/" class="logo"><img src="/logo.png?1" alt="CSES" /></a>
<a
class="menu-toggle"
onclick="document.body.classList.toggle('menu-open');"
onclick="document.body.classList.toggle('menu-open')"
>
<i class="fas fa-bars"></i>
</a>

View file

@@ -27,7 +27,7 @@
<a href="/" class="logo"><img src="/logo.png?1" alt="CSES" /></a>
<a
class="menu-toggle"
onclick="document.body.classList.toggle('menu-open');"
onclick="document.body.classList.toggle('menu-open')"
>
<i class="fas fa-bars"></i>
</a>

uv.lock (generated, 2094 lines changed)

File diff suppressed because it is too large.