feat: scraper cleanup
parent 002b75b0ab
commit ca6f8417c0
5 changed files with 296 additions and 178 deletions
pyproject.toml

@@ -13,4 +13,6 @@ dependencies = [
 [dependency-groups]
 dev = [
     "mypy>=1.18.2",
+    "types-beautifulsoup4>=4.12.0.20250516",
+    "types-requests>=2.32.4.20250913",
 ]
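Note (editor's context, not part of the diff): with these stubs installed, mypy types soup.find(...) as Tag | NavigableString | None rather than Any, which is what forces the isinstance(..., Tag) narrowing added throughout the scrapers below. A minimal sketch of the pattern, assuming types-beautifulsoup4 is present:

```python
# Minimal sketch: narrowing BeautifulSoup results for mypy.
# find() may return a Tag, a NavigableString, or None; only a Tag
# supports element queries like find_all(), so narrow before use.
from bs4 import BeautifulSoup, Tag

html = "<table class='table'><tr><th>h</th></tr><tr><td>A</td></tr></table>"
soup = BeautifulSoup(html, "html.parser")
task_table = soup.find("table", class_="table")
if not task_table or not isinstance(task_table, Tag):
    rows = []
else:
    rows = task_table.find_all("tr")[1:]  # mypy now knows task_table is a Tag
print(len(rows))  # 1
```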
scrapers/atcoder.py

@@ -2,9 +2,10 @@
 import json
 import sys
+from typing import Any

 import requests
-from bs4 import BeautifulSoup
+from bs4 import BeautifulSoup, Tag


 def parse_problem_url(contest_id: str, problem_letter: str) -> str:
@@ -23,7 +24,6 @@ def extract_problem_from_row(row, contest_id: str) -> dict[str, str] | None:
     task_name = task_link.get_text(strip=True)
     task_href = task_link.get("href", "")

     if not task_href:
         return None
@@ -50,13 +50,11 @@ def scrape_contest_problems(contest_id: str) -> list[dict[str, str]]:
     soup = BeautifulSoup(response.text, "html.parser")
     task_table = soup.find("table", class_="table")

-    if not task_table:
+    if not task_table or not isinstance(task_table, Tag):
         return []

-    rows = task_table.find_all("tr")[1:]
-    problems = []
-
+    rows = task_table.find_all("tr")[1:]  # skip header
+    problems: list[dict[str, str]] = []
     for row in rows:
         problem = extract_problem_from_row(row, contest_id)
         if problem:
@@ -92,7 +90,6 @@ def extract_test_case_from_headers(sample_headers, i: int) -> tuple[str, str] |
     input_text = input_pre.get_text().strip().replace("\r", "")
     output_text = output_pre.get_text().strip().replace("\r", "")

     if not input_text or not output_text:
         return None
@@ -109,19 +106,17 @@ def scrape(url: str) -> list[tuple[str, str]]:
     response.raise_for_status()

     soup = BeautifulSoup(response.text, "html.parser")

     sample_headers = soup.find_all(
         "h3", string=lambda x: x and "sample" in x.lower() if x else False
     )

-    tests = []
+    tests: list[tuple[str, str]] = []
     i = 0

     while i < len(sample_headers):
         test_case = extract_test_case_from_headers(sample_headers, i)
         if test_case:
             tests.append(test_case)
-            i += 2
+            i += 2  # move from "Sample Input n" to after "Sample Output n"
         else:
             i += 1
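The new comment on i += 2 is worth unpacking. A toy illustration of the scan (illustrative names, not the repo's code): the page's headers alternate "Sample Input n" / "Sample Output n", so a successful extraction consumes a pair of headers while a miss advances by one.

```python
# Toy illustration of the stride-2 header scan described above.
headers = ["Sample Input 1", "Sample Output 1", "Notes", "Sample Input 2", "Sample Output 2"]

def pair_at(h: list[str], i: int) -> tuple[str, str] | None:
    # Hypothetical stand-in for extract_test_case_from_headers.
    if i + 1 < len(h) and h[i].startswith("Sample Input") and h[i + 1].startswith("Sample Output"):
        return h[i], h[i + 1]
    return None

tests = []
i = 0
while i < len(headers):
    case = pair_at(headers, i)
    if case:
        tests.append(case)
        i += 2  # consumed an input/output pair
    else:
        i += 1  # not a pair start; advance one header
print(tests)  # [('Sample Input 1', 'Sample Output 1'), ('Sample Input 2', 'Sample Output 2')]
```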
@@ -134,88 +129,111 @@ def scrape(url: str) -> list[tuple[str, str]]:

 def main() -> None:
     if len(sys.argv) < 2:
-        result: dict[str, str | bool] = {
-            "success": False,
-            "error": "Usage: atcoder.py metadata <contest_id> OR atcoder.py tests <contest_id> <problem_letter>",
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": False,
+                    "error": "Usage: atcoder.py metadata <contest_id> OR atcoder.py tests <contest_id> <problem_letter>",
+                }
+            )
+        )
         sys.exit(1)

     mode: str = sys.argv[1]

     if mode == "metadata":
         if len(sys.argv) != 3:
-            result = {
-                "success": False,
-                "error": "Usage: atcoder.py metadata <contest_id>",
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": "Usage: atcoder.py metadata <contest_id>",
+                    }
+                )
+            )
             sys.exit(1)

         contest_id: str = sys.argv[2]
         problems: list[dict[str, str]] = scrape_contest_problems(contest_id)

         if not problems:
-            result = {
-                "success": False,
-                "error": f"No problems found for contest {contest_id}",
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": f"No problems found for contest {contest_id}",
+                    }
+                )
+            )
             sys.exit(1)

-        result = {
-            "success": True,
-            "contest_id": contest_id,
-            "problems": problems,
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": True,
+                    "contest_id": contest_id,
+                    "problems": problems,
+                }
+            )
+        )

     elif mode == "tests":
         if len(sys.argv) != 4:
-            result = {
-                "success": False,
-                "error": "Usage: atcoder.py tests <contest_id> <problem_letter>",
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": "Usage: atcoder.py tests <contest_id> <problem_letter>",
+                    }
+                )
+            )
             sys.exit(1)

-        contest_id: str = sys.argv[2]
+        test_contest_id: str = sys.argv[2]
         problem_letter: str = sys.argv[3]
-        problem_id: str = contest_id + problem_letter.lower()
+        problem_id: str = f"{test_contest_id}_{problem_letter.lower()}"

-        url: str = parse_problem_url(contest_id, problem_letter)
+        url: str = parse_problem_url(test_contest_id, problem_letter)
         print(f"Scraping: {url}", file=sys.stderr)

         tests: list[tuple[str, str]] = scrape(url)

         if not tests:
-            result = {
-                "success": False,
-                "error": f"No tests found for {contest_id} {problem_letter}",
-                "problem_id": problem_id,
-                "url": url,
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": f"No tests found for {test_contest_id} {problem_letter}",
+                        "problem_id": problem_id,
+                        "url": url,
+                    }
+                )
+            )
             sys.exit(1)

-        test_list: list[dict[str, str]] = []
-        for input_data, output_data in tests:
-            test_list.append({"input": input_data, "expected": output_data})
+        test_list: list[dict[str, str]] = [
+            {"input": i, "expected": o} for i, o in tests
+        ]

-        result = {
-            "success": True,
-            "problem_id": problem_id,
-            "url": url,
-            "tests": test_list,
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": True,
+                    "problem_id": problem_id,
+                    "url": url,
+                    "tests": test_list,
+                }
+            )
+        )

     else:
-        result = {
-            "success": False,
-            "error": f"Unknown mode: {mode}. Use 'metadata' or 'tests'",
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": False,
+                    "error": f"Unknown mode: {mode}. Use 'metadata' or 'tests'",
+                }
+            )
+        )
         sys.exit(1)
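One plausible motivation for dropping the result variable in favor of inline json.dumps(...) calls (my inference, not stated in the commit): annotating result more than once in a scope trips mypy's no-redef check, and the payload shapes differ between branches. A sketch of the failure mode and one alternative:

```python
# Sketch of the mypy failure the old shape runs into (assumed motivation):
#
#     result: dict[str, str | bool] = {...}
#     ...
#     result: dict[str, str | bool | list] = {...}
#     # error: Name "result" already defined  [no-redef]
#
# Inlining print(json.dumps({...})) avoids naming the payload at all.
# A hypothetical helper would work equally well:
import json


def emit(payload: dict[str, object]) -> None:
    """Print a JSON payload; stands in for the inlined json.dumps calls."""
    print(json.dumps(payload))


emit({"success": False, "error": "Usage: atcoder.py metadata <contest_id>"})
```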
scrapers/codeforces.py

@@ -2,9 +2,10 @@
 import json
 import sys
+from typing import Any

 import cloudscraper
-from bs4 import BeautifulSoup
+from bs4 import BeautifulSoup, Tag


 def scrape(url: str) -> list[tuple[str, str]]:
@@ -17,12 +18,12 @@ def scrape(url: str) -> list[tuple[str, str]]:
     input_sections = soup.find_all("div", class_="input")
     output_sections = soup.find_all("div", class_="output")

-    individual_inputs = {}
-    individual_outputs = {}
+    individual_inputs: dict[str, list[str]] = {}
+    individual_outputs: dict[str, list[str]] = {}

     for inp_section in input_sections:
         inp_pre = inp_section.find("pre")
-        if not inp_pre:
+        if not inp_pre or not isinstance(inp_pre, Tag):
             continue

         test_line_divs = inp_pre.find_all(
@@ -51,7 +52,7 @@ def scrape(url: str) -> list[tuple[str, str]]:
     for out_section in output_sections:
         out_pre = out_section.find("pre")
-        if not out_pre:
+        if not out_pre or not isinstance(out_pre, Tag):
             continue

         test_line_divs = out_pre.find_all(
@@ -95,12 +96,12 @@ def scrape(url: str) -> list[tuple[str, str]]:
     for inp_section in input_sections:
         inp_pre = inp_section.find("pre")
-        if not inp_pre:
+        if not inp_pre or not isinstance(inp_pre, Tag):
             continue

         divs = inp_pre.find_all("div")
         if divs:
-            lines = [div.get_text().strip() for div in divs]
+            lines = [div.get_text().strip() for div in divs if isinstance(div, Tag)]
             text = "\n".join(lines)
         else:
             text = inp_pre.get_text().replace("\r", "").strip()
@@ -108,12 +109,12 @@ def scrape(url: str) -> list[tuple[str, str]]:
     for out_section in output_sections:
         out_pre = out_section.find("pre")
-        if not out_pre:
+        if not out_pre or not isinstance(out_pre, Tag):
             continue

         divs = out_pre.find_all("div")
         if divs:
-            lines = [div.get_text().strip() for div in divs]
+            lines = [div.get_text().strip() for div in divs if isinstance(div, Tag)]
             text = "\n".join(lines)
         else:
             text = out_pre.get_text().replace("\r", "").strip()
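These two symmetric hunks handle the two sample layouts the code expects from Codeforces: newer pages wrap each test line in its own div inside the pre, older pages put raw text in the pre. A self-contained sketch of the branch (toy HTML of my own, not taken from the site):

```python
# Sketch: normalize both <pre> shapes to the same newline-joined text.
from bs4 import BeautifulSoup, Tag

new_style = "<pre><div>1 2</div><div>3 4</div></pre>"  # per-line divs
old_style = "<pre>1 2\r\n3 4</pre>"                     # raw text

for html in (new_style, old_style):
    pre = BeautifulSoup(html, "html.parser").find("pre")
    assert isinstance(pre, Tag)  # narrow for mypy, as in the diff
    divs = pre.find_all("div")
    if divs:
        text = "\n".join(div.get_text().strip() for div in divs if isinstance(div, Tag))
    else:
        text = pre.get_text().replace("\r", "").strip()
    print(text)  # both shapes yield "1 2\n3 4"
```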
@@ -152,7 +153,9 @@ def scrape_contest_problems(contest_id: str) -> list[dict[str, str]]:
     )

     for link in problem_links:
-        href: str = link.get("href", "")
+        if not isinstance(link, Tag):
+            continue
+        href: str = str(link.get("href", ""))
         if f"/contest/{contest_id}/problem/" in href:
             problem_letter: str = href.split("/")[-1].lower()
             problem_name: str = link.get_text(strip=True)
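The str(...) wrap looks redundant at runtime but matters to the checker: under the bs4 stubs, Tag.get() can be typed as returning a list of attribute values (multi-valued attributes like class), so annotating href: str without coercion fails mypy, and the isinstance guard narrows the loosely typed find_all results first. A small sketch:

```python
# Sketch: why href needs str(...) under types-beautifulsoup4.
# Tag.get("href", "") is typed roughly as str | list[str], so str(...)
# collapses it to str for the href: str annotation.
from bs4 import BeautifulSoup, Tag

soup = BeautifulSoup('<a href="/contest/1/problem/A">A</a>', "html.parser")
for link in soup.find_all("a"):
    if not isinstance(link, Tag):  # skip non-Tag nodes before calling .get()
        continue
    href: str = str(link.get("href", ""))
    print(href)  # /contest/1/problem/A
```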
@@ -183,86 +186,110 @@ def scrape_sample_tests(url: str) -> list[tuple[str, str]]:

 def main() -> None:
     if len(sys.argv) < 2:
-        result: dict[str, str | bool] = {
-            "success": False,
-            "error": "Usage: codeforces.py metadata <contest_id> OR codeforces.py tests <contest_id> <problem_letter>",
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": False,
+                    "error": "Usage: codeforces.py metadata <contest_id> OR codeforces.py tests <contest_id> <problem_letter>",
+                }
+            )
+        )
         sys.exit(1)

     mode: str = sys.argv[1]

     if mode == "metadata":
         if len(sys.argv) != 3:
-            result: dict[str, str | bool] = {
-                "success": False,
-                "error": "Usage: codeforces.py metadata <contest_id>",
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": "Usage: codeforces.py metadata <contest_id>",
+                    }
+                )
+            )
             sys.exit(1)

         contest_id: str = sys.argv[2]
         problems: list[dict[str, str]] = scrape_contest_problems(contest_id)

         if not problems:
-            result: dict[str, str | bool] = {
-                "success": False,
-                "error": f"No problems found for contest {contest_id}",
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": f"No problems found for contest {contest_id}",
+                    }
+                )
+            )
             sys.exit(1)

-        result: dict[str, str | bool | list] = {
-            "success": True,
-            "contest_id": contest_id,
-            "problems": problems,
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": True,
+                    "contest_id": contest_id,
+                    "problems": problems,
+                }
+            )
+        )

     elif mode == "tests":
         if len(sys.argv) != 4:
-            result: dict[str, str | bool] = {
-                "success": False,
-                "error": "Usage: codeforces.py tests <contest_id> <problem_letter>",
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": "Usage: codeforces.py tests <contest_id> <problem_letter>",
+                    }
+                )
+            )
             sys.exit(1)

-        contest_id: str = sys.argv[2]
+        tests_contest_id: str = sys.argv[2]
         problem_letter: str = sys.argv[3]
-        problem_id: str = contest_id + problem_letter.lower()
+        problem_id: str = tests_contest_id + problem_letter.lower()

-        url: str = parse_problem_url(contest_id, problem_letter)
+        url: str = parse_problem_url(tests_contest_id, problem_letter)
         tests: list[tuple[str, str]] = scrape_sample_tests(url)

         if not tests:
-            result: dict[str, str | bool] = {
-                "success": False,
-                "error": f"No tests found for {contest_id} {problem_letter}",
-                "problem_id": problem_id,
-                "url": url,
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": f"No tests found for {tests_contest_id} {problem_letter}",
+                        "problem_id": problem_id,
+                        "url": url,
+                    }
+                )
+            )
             sys.exit(1)

         test_list: list[dict[str, str]] = []
         for input_data, output_data in tests:
             test_list.append({"input": input_data, "expected": output_data})

-        result: dict[str, str | bool | list] = {
-            "success": True,
-            "problem_id": problem_id,
-            "url": url,
-            "tests": test_list,
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": True,
+                    "problem_id": problem_id,
+                    "url": url,
+                    "tests": test_list,
+                }
+            )
+        )

     else:
-        result: dict[str, str | bool] = {
-            "success": False,
-            "error": f"Unknown mode: {mode}. Use 'metadata' or 'tests'",
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": False,
+                    "error": f"Unknown mode: {mode}. Use 'metadata' or 'tests'",
+                }
+            )
+        )
         sys.exit(1)
142 scrapers/cses.py

@@ -2,6 +2,7 @@
 import json
 import sys
+from typing import Any

 import requests
 from bs4 import BeautifulSoup
@@ -16,7 +17,9 @@ def parse_problem_url(problem_input: str) -> str | None:

 def process_problem_element(
-    element, current_category: str, all_categories: dict
+    element,
+    current_category: str | None,
+    all_categories: dict[str, list[dict[str, str]]],
 ) -> str | None:
     if element.name == "h1":
         category_name = element.get_text().strip()
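The widened current_category: str | None matches how a walker like this threads state: no category is open until the first h1 heading. A hedged sketch of the calling pattern (illustrative only; the repo's actual loop may differ):

```python
# Illustrative only: thread the open category through a document scan.
# Each <h1> opens a new category; task links are filed under the open one.
from bs4 import BeautifulSoup

html = "<h1>Sorting</h1><a href='/problemset/task/1068'>Weird Algorithm</a>"
soup = BeautifulSoup(html, "html.parser")

all_categories: dict[str, list[dict[str, str]]] = {}
current: str | None = None  # nothing open before the first <h1>
for element in soup.find_all(["h1", "a"]):
    if element.name == "h1":
        current = element.get_text().strip()
        all_categories.setdefault(current, [])
    elif current is not None:
        all_categories[current].append({"name": element.get_text(strip=True)})
print(all_categories)  # {'Sorting': [{'name': 'Weird Algorithm'}]}
```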
@@ -52,7 +55,7 @@ def scrape_all_problems() -> dict[str, list[dict[str, str]]]:
     response.raise_for_status()

     soup = BeautifulSoup(response.text, "html.parser")
-    all_categories = {}
+    all_categories: dict[str, list[dict[str, str]]] = {}

     problem_links = soup.find_all(
         "a", href=lambda x: x and "/problemset/task/" in x
@@ -127,59 +130,79 @@ def scrape(url: str) -> list[tuple[str, str]]:

 def main() -> None:
     if len(sys.argv) < 2:
-        result: dict[str, str | bool] = {
-            "success": False,
-            "error": "Usage: cses.py metadata OR cses.py tests <problem_id_or_url>",
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": False,
+                    "error": "Usage: cses.py metadata OR cses.py tests <problem_id_or_url>",
+                }
+            )
+        )
         sys.exit(1)

     mode: str = sys.argv[1]

     if mode == "metadata":
         if len(sys.argv) != 2:
-            result = {
-                "success": False,
-                "error": "Usage: cses.py metadata",
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": "Usage: cses.py metadata",
+                    }
+                )
+            )
             sys.exit(1)

         all_categories: dict[str, list[dict[str, str]]] = scrape_all_problems()

         if not all_categories:
-            result = {
-                "success": False,
-                "error": "Failed to scrape CSES problem categories",
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": "Failed to scrape CSES problem categories",
+                    }
+                )
+            )
             sys.exit(1)

-        result = {
-            "success": True,
-            "categories": all_categories,
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": True,
+                    "categories": all_categories,
+                }
+            )
+        )

     elif mode == "tests":
         if len(sys.argv) != 3:
-            result = {
-                "success": False,
-                "error": "Usage: cses.py tests <problem_id_or_url>",
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": "Usage: cses.py tests <problem_id_or_url>",
+                    }
+                )
+            )
             sys.exit(1)

         problem_input: str = sys.argv[2]
         url: str | None = parse_problem_url(problem_input)

         if not url:
-            result = {
-                "success": False,
-                "error": f"Invalid problem input: {problem_input}. Use either problem ID (e.g., 1068) or full URL",
-                "problem_id": problem_input if problem_input.isdigit() else None,
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": f"Invalid problem input: {problem_input}. Use either problem ID (e.g., 1068) or full URL",
+                        "problem_id": problem_input
+                        if problem_input.isdigit()
+                        else None,
+                    }
+                )
+            )
             sys.exit(1)

         tests: list[tuple[str, str]] = scrape(url)
@@ -189,33 +212,42 @@ def main() -> None:
         )

         if not tests:
-            result = {
-                "success": False,
-                "error": f"No tests found for {problem_input}",
-                "problem_id": problem_id,
-                "url": url,
-            }
-            print(json.dumps(result))
+            print(
+                json.dumps(
+                    {
+                        "success": False,
+                        "error": f"No tests found for {problem_input}",
+                        "problem_id": problem_id,
+                        "url": url,
+                    }
+                )
+            )
             sys.exit(1)

-        test_list: list[dict[str, str]] = []
-        for input_data, output_data in tests:
-            test_list.append({"input": input_data, "expected": output_data})
+        test_list: list[dict[str, str]] = [
+            {"input": i, "expected": o} for i, o in tests
+        ]

-        result = {
-            "success": True,
-            "problem_id": problem_id,
-            "url": url,
-            "tests": test_list,
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": True,
+                    "problem_id": problem_id,
+                    "url": url,
+                    "tests": test_list,
+                }
+            )
+        )

     else:
-        result = {
-            "success": False,
-            "error": f"Unknown mode: {mode}. Use 'metadata' or 'tests'",
-        }
-        print(json.dumps(result))
+        print(
+            json.dumps(
+                {
+                    "success": False,
+                    "error": f"Unknown mode: {mode}. Use 'metadata' or 'tests'",
+                }
+            )
+        )
         sys.exit(1)
41 uv.lock (generated)

@@ -205,6 +205,8 @@ dependencies = [
 [package.dev-dependencies]
 dev = [
     { name = "mypy" },
+    { name = "types-beautifulsoup4" },
+    { name = "types-requests" },
 ]

 [package.metadata]
@@ -215,7 +217,11 @@ requires-dist = [
 ]

 [package.metadata.requires-dev]
-dev = [{ name = "mypy", specifier = ">=1.18.2" }]
+dev = [
+    { name = "mypy", specifier = ">=1.18.2" },
+    { name = "types-beautifulsoup4", specifier = ">=4.12.0.20250516" },
+    { name = "types-requests", specifier = ">=2.32.4.20250913" },
+]

 [[package]]
 name = "soupsieve"
@@ -226,6 +232,39 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" },
 ]

+[[package]]
+name = "types-beautifulsoup4"
+version = "4.12.0.20250516"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "types-html5lib" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6d/d1/32b410f6d65eda94d3dfb0b3d0ca151f12cb1dc4cef731dcf7cbfd8716ff/types_beautifulsoup4-4.12.0.20250516.tar.gz", hash = "sha256:aa19dd73b33b70d6296adf92da8ab8a0c945c507e6fb7d5db553415cc77b417e", size = 16628, upload-time = "2025-05-16T03:09:09.93Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7c/79/d84de200a80085b32f12c5820d4fd0addcbe7ba6dce8c1c9d8605e833c8e/types_beautifulsoup4-4.12.0.20250516-py3-none-any.whl", hash = "sha256:5923399d4a1ba9cc8f0096fe334cc732e130269541d66261bb42ab039c0376ee", size = 16879, upload-time = "2025-05-16T03:09:09.051Z" },
+]
+
+[[package]]
+name = "types-html5lib"
+version = "1.1.11.20250917"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/68/4b/a970718e8bd9324ee8fb8eaf02ff069f6d03c20d4523bb4232892ecc3d06/types_html5lib-1.1.11.20250917.tar.gz", hash = "sha256:7b52743377f33f9b4fd7385afbd2d457b8864ee51f90ff2a795ad9e8c053373a", size = 16868, upload-time = "2025-09-17T02:47:41.18Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/78/8a/da91a9c64dcb5e69beb567519857411996d8ecae9f6f128bcef8260e7a8d/types_html5lib-1.1.11.20250917-py3-none-any.whl", hash = "sha256:b294fd06d60da205daeb2f615485ca4d475088d2eff1009cf427f4a80fcd5346", size = 22908, upload-time = "2025-09-17T02:47:40.39Z" },
+]
+
+[[package]]
+name = "types-requests"
+version = "2.32.4.20250913"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113, upload-time = "2025-09-13T02:40:02.309Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" },
+]
+
 [[package]]
 name = "typing-extensions"
 version = "4.14.1"