fix(ci): run as module

This commit is contained in:
Barrett Ruth 2025-09-19 21:20:31 -04:00
parent a7cd58ad90
commit ff9a3d1abb
6 changed files with 11 additions and 10 deletions

View file

@@ -114,4 +114,4 @@ jobs:
- name: Install dependencies with mypy - name: Install dependencies with mypy
run: uv sync --dev run: uv sync --dev
- name: Type check Python files with mypy - name: Type check Python files with mypy
run: cd scrapers && PYTHONPATH=.. uv run mypy . ../tests/scrapers/ run: uv run mypy scrapers/ tests/scrapers/

View file

@@ -61,4 +61,4 @@ jobs:
- name: Install dependencies with pytest - name: Install dependencies with pytest
run: uv sync --dev run: uv sync --dev
- name: Run Python tests - name: Run Python tests
run: cd scrapers && PYTHONPATH=.. uv run pytest ../tests/scrapers/ -v run: uv run pytest tests/scrapers/ -v

View file

@@ -88,7 +88,6 @@ function M.scrape_contest_metadata(platform, contest_id)
end end
local plugin_path = get_plugin_path() local plugin_path = get_plugin_path()
local scraper_path = plugin_path .. '/scrapers/' .. platform .. '.py'
local args local args
if platform == 'cses' then if platform == 'cses' then
@@ -97,7 +96,8 @@ function M.scrape_contest_metadata(platform, contest_id)
'run', 'run',
'--directory', '--directory',
plugin_path, plugin_path,
scraper_path, '-m',
'scrapers.' .. platform,
'metadata', 'metadata',
} }
else else
@@ -211,7 +211,6 @@ function M.scrape_problem(ctx)
end end
local plugin_path = get_plugin_path() local plugin_path = get_plugin_path()
local scraper_path = plugin_path .. '/scrapers/' .. ctx.contest .. '.py'
local args local args
if ctx.contest == 'cses' then if ctx.contest == 'cses' then
@@ -220,7 +219,8 @@ function M.scrape_problem(ctx)
'run', 'run',
'--directory', '--directory',
plugin_path, plugin_path,
scraper_path, '-m',
'scrapers.' .. ctx.contest,
'tests', 'tests',
ctx.contest_id, ctx.contest_id,
} }
@@ -230,7 +230,8 @@ function M.scrape_problem(ctx)
'run', 'run',
'--directory', '--directory',
plugin_path, plugin_path,
scraper_path, '-m',
'scrapers.' .. ctx.contest,
'tests', 'tests',
ctx.contest_id, ctx.contest_id,
ctx.problem_id, ctx.problem_id,

View file

@@ -8,7 +8,7 @@ from dataclasses import asdict
import requests import requests
from bs4 import BeautifulSoup, Tag from bs4 import BeautifulSoup, Tag
from models import MetadataResult, ProblemSummary, TestCase, TestsResult from .models import MetadataResult, ProblemSummary, TestCase, TestsResult
def extract_problem_limits(soup: BeautifulSoup) -> tuple[int, float]: def extract_problem_limits(soup: BeautifulSoup) -> tuple[int, float]:

View file

@@ -7,7 +7,7 @@ from dataclasses import asdict
import cloudscraper import cloudscraper
from bs4 import BeautifulSoup, Tag from bs4 import BeautifulSoup, Tag
from models import MetadataResult, ProblemSummary, TestCase, TestsResult from .models import MetadataResult, ProblemSummary, TestCase, TestsResult
def scrape(url: str) -> list[TestCase]: def scrape(url: str) -> list[TestCase]:

View file

@@ -8,7 +8,7 @@ from dataclasses import asdict
import requests import requests
from bs4 import BeautifulSoup, Tag from bs4 import BeautifulSoup, Tag
from models import MetadataResult, ProblemSummary, TestCase, TestsResult from .models import MetadataResult, ProblemSummary, TestCase, TestsResult
def parse_problem_url(problem_input: str) -> str | None: def parse_problem_url(problem_input: str) -> str | None: