feat: file conflict prompt, empty submit guard, and lint fixes (#366)

## Problem

Loading a problem whose source file already exists silently overwrites
user code. Submitting an empty file sends a blank submission to the
platform. Two ruff lint violations existed in the scrapers.

## Solution

- `setup.lua`: when the target source file exists on the filesystem
(`vim.uv.fs_stat`), show an inline `Overwrite? [y/N]:` prompt. Declining
keeps the existing file open and registers state normally. Skipped when
the file is already loaded in a buffer.
- `submit.lua`: resolve path to absolute, use `vim.uv.fs_stat` to verify
existence, abort with WARN if `stat.size == 0` ("Submit aborted: source
file has no content").
- `scrapers/atcoder.py`: remove unused `pathlib.Path` import (F401).
- `scrapers/base.py`: move local imports to top of file (E402).

Closes #364, #365.
This commit is contained in:
Barrett Ruth 2026-03-07 16:30:51 -05:00 committed by GitHub
parent b7ddf4c253
commit 573b335646
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
9 changed files with 145 additions and 560 deletions

View file

@@ -8,7 +8,13 @@ from typing import Any
import requests
from bs4 import BeautifulSoup, Tag
from .base import BaseScraper, clear_platform_cookies, extract_precision, load_platform_cookies, save_platform_cookies
from .base import (
BaseScraper,
clear_platform_cookies,
extract_precision,
load_platform_cookies,
save_platform_cookies,
)
from .models import (
ContestListResult,
ContestSummary,
@@ -387,7 +393,9 @@ def _login_headless_cf(credentials: dict[str, str]) -> LoginResult:
google_search=False,
cookies=saved_cookies,
) as session:
session.fetch(f"{BASE_URL}/", page_action=check_action, solve_cloudflare=True)
session.fetch(
f"{BASE_URL}/", page_action=check_action, solve_cloudflare=True
)
if logged_in:
return LoginResult(success=True, error="")
except Exception:
@@ -419,7 +427,9 @@ def _login_headless_cf(credentials: dict[str, str]) -> LoginResult:
session.fetch(f"{BASE_URL}/", page_action=verify_action, network_idle=True)
if not logged_in:
return LoginResult(success=False, error="Login failed (bad credentials?)")
return LoginResult(
success=False, error="Login failed (bad credentials?)"
)
try:
browser_cookies = session.context.cookies()
@@ -445,7 +455,6 @@ def _submit_headless(
source_code = Path(file_path).read_text()
try:
from scrapling.fetchers import StealthySession # type: ignore[import-untyped,unresolved-import]
except ImportError:
@@ -519,7 +528,9 @@ def _submit_headless(
) as session:
if not _retried and saved_cookies:
print(json.dumps({"status": "checking_login"}), flush=True)
session.fetch(f"{BASE_URL}/", page_action=check_login, solve_cloudflare=True)
session.fetch(
f"{BASE_URL}/", page_action=check_login, solve_cloudflare=True
)
if not logged_in:
print(json.dumps({"status": "logging_in"}), flush=True)