fix(login): remove cookie fast-path from login subcommand

Problem: `:CP <platform> login` short-circuited on cached cookies/tokens.
If an old session was still valid, the new credentials were never tested,
so the user got "login successful" even with garbage input.

Solution: Always validate credentials against the platform in the login
path. Remove cookie/token loading from `_login_headless` (AtCoder),
`_login_headless_cf` (CF), `_login_headless_codechef` (CodeChef), and
`login` (CSES). For USACO submit, replace the `_check_usaco_login`
roundtrip with cookie trust + retry-on-auth-failure (the Kattis pattern).
Submit paths are unchanged — cookie fast-paths remain for contest speed.

Closes #331
This commit is contained in:
Barrett Ruth 2026-03-06 17:52:05 -05:00
parent 8465e70772
commit 84343d2045
5 changed files with 110 additions and 141 deletions

View file

@@ -306,12 +306,6 @@ def _login_headless(credentials: dict[str, str]) -> LoginResult:
     cookie_cache = Path.home() / ".cache" / "cp-nvim" / "atcoder-cookies.json"
     cookie_cache.parent.mkdir(parents=True, exist_ok=True)
-    saved_cookies: list[dict[str, Any]] = []
-    if cookie_cache.exists():
-        try:
-            saved_cookies = json.loads(cookie_cache.read_text())
-        except Exception:
-            pass
     logged_in = False
     login_error: str | None = None
@@ -340,15 +334,7 @@ def _login_headless(credentials: dict[str, str]) -> LoginResult:
         headless=True,
         timeout=BROWSER_SESSION_TIMEOUT,
         google_search=False,
-        cookies=saved_cookies if saved_cookies else [],
     ) as session:
-        if saved_cookies:
-            print(json.dumps({"status": "checking_login"}), flush=True)
-            session.fetch(
-                f"{BASE_URL}/home", page_action=check_login, network_idle=True
-            )
-        if not logged_in:
         print(json.dumps({"status": "logging_in"}), flush=True)
         session.fetch(
             f"{BASE_URL}/login",

View file

@@ -65,12 +65,6 @@ def _login_headless_codechef(credentials: dict[str, str]) -> LoginResult:
     _ensure_browser()
     _COOKIE_PATH.parent.mkdir(parents=True, exist_ok=True)
-    saved_cookies: list[dict[str, Any]] = []
-    if _COOKIE_PATH.exists():
-        try:
-            saved_cookies = json.loads(_COOKIE_PATH.read_text())
-        except Exception:
-            pass
     logged_in = False
     login_error: str | None = None
@@ -100,15 +94,7 @@ def _login_headless_codechef(credentials: dict[str, str]) -> LoginResult:
         headless=True,
         timeout=BROWSER_SESSION_TIMEOUT,
         google_search=False,
-        cookies=saved_cookies if saved_cookies else [],
     ) as session:
-        if saved_cookies:
-            print(json.dumps({"status": "checking_login"}), flush=True)
-            session.fetch(
-                f"{BASE_URL}/", page_action=check_login, network_idle=True
-            )
-        if not logged_in:
         print(json.dumps({"status": "logging_in"}), flush=True)
         session.fetch(f"{BASE_URL}/login", page_action=login_action)
         if login_error:

View file

@@ -348,12 +348,6 @@ def _login_headless_cf(credentials: dict[str, str]) -> LoginResult:
     cookie_cache = Path.home() / ".cache" / "cp-nvim" / "codeforces-cookies.json"
     cookie_cache.parent.mkdir(parents=True, exist_ok=True)
-    saved_cookies: list[dict[str, Any]] = []
-    if cookie_cache.exists():
-        try:
-            saved_cookies = json.loads(cookie_cache.read_text())
-        except Exception:
-            pass
     logged_in = False
     login_error: str | None = None
@@ -388,17 +382,7 @@ def _login_headless_cf(credentials: dict[str, str]) -> LoginResult:
         headless=True,
         timeout=BROWSER_SESSION_TIMEOUT,
         google_search=False,
-        cookies=saved_cookies if saved_cookies else [],
     ) as session:
-        if saved_cookies:
-            print(json.dumps({"status": "checking_login"}), flush=True)
-            session.fetch(
-                f"{BASE_URL}/",
-                page_action=check_login,
-                network_idle=True,
-            )
-        if not logged_in:
         print(json.dumps({"status": "logging_in"}), flush=True)
         session.fetch(
             f"{BASE_URL}/enter",

View file

@@ -239,21 +239,6 @@ class CSESScraper(BaseScraper):
             return self._login_error("Missing username or password")
         async with httpx.AsyncClient(follow_redirects=True) as client:
-            token = credentials.get("token")
-            if token:
-                print(json.dumps({"status": "checking_login"}), flush=True)
-                if await self._check_token(client, token):
-                    return LoginResult(
-                        success=True,
-                        error="",
-                        credentials={
-                            "username": username,
-                            "password": password,
-                            "token": token,
-                        },
-                    )
             print(json.dumps({"status": "logging_in"}), flush=True)
             token = await self._web_login(client, username, password)
             if not token:

View file

@@ -423,11 +423,7 @@ class USACOScraper(BaseScraper):
         async with httpx.AsyncClient(follow_redirects=True) as client:
             await _load_usaco_cookies(client)
-            print(json.dumps({"status": "checking_login"}), flush=True)
-            logged_in = bool(client.cookies) and await _check_usaco_login(
-                client, username
-            )
-            if not logged_in:
+            if not client.cookies:
                 print(json.dumps({"status": "logging_in"}), flush=True)
                 try:
                     ok = await _do_usaco_login(client, username, password)
@@ -437,6 +433,34 @@ class USACOScraper(BaseScraper):
                     return self._submit_error("Login failed (bad credentials?)")
                 await _save_usaco_cookies(client)
+            result = await self._do_submit(
+                client, problem_id, language_id, source
+            )
+            if result.success or result.error != "auth_failure":
+                return result
+            client.cookies.clear()
+            print(json.dumps({"status": "logging_in"}), flush=True)
+            try:
+                ok = await _do_usaco_login(client, username, password)
+            except Exception as e:
+                return self._submit_error(f"Login failed: {e}")
+            if not ok:
+                return self._submit_error("Login failed (bad credentials?)")
+            await _save_usaco_cookies(client)
+            return await self._do_submit(
+                client, problem_id, language_id, source
+            )
+    async def _do_submit(
+        self,
+        client: httpx.AsyncClient,
+        problem_id: str,
+        language_id: str,
+        source: bytes,
+    ) -> SubmitResult:
         print(json.dumps({"status": "submitting"}), flush=True)
         try:
             page_r = await client.get(
@@ -444,6 +468,8 @@ class USACOScraper(BaseScraper):
                 headers=HEADERS,
                 timeout=HTTP_TIMEOUT,
             )
+            if "login" in page_r.url.path.lower() or "Login" in page_r.text[:2000]:
+                return self._submit_error("auth_failure")
             form_url, hidden_fields, lang_val = _parse_submit_form(
                 page_r.text, language_id
             )
@@ -469,6 +495,8 @@ class USACOScraper(BaseScraper):
             try:
                 resp = r.json()
+                if resp.get("code") == 0 and "login" in resp.get("message", "").lower():
+                    return self._submit_error("auth_failure")
                 sid = str(resp.get("submission_id", resp.get("id", "")))
             except Exception:
                 sid = ""