ci: remove unused var

parent 3ecd200da7
commit 38cd0482f0

6 changed files with 186 additions and 3 deletions
@@ -12,7 +12,6 @@ from bs4 import BeautifulSoup, Tag
 from curl_cffi import requests as curl_requests
 
 from .base import BaseScraper, extract_precision
-from .language_ids import get_language_id
 from .models import (
     ContestListResult,
     ContestSummary,
@@ -37,7 +37,7 @@ constexpr T MAX = std::numeric_limits<T>::max();
 // }}}
 
 void solve() {
-  cout << "hi\n";
+  std::cout << "change\n";
 }
 
 int main() { // {{{
@@ -49,6 +49,10 @@ int main() { // {{{
 #else
   std::cin.tie(nullptr)->sync_with_stdio(false);
 #endif
+  u32 tc = 1;
+  std::cin >> tc;
+  for (u32 t = 0; t < tc; ++t) {
   solve();
+  }
   return 0;
 } // }}}
t/cf_cli_debug.py (new file, 67 lines)
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+"""Reproduce CLI hang: go through asyncio.to_thread like the real code."""
+import asyncio
+import json
+import sys
+from pathlib import Path
+
+sys.path.insert(0, ".")
+
+from scrapers.atcoder import _ensure_browser, _solve_turnstile
+from scrapers.codeforces import BASE_URL, _wait_for_gate_reload
+from scrapers.timeouts import BROWSER_SESSION_TIMEOUT
+
+
+def _test_submit():
+    from scrapling.fetchers import StealthySession
+
+    _ensure_browser()
+
+    cookie_cache = Path.home() / ".cache" / "cp-nvim" / "codeforces-cookies.json"
+    saved_cookies = []
+    if cookie_cache.exists():
+        try:
+            saved_cookies = json.loads(cookie_cache.read_text())
+        except Exception:
+            pass
+
+    logged_in = False
+
+    def check_login(page):
+        nonlocal logged_in
+        logged_in = page.evaluate(
+            "() => Array.from(document.querySelectorAll('a'))"
+            ".some(a => a.textContent.includes('Logout'))"
+        )
+        print(f"logged_in: {logged_in}", flush=True)
+
+    def submit_action(page):
+        print(f"ENTERED submit_action: url={page.url}", flush=True)
+
+    with StealthySession(
+        headless=True,
+        timeout=BROWSER_SESSION_TIMEOUT,
+        google_search=False,
+        cookies=saved_cookies,
+    ) as session:
+        print("fetch homepage...", flush=True)
+        session.fetch(f"{BASE_URL}/", page_action=check_login, network_idle=True)
+
+        print("fetch submit page...", flush=True)
+        session.fetch(
+            f"{BASE_URL}/contest/1933/submit",
+            page_action=submit_action,
+        )
+        print("DONE", flush=True)
+
+    return "ok"
+
+
+async def main():
+    print("Running via asyncio.to_thread...", flush=True)
+    result = await asyncio.to_thread(_test_submit)
+    print(f"Result: {result}", flush=True)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
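What this repro isolates is the dispatch path, not the browser: all blocking StealthySession work runs on a worker thread while the event loop awaits asyncio.to_thread, so a hang here implicates the threaded callee rather than the async plumbing. A minimal sketch of that shape, with time.sleep standing in for the real fetch (no scrapling required):

import asyncio
import time


def blocking_fetch() -> str:
    # Stand-in for the synchronous StealthySession work.
    time.sleep(1)
    return "ok"


async def main() -> None:
    # If this await never completes, the hang lives in the threaded callee,
    # not in the event loop that dispatched it.
    result = await asyncio.to_thread(blocking_fetch)
    print(f"Result: {result}", flush=True)


asyncio.run(main())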
t/cf_cli_real.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+"""Simulate exactly what the CLI does."""
+import asyncio
+import json
+import os
+import sys
+
+sys.path.insert(0, ".")
+
+SOURCE = '#include <bits/stdc++.h>\nusing namespace std;\nint main() { cout << 42; }\n'
+
+
+async def main():
+    from scrapers.codeforces import CodeforcesScraper
+    from scrapers.language_ids import get_language_id
+
+    scraper = CodeforcesScraper()
+    credentials = json.loads(os.environ.get("CP_CREDENTIALS", "{}"))
+    language_id = get_language_id("codeforces", "cpp") or "89"
+
+    print(f"source length: {len(SOURCE)}", flush=True)
+    print(f"credentials keys: {list(credentials.keys())}", flush=True)
+    print(f"language_id: {language_id}", flush=True)
+
+    result = await scraper.submit("1933", "a", SOURCE, language_id, credentials)
+    print(result.model_dump_json(indent=2), flush=True)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
t/cf_exact.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+"""Call _submit_headless directly, no asyncio."""
+import json
+import os
+import sys
+
+sys.path.insert(0, ".")
+
+from scrapers.codeforces import _submit_headless
+
+creds = json.loads(os.environ.get("CP_CREDENTIALS", "{}"))
+result = _submit_headless("1933", "a", "int main(){}", "89", creds)
+print(result.model_dump_json(indent=2))
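Both cf_cli_real.py and cf_exact.py read credentials from the CP_CREDENTIALS environment variable as a JSON object, which lets the two scripts bisect the hang: same headless submit, with and without the asyncio layer. The key names below are placeholders, not the scraper's actual schema, which these diffs do not show:

CP_CREDENTIALS='{"handle": "...", "password": "..."}' python t/cf_exact.py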
t/cf_hang_debug.py (new file, 70 lines)
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+"""Pinpoint where session.fetch hangs on the submit page."""
+import json
+import sys
+import threading
+from pathlib import Path
+
+sys.path.insert(0, ".")
+
+from scrapers.atcoder import _ensure_browser
+from scrapers.codeforces import BASE_URL
+from scrapers.timeouts import BROWSER_SESSION_TIMEOUT
+
+
+def watchdog(label, timeout=20):
+    import time
+    time.sleep(timeout)
+    print(f"WATCHDOG: {label} timed out after {timeout}s", flush=True)
+    import os
+    os._exit(1)
+
+
+def main():
+    from scrapling.fetchers import StealthySession
+
+    _ensure_browser()
+
+    cookie_cache = Path.home() / ".cache" / "cp-nvim" / "codeforces-cookies.json"
+    saved_cookies = []
+    if cookie_cache.exists():
+        try:
+            saved_cookies = json.loads(cookie_cache.read_text())
+        except Exception:
+            pass
+
+    def check_login(page):
+        logged_in = page.evaluate(
+            "() => Array.from(document.querySelectorAll('a'))"
+            ".some(a => a.textContent.includes('Logout'))"
+        )
+        print(f"logged_in: {logged_in}", flush=True)
+
+    def submit_action(page):
+        print(f"submit_action ENTERED: url={page.url} title={page.title()}", flush=True)
+
+    try:
+        with StealthySession(
+            headless=True,
+            timeout=BROWSER_SESSION_TIMEOUT,
+            google_search=False,
+            cookies=saved_cookies,
+        ) as session:
+            print("1. Homepage...", flush=True)
+            session.fetch(f"{BASE_URL}/", page_action=check_login, network_idle=True)
+
+            print("2. Submit page (no network_idle, no solve_cloudflare)...", flush=True)
+            t = threading.Thread(target=watchdog, args=("session.fetch submit", 30), daemon=True)
+            t.start()
+
+            session.fetch(
+                f"{BASE_URL}/contest/1933/submit",
+                page_action=submit_action,
+            )
+            print("3. Done!", flush=True)
+    except Exception as e:
+        print(f"FATAL: {type(e).__name__}: {e}", flush=True)


if __name__ == "__main__":
    main()
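The watchdog used above is a small reusable pattern: a daemon thread that hard-exits the process when a guarded step overruns, since a wedged browser call never returns control for a normal sys.exit(). A standalone sketch of the same idea:

import os
import threading
import time


def watchdog(label: str, timeout: float) -> None:
    time.sleep(timeout)
    print(f"WATCHDOG: {label} timed out after {timeout}s", flush=True)
    os._exit(1)  # hard exit: a hung foreground thread would never unwind normally


# Arm a 5-second watchdog around a step that finishes in time. Because the
# thread is a daemon, it dies with the process on a normal exit and never fires.
threading.Thread(target=watchdog, args=("slow step", 5), daemon=True).start()
time.sleep(1)
print("done", flush=True)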