chore: remove accidentally committed files
This commit is contained in:
parent
38cd0482f0
commit
c95f7f4c53
5 changed files with 0 additions and 238 deletions
58
t/a.cc
58
t/a.cc
|
|
@ -1,58 +0,0 @@
|
|||
#include <bits/stdc++.h> // {{{
|
||||
|
||||
#include <version>
|
||||
#ifdef __cpp_lib_ranges_enumerate
|
||||
#include <ranges>
|
||||
namespace rv = std::views;
|
||||
namespace rs = std::ranges;
|
||||
#endif
|
||||
|
||||
#pragma GCC optimize("O2,unroll-loops")
|
||||
#pragma GCC target("avx2,bmi,bmi2,lzcnt,popcnt")
|
||||
|
||||
using namespace std;
|
||||
|
||||
using i32 = int32_t;
|
||||
using u32 = uint32_t;
|
||||
using i64 = int64_t;
|
||||
using u64 = uint64_t;
|
||||
using f64 = double;
|
||||
using f128 = long double;
|
||||
|
||||
#if __cplusplus >= 202002L
|
||||
template <typename T>
|
||||
constexpr T MIN = std::numeric_limits<T>::min();
|
||||
|
||||
template <typename T>
|
||||
constexpr T MAX = std::numeric_limits<T>::max();
|
||||
#endif
|
||||
|
||||
#ifdef LOCAL
|
||||
#define db(...) std::print(__VA_ARGS__)
|
||||
#define dbln(...) std::println(__VA_ARGS__)
|
||||
#else
|
||||
#define db(...)
|
||||
#define dbln(...)
|
||||
#endif
|
||||
// }}}
|
||||
|
||||
// Solve a single test case: currently a placeholder that emits a fixed line.
void solve() {
    static constexpr const char* kAnswer = "change\n";
    std::cout << kAnswer;
}
|
||||
|
||||
int main() { // {{{
|
||||
std::cin.exceptions(std::cin.failbit);
|
||||
#ifdef LOCAL
|
||||
std::cerr.rdbuf(std::cout.rdbuf());
|
||||
std::cout.setf(std::ios::unitbuf);
|
||||
std::cerr.setf(std::ios::unitbuf);
|
||||
#else
|
||||
std::cin.tie(nullptr)->sync_with_stdio(false);
|
||||
#endif
|
||||
u32 tc = 1;
|
||||
std::cin >> tc;
|
||||
for (u32 t = 0; t < tc; ++t) {
|
||||
solve();
|
||||
}
|
||||
return 0;
|
||||
} // }}}
|
||||
|
|
@ -1,67 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Reproduce CLI hang: go through asyncio.to_thread like the real code."""
|
||||
import asyncio
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, ".")
|
||||
|
||||
from scrapers.atcoder import _ensure_browser, _solve_turnstile
|
||||
from scrapers.codeforces import BASE_URL, _wait_for_gate_reload
|
||||
from scrapers.timeouts import BROWSER_SESSION_TIMEOUT
|
||||
|
||||
|
||||
def _test_submit():
    """Drive a headless Codeforces homepage + submit-page fetch synchronously.

    Mirrors the real CLI code path; the async entry point runs this via
    asyncio.to_thread. Returns "ok" if both fetches complete without hanging.
    """
    from scrapling.fetchers import StealthySession

    # Ensure a browser is available (helper shared with the atcoder scraper).
    _ensure_browser()

    # Reuse previously saved cookies so the session is authenticated if possible.
    cookie_cache = Path.home() / ".cache" / "cp-nvim" / "codeforces-cookies.json"
    saved_cookies = []
    if cookie_cache.exists():
        try:
            saved_cookies = json.loads(cookie_cache.read_text())
        except Exception:
            # Best effort: a corrupt cache just means an anonymous session.
            pass

    logged_in = False

    def check_login(page):
        # Heuristic: any "Logout" link on the page implies we are signed in.
        nonlocal logged_in
        logged_in = page.evaluate(
            "() => Array.from(document.querySelectorAll('a'))"
            ".some(a => a.textContent.includes('Logout'))"
        )
        print(f"logged_in: {logged_in}", flush=True)

    def submit_action(page):
        # If this prints, the submit-page fetch made it past the hang.
        print(f"ENTERED submit_action: url={page.url}", flush=True)

    with StealthySession(
        headless=True,
        timeout=BROWSER_SESSION_TIMEOUT,
        google_search=False,
        cookies=saved_cookies,
    ) as session:
        print("fetch homepage...", flush=True)
        session.fetch(f"{BASE_URL}/", page_action=check_login, network_idle=True)

        print("fetch submit page...", flush=True)
        session.fetch(
            f"{BASE_URL}/contest/1933/submit",
            page_action=submit_action,
        )
        print("DONE", flush=True)

    return "ok"
|
||||
|
||||
|
||||
async def main():
    """Dispatch the reproduction through asyncio.to_thread, like the real CLI."""
    print("Running via asyncio.to_thread...", flush=True)
    outcome = await asyncio.to_thread(_test_submit)
    print(f"Result: {outcome}", flush=True)


if __name__ == "__main__":
    asyncio.run(main())
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Simulate exactly what the CLI does."""
|
||||
import asyncio
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, ".")
|
||||
|
||||
# Minimal C++ program used as the submission payload.
SOURCE = '#include <bits/stdc++.h>\nusing namespace std;\nint main() { cout << 42; }\n'


async def main():
    """Mirror the CLI submit flow: build a scraper and submit SOURCE."""
    from scrapers.codeforces import CodeforcesScraper
    from scrapers.language_ids import get_language_id

    cf = CodeforcesScraper()
    creds = json.loads(os.environ.get("CP_CREDENTIALS", "{}"))
    # Fall back to id "89" when no language mapping is found.
    lang_id = get_language_id("codeforces", "cpp") or "89"

    print(f"source length: {len(SOURCE)}", flush=True)
    print(f"credentials keys: {list(creds.keys())}", flush=True)
    print(f"language_id: {lang_id}", flush=True)

    result = await cf.submit("1933", "a", SOURCE, lang_id, creds)
    print(result.model_dump_json(indent=2), flush=True)


if __name__ == "__main__":
    asyncio.run(main())
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Call _submit_headless directly, no asyncio."""
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, ".")
|
||||
|
||||
from scrapers.codeforces import _submit_headless

# Credentials come from the environment; default to an empty dict.
creds = json.loads(os.environ.get("CP_CREDENTIALS", "{}"))
# Call the headless submit helper directly — no asyncio layer — to isolate
# whether the hang lives below the event loop.
# NOTE(review): "89" is presumably a C++ language id — verify against language_ids.
result = _submit_headless("1933", "a", "int main(){}", "89", creds)
print(result.model_dump_json(indent=2))
|
||||
|
|
@ -1,70 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Pinpoint where session.fetch hangs on the submit page."""
|
||||
import json
|
||||
import sys
|
||||
import threading
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, ".")
|
||||
|
||||
from scrapers.atcoder import _ensure_browser
|
||||
from scrapers.codeforces import BASE_URL
|
||||
from scrapers.timeouts import BROWSER_SESSION_TIMEOUT
|
||||
|
||||
|
||||
def watchdog(label, timeout=20):
    """Hard-kill the whole process if it is still alive after `timeout` seconds.

    Intended to run on a daemon thread; prints which call stalled before
    exiting with status 1 via os._exit (bypasses cleanup on purpose).
    """
    import os
    import time

    time.sleep(timeout)
    print(f"WATCHDOG: {label} timed out after {timeout}s", flush=True)
    os._exit(1)
|
||||
|
||||
|
||||
def main():
    """Pinpoint where session.fetch hangs on the Codeforces submit page.

    Fetches the homepage (with a login check), then the submit page with a
    30-second watchdog armed; if the second fetch stalls, the watchdog kills
    the process and names the stalled call.
    """
    from scrapling.fetchers import StealthySession

    # Ensure a browser is available (helper shared with the atcoder scraper).
    _ensure_browser()

    # Load cached cookies so the session is authenticated when possible.
    cookie_cache = Path.home() / ".cache" / "cp-nvim" / "codeforces-cookies.json"
    saved_cookies = []
    if cookie_cache.exists():
        try:
            saved_cookies = json.loads(cookie_cache.read_text())
        except Exception:
            # Best effort: a corrupt cookie cache just means anonymous access.
            pass

    def check_login(page):
        # A visible "Logout" link implies an authenticated session.
        logged_in = page.evaluate(
            "() => Array.from(document.querySelectorAll('a'))"
            ".some(a => a.textContent.includes('Logout'))"
        )
        print(f"logged_in: {logged_in}", flush=True)

    def submit_action(page):
        # Reaching this callback means the submit-page fetch did not hang.
        print(f"submit_action ENTERED: url={page.url} title={page.title()}", flush=True)

    try:
        with StealthySession(
            headless=True,
            timeout=BROWSER_SESSION_TIMEOUT,
            google_search=False,
            cookies=saved_cookies,
        ) as session:
            print("1. Homepage...", flush=True)
            session.fetch(f"{BASE_URL}/", page_action=check_login, network_idle=True)

            print("2. Submit page (no network_idle, no solve_cloudflare)...", flush=True)
            # Arm the watchdog on a daemon thread so it cannot keep the
            # process alive if the fetch actually finishes.
            t = threading.Thread(target=watchdog, args=("session.fetch submit", 30), daemon=True)
            t.start()

            session.fetch(
                f"{BASE_URL}/contest/1933/submit",
                page_action=submit_action,
            )
            print("3. Done!", flush=True)
    except Exception as e:
        print(f"FATAL: {type(e).__name__}: {e}", flush=True)


if __name__ == "__main__":
    main()
|
||||
Loading…
Add table
Add a link
Reference in a new issue