diff --git a/lua/cp/scraper.lua b/lua/cp/scraper.lua index 6980c7d..8f7deac 100644 --- a/lua/cp/scraper.lua +++ b/lua/cp/scraper.lua @@ -316,15 +316,14 @@ function M.submit( contest_id, problem_id, language, - source_code, + source_file, credentials, on_status, callback ) local done = false - run_scraper(platform, 'submit', { contest_id, problem_id, language }, { + run_scraper(platform, 'submit', { contest_id, problem_id, language, source_file }, { ndjson = true, - stdin = source_code, env_extra = { CP_CREDENTIALS = vim.json.encode(credentials) }, on_event = function(ev) if ev.credentials ~= nil then diff --git a/lua/cp/submit.lua b/lua/cp/submit.lua index 7dc9a71..b9c7dbb 100644 --- a/lua/cp/submit.lua +++ b/lua/cp/submit.lua @@ -53,9 +53,7 @@ function M.submit(opts) end prompt_credentials(platform, function(creds) - local source_lines = vim.fn.readfile(source_file) - local source_code = table.concat(source_lines, '\n') - + vim.cmd.update() vim.notify('[cp.nvim] Submitting...', vim.log.levels.INFO) require('cp.scraper').submit( @@ -63,7 +61,7 @@ function M.submit(opts) contest_id, problem_id, language, - source_code, + source_file, creds, function(ev) vim.schedule(function() diff --git a/scrapers/atcoder.py b/scrapers/atcoder.py index 8b3db72..61e4213 100644 --- a/scrapers/atcoder.py +++ b/scrapers/atcoder.py @@ -37,11 +37,6 @@ from .timeouts import ( HTTP_TIMEOUT, ) -_LANGUAGE_ID_EXTENSION = { - "6017": "cc", - "6082": "py", -} - MIB_TO_MB = 1.048576 BASE_URL = "https://atcoder.jp" ARCHIVE_URL = f"{BASE_URL}/contests/archive" @@ -297,7 +292,7 @@ def _ensure_browser() -> None: def _submit_headless( contest_id: str, problem_id: str, - source_code: str, + file_path: str, language_id: str, credentials: dict[str, str], _retried: bool = False, @@ -362,15 +357,7 @@ def _submit_headless( f'select[name="data.LanguageId"] option[value="{language_id}"]' ).wait_for(state="attached", timeout=BROWSER_ELEMENT_WAIT) page.select_option('select[name="data.LanguageId"]', 
language_id) - ext = _LANGUAGE_ID_EXTENSION.get(language_id, "txt") - page.set_input_files( - "#input-open-file", - { - "name": f"solution.{ext}", - "mimeType": "text/plain", - "buffer": source_code.encode(), - }, - ) + page.set_input_files("#input-open-file", file_path) page.wait_for_timeout(BROWSER_SETTLE_DELAY) page.locator('button[type="submit"]').click() page.wait_for_url( @@ -423,7 +410,7 @@ def _submit_headless( return _submit_headless( contest_id, problem_id, - source_code, + file_path, language_id, credentials, _retried=True, @@ -581,7 +568,7 @@ class AtcoderScraper(BaseScraper): self, contest_id: str, problem_id: str, - source_code: str, + file_path: str, language_id: str, credentials: dict[str, str], ) -> SubmitResult: @@ -589,7 +576,7 @@ class AtcoderScraper(BaseScraper): _submit_headless, contest_id, problem_id, - source_code, + file_path, language_id, credentials, ) @@ -651,15 +638,14 @@ async def main_async() -> int: return 0 if contest_result.success else 1 if mode == "submit": - if len(sys.argv) != 5: + if len(sys.argv) != 6: print( SubmitResult( success=False, - error="Usage: atcoder.py submit <contest_id> <problem_id> <language>", + error="Usage: atcoder.py submit <contest_id> <problem_id> <language> <file_path>", ).model_dump_json() ) return 1 - source_code = sys.stdin.read() creds_raw = os.environ.get("CP_CREDENTIALS", "{}") try: credentials = json.loads(creds_raw) @@ -667,7 +653,7 @@ async def main_async() -> int: credentials = {} language_id = get_language_id("atcoder", sys.argv[4]) or sys.argv[4] submit_result = await scraper.submit( - sys.argv[2], sys.argv[3], source_code, language_id, credentials + sys.argv[2], sys.argv[3], sys.argv[5], language_id, credentials ) print(submit_result.model_dump_json()) return 0 if submit_result.success else 1 diff --git a/scrapers/base.py b/scrapers/base.py index ed0636b..c77e293 100644 --- a/scrapers/base.py +++ b/scrapers/base.py @@ -53,7 +53,7 @@ class BaseScraper(ABC): self, contest_id: str, problem_id: str, - source_code: str, + file_path: str, language_id: str, credentials: dict[str,
str], ) -> SubmitResult: ... @@ -114,14 +114,13 @@ class BaseScraper(ABC): return 0 if result.success else 1 case "submit": - if len(args) != 5: + if len(args) != 6: print( self._submit_error( - "Usage: submit <contest_id> <problem_id> <language>" + "Usage: submit <contest_id> <problem_id> <language> <file_path>" ).model_dump_json() ) return 1 - source_code = sys.stdin.read() creds_raw = os.environ.get("CP_CREDENTIALS", "{}") try: credentials = json.loads(creds_raw) @@ -129,7 +128,7 @@ class BaseScraper(ABC): credentials = {} language_id = get_language_id(self.platform_name, args[4]) or args[4] result = await self.submit( - args[2], args[3], source_code, language_id, credentials + args[2], args[3], args[5], language_id, credentials ) print(result.model_dump_json()) return 0 if result.success else 1 diff --git a/scrapers/codeforces.py b/scrapers/codeforces.py index 895a511..75d9915 100644 --- a/scrapers/codeforces.py +++ b/scrapers/codeforces.py @@ -289,7 +289,7 @@ class CodeforcesScraper(BaseScraper): self, contest_id: str, problem_id: str, - source_code: str, + file_path: str, language_id: str, credentials: dict[str, str], ) -> SubmitResult: @@ -297,7 +297,7 @@ class CodeforcesScraper(BaseScraper): _submit_headless, contest_id, problem_id, - source_code, + file_path, language_id, credentials, ) @@ -306,13 +306,15 @@ class CodeforcesScraper(BaseScraper): def _submit_headless( contest_id: str, problem_id: str, - source_code: str, + file_path: str, language_id: str, credentials: dict[str, str], _retried: bool = False, ) -> SubmitResult: from pathlib import Path + source_code = Path(file_path).read_text() + try: from scrapling.fetchers import StealthySession # type: ignore[import-untyped,unresolved-import] except ImportError: @@ -451,7 +453,7 @@ def _submit_headless( return _submit_headless( contest_id, problem_id, - source_code, + file_path, language_id, credentials, _retried=True,