feat(scraper): add precision extraction, start_time, and submit support
Problem: problem pages contain floating-point precision requirements and contest start timestamps that were not being extracted or stored. The submit workflow also needed a foundation in the scraper layer. Solution: add extract_precision() to base.py and propagate it through all scrapers into the cache. Add start_time to ContestSummary and extract it from AtCoder and Codeforces. Add a SubmitResult model, an abstract submit() method, a submit CLI case with get_language_id() resolution, stdin/env_extra support in run_scraper, and a full AtCoder submit implementation; stub the remaining platforms.
This commit is contained in:
parent
865e3b5928
commit
90bd13580b
9 changed files with 245 additions and 20 deletions
|
|
@ -27,7 +27,7 @@
|
|||
---@field multi_test? boolean
|
||||
---@field memory_mb? number
|
||||
---@field timeout_ms? number
|
||||
---@field epsilon? number
|
||||
---@field precision? number
|
||||
---@field combined_test? CombinedTest
|
||||
---@field test_cases TestCase[]
|
||||
|
||||
|
|
@ -231,7 +231,8 @@ function M.set_test_cases(
|
|||
timeout_ms,
|
||||
memory_mb,
|
||||
interactive,
|
||||
multi_test
|
||||
multi_test,
|
||||
precision
|
||||
)
|
||||
vim.validate({
|
||||
platform = { platform, 'string' },
|
||||
|
|
@ -243,6 +244,7 @@ function M.set_test_cases(
|
|||
memory_mb = { memory_mb, { 'number', 'nil' }, true },
|
||||
interactive = { interactive, { 'boolean', 'nil' }, true },
|
||||
multi_test = { multi_test, { 'boolean', 'nil' }, true },
|
||||
precision = { precision, { 'number', 'nil' }, true },
|
||||
})
|
||||
|
||||
local index = cache_data[platform][contest_id].index_map[problem_id]
|
||||
|
|
@ -253,6 +255,7 @@ function M.set_test_cases(
|
|||
cache_data[platform][contest_id].problems[index].memory_mb = memory_mb
|
||||
cache_data[platform][contest_id].problems[index].interactive = interactive
|
||||
cache_data[platform][contest_id].problems[index].multi_test = multi_test
|
||||
cache_data[platform][contest_id].problems[index].precision = precision
|
||||
|
||||
M.save()
|
||||
end
|
||||
|
|
@ -278,7 +281,7 @@ end
|
|||
---@param contest_id string
|
||||
---@param problem_id? string
|
||||
---@return number?
|
||||
function M.get_epsilon(platform, contest_id, problem_id)
|
||||
function M.get_precision(platform, contest_id, problem_id)
|
||||
vim.validate({
|
||||
platform = { platform, 'string' },
|
||||
contest_id = { contest_id, 'string' },
|
||||
|
|
@ -299,7 +302,7 @@ function M.get_epsilon(platform, contest_id, problem_id)
|
|||
end
|
||||
|
||||
local problem_data = cache_data[platform][contest_id].problems[index]
|
||||
return problem_data and problem_data.epsilon or nil
|
||||
return problem_data and problem_data.precision or nil
|
||||
end
|
||||
|
||||
---@param file_path string
|
||||
|
|
@ -349,11 +352,24 @@ function M.set_contest_summaries(platform, contests)
|
|||
cache_data[platform][contest.id] = cache_data[platform][contest.id] or {}
|
||||
cache_data[platform][contest.id].display_name = contest.display_name
|
||||
cache_data[platform][contest.id].name = contest.name
|
||||
if contest.start_time then
|
||||
cache_data[platform][contest.id].start_time = contest.start_time
|
||||
end
|
||||
end
|
||||
|
||||
M.save()
|
||||
end
|
||||
|
||||
---@param platform string
|
||||
---@param contest_id string
|
||||
---@return integer?
|
||||
function M.get_contest_start_time(platform, contest_id)
|
||||
if not cache_data[platform] or not cache_data[platform][contest_id] then
|
||||
return nil
|
||||
end
|
||||
return cache_data[platform][contest_id].start_time
|
||||
end
|
||||
|
||||
function M.clear_all()
|
||||
cache_data = {}
|
||||
M.save()
|
||||
|
|
|
|||
|
|
@ -56,6 +56,12 @@ local function run_scraper(platform, subcommand, args, opts)
|
|||
env.PYTHONPATH = ''
|
||||
env.CONDA_PREFIX = ''
|
||||
|
||||
if opts and opts.env_extra then
|
||||
for k, v in pairs(opts.env_extra) do
|
||||
env[k] = v
|
||||
end
|
||||
end
|
||||
|
||||
if opts and opts.ndjson then
|
||||
local uv = vim.uv
|
||||
local stdout = uv.new_pipe(false)
|
||||
|
|
@ -126,6 +132,9 @@ local function run_scraper(platform, subcommand, args, opts)
|
|||
end
|
||||
|
||||
local sysopts = { text = true, timeout = 30000, env = env, cwd = plugin_path }
|
||||
if opts and opts.stdin then
|
||||
sysopts.stdin = opts.stdin
|
||||
end
|
||||
if opts and opts.sync then
|
||||
local result = vim.system(cmd, sysopts):wait()
|
||||
return syshandle(result)
|
||||
|
|
@ -228,6 +237,7 @@ function M.scrape_all_tests(platform, contest_id, callback)
|
|||
memory_mb = ev.memory_mb or 0,
|
||||
interactive = ev.interactive or false,
|
||||
multi_test = ev.multi_test or false,
|
||||
precision = ev.precision,
|
||||
problem_id = ev.problem_id,
|
||||
})
|
||||
end
|
||||
|
|
@ -236,4 +246,21 @@ function M.scrape_all_tests(platform, contest_id, callback)
|
|||
})
|
||||
end
|
||||
|
||||
function M.submit(platform, contest_id, problem_id, language, source_code, credentials, callback)
|
||||
local creds_json = vim.json.encode(credentials)
|
||||
run_scraper(platform, 'submit', { contest_id, problem_id, language }, {
|
||||
stdin = source_code,
|
||||
env_extra = { CP_CREDENTIALS = creds_json },
|
||||
on_exit = function(result)
|
||||
if type(callback) == 'function' then
|
||||
if result and result.success then
|
||||
callback(result.data or { success = true })
|
||||
else
|
||||
callback({ success = false, error = result and result.error or 'unknown' })
|
||||
end
|
||||
end
|
||||
end,
|
||||
})
|
||||
end
|
||||
|
||||
return M
|
||||
|
|
|
|||
|
|
@ -130,7 +130,8 @@ local function start_tests(platform, contest_id, problems)
|
|||
ev.timeout_ms or 0,
|
||||
ev.memory_mb or 0,
|
||||
ev.interactive,
|
||||
ev.multi_test
|
||||
ev.multi_test,
|
||||
ev.precision
|
||||
)
|
||||
|
||||
local io_state = state.get_io_view_state()
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue