feat: caching
This commit is contained in:
parent
64c7559c78
commit
40117c2cf1
10 changed files with 764 additions and 175 deletions
|
|
@ -1,5 +1,6 @@
|
|||
local M = {}
|
||||
local logger = require("cp.log")
|
||||
local cache = require("cp.cache")
|
||||
|
||||
local function get_plugin_path()
|
||||
local plugin_path = debug.getinfo(1, "S").source:sub(2)
|
||||
|
|
@ -35,7 +36,72 @@ local function setup_python_env()
|
|||
return true
|
||||
end
|
||||
|
||||
---Resolve the problem list for a contest, serving from the cache when
---possible and otherwise invoking the Python metadata scraper.
---@param contest_type string
---@param contest_id string
---@return {success: boolean, problems?: table[], error?: string}
function M.scrape_contest_metadata(contest_type, contest_id)
  -- Shape a failure result; every error path below returns this form.
  local function fail(message)
    return { success = false, error = message }
  end

  cache.load()

  -- Cache hit: answer without touching the scraper at all.
  local cached = cache.get_contest_data(contest_type, contest_id)
  if cached then
    return { success = true, problems = cached.problems }
  end

  if not setup_python_env() then
    return fail("Python environment setup failed")
  end

  local plugin_path = get_plugin_path()
  local scraper = plugin_path .. "/scrapers/" .. contest_type .. ".py"

  -- Run the scraper synchronously via `uv run`; allow up to 30 seconds.
  local proc = vim.system(
    { "uv", "run", scraper, "metadata", contest_id },
    { cwd = plugin_path, text = true, timeout = 30000 }
  ):wait()

  if proc.code ~= 0 then
    return fail("Failed to run metadata scraper: " .. (proc.stderr or "Unknown error"))
  end

  -- pcall guards against malformed/empty scraper output; on failure
  -- `decoded` holds the decode error, not data.
  local decode_ok, decoded = pcall(vim.json.decode, proc.stdout)
  if not decode_ok then
    return fail("Failed to parse metadata scraper output: " .. tostring(decoded))
  end

  -- Scraper reported a structured failure; hand it straight back.
  if not decoded.success then
    return decoded
  end

  -- CSES output is keyed by category; only the "CSES Problem Set" list is
  -- used here. Other contest types carry a flat `problems` array.
  local problems
  if contest_type == "cses" then
    local categories = decoded.categories or {}
    problems = categories["CSES Problem Set"] or {}
  else
    problems = decoded.problems or {}
  end

  cache.set_contest_data(contest_type, contest_id, problems)
  return { success = true, problems = problems }
end
|
||||
|
||||
---@param ctx ProblemContext
|
||||
---@return {success: boolean, problem_id: string, test_count?: number, url?: string, error?: string}
|
||||
function M.scrape_problem(ctx)
|
||||
ensure_io_directory()
|
||||
|
||||
|
|
@ -50,6 +116,7 @@ function M.scrape_problem(ctx)
|
|||
if not setup_python_env() then
|
||||
return {
|
||||
success = false,
|
||||
problem_id = ctx.problem_name,
|
||||
error = "Python environment setup failed",
|
||||
}
|
||||
end
|
||||
|
|
@ -59,9 +126,9 @@ function M.scrape_problem(ctx)
|
|||
|
||||
local args
|
||||
if ctx.contest == "cses" then
|
||||
args = { "uv", "run", scraper_path, ctx.contest_id }
|
||||
args = { "uv", "run", scraper_path, "tests", ctx.contest_id }
|
||||
else
|
||||
args = { "uv", "run", scraper_path, ctx.contest_id, ctx.problem_id }
|
||||
args = { "uv", "run", scraper_path, "tests", ctx.contest_id, ctx.problem_id }
|
||||
end
|
||||
|
||||
local result = vim.system(args, {
|
||||
|
|
@ -73,7 +140,8 @@ function M.scrape_problem(ctx)
|
|||
if result.code ~= 0 then
|
||||
return {
|
||||
success = false,
|
||||
error = "Failed to run scraper: " .. (result.stderr or "Unknown error"),
|
||||
problem_id = ctx.problem_name,
|
||||
error = "Failed to run tests scraper: " .. (result.stderr or "Unknown error"),
|
||||
}
|
||||
end
|
||||
|
||||
|
|
@ -81,7 +149,8 @@ function M.scrape_problem(ctx)
|
|||
if not ok then
|
||||
return {
|
||||
success = false,
|
||||
error = "Failed to parse scraper output: " .. tostring(data),
|
||||
problem_id = ctx.problem_name,
|
||||
error = "Failed to parse tests scraper output: " .. tostring(data),
|
||||
}
|
||||
end
|
||||
|
||||
|
|
@ -89,7 +158,7 @@ function M.scrape_problem(ctx)
|
|||
return data
|
||||
end
|
||||
|
||||
if #data.test_cases > 0 then
|
||||
if data.test_cases and #data.test_cases > 0 then
|
||||
local all_inputs = {}
|
||||
local all_outputs = {}
|
||||
|
||||
|
|
@ -113,7 +182,7 @@ function M.scrape_problem(ctx)
|
|||
return {
|
||||
success = true,
|
||||
problem_id = ctx.problem_name,
|
||||
test_count = #data.test_cases,
|
||||
test_count = data.test_cases and #data.test_cases or 0,
|
||||
url = data.url,
|
||||
}
|
||||
end
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue