feat: make autocomplete more sophisticated

This commit is contained in:
Barrett Ruth 2025-09-15 14:32:44 -04:00
parent f743b75a59
commit 5417da9b52
3 changed files with 10 additions and 7 deletions

View file

@@ -41,8 +41,8 @@ local function set_platform(platform)
     end
     state.platform = platform
-    vim.fs.mkdir("build", { parents = true })
-    vim.fs.mkdir("io", { parents = true })
+    vim.fn.mkdir("build", "p")
+    vim.fn.mkdir("io", "p")
     return true
 end

View file

@@ -26,8 +26,10 @@ end, {
         end, lang_completions)
     end
-    if ArgLead == "--lang" then
-        return { "--lang" }
+    if ArgLead:match("^%-") and not ArgLead:match("^--lang") then
+        return vim.tbl_filter(function(completion)
+            return completion:find(ArgLead, 1, true) == 1
+        end, { "--lang" })
     end
     local args = vim.split(vim.trim(CmdLine), "%s+")
@@ -43,7 +45,6 @@ end, {
     if num_args == 2 then
         local candidates = { "--lang" }
-        vim.list_extend(candidates, platforms)
         vim.list_extend(candidates, actions)
         local cp = require("cp")
         local context = cp.get_current_context()
@@ -56,6 +57,8 @@ end, {
                 table.insert(candidates, problem.id)
             end
         end
+    else
+        vim.list_extend(candidates, platforms)
     end
     return vim.tbl_filter(function(cmd)
         return cmd:find(ArgLead, 1, true) == 1

View file

@@ -30,7 +30,7 @@ def scrape(url: str) -> list[tuple[str, str]]:
             lines = [div.get_text().strip() for div in divs]
             text = "\n".join(lines)
         else:
-            text = inp_pre.get_text().replace("\r", "")
+            text = inp_pre.get_text().replace("\r", "").strip()
         all_inputs.append(text)
     for out_section in output_sections:
@@ -41,7 +41,7 @@ def scrape(url: str) -> list[tuple[str, str]]:
             lines = [div.get_text().strip() for div in divs]
             text = "\n".join(lines)
         else:
-            text = out_pre.get_text().replace("\r", "")
+            text = out_pre.get_text().replace("\r", "").strip()
         all_outputs.append(text)
     if all_inputs and all_outputs: