fix(sync): normalize log prefixes and S3 prompt UX (#115)
* feat(s3): create bucket interactively during auth when unconfigured Problem: when a user runs `:Pending s3 auth` with no bucket configured, auth succeeds but offers no way to create the bucket. The user must manually run `aws s3api create-bucket` and update their config. Solution: add `util.input()` coroutine-aware prompt wrapper and a `create_bucket()` flow in `s3.lua` that prompts for bucket name and region, handles the `us-east-1` LocationConstraint quirk, and logs a config snippet on success. Called automatically from `auth()` when `sync.s3.bucket` is absent. * ci: typing * feat(parse): add `parse_duration_to_days` for duration string conversion Problem: The archive command accepted only a bare integer for days, inconsistent with the `+Nd`/`+Nw`/`+Nm` duration syntax used elsewhere. Solution: Add `parse_duration_to_days()` supporting `Nd`, `Nw`, `Nm`, and bare integers. Returns nil on invalid input for caller error handling. * feat(archive): duration syntax and confirmation prompt Problem: `:Pending archive` accepted only a bare integer for days and silently deleted tasks with no confirmation, risking accidental data loss. Solution: Accept duration strings (`7d`, `3w`, `2m`) via `parse.parse_duration_to_days()`, show a `vim.ui.input` confirmation prompt before removing tasks, and skip the prompt when zero tasks match. * feat: add `<C-a>` / `<C-x>` keymaps for priority increment/decrement Problem: Priority could only be cycled with `g!` (0→1→2→3→0), with no way to directly increment or decrement. Solution: Add `adjust_priority()` with clamping at 0 and `max_priority`, exposed as `increment_priority()` / `decrement_priority()` on `<C-a>` / `<C-x>`. Includes `<Plug>` mappings and vimdoc. * fix(s3): use parenthetical defaults in bucket creation prompts Problem: `util.input` with `default` pre-filled the input field, and the success message said "Add to your config" ambiguously. 
Solution: Show defaults in prompt text as `(default)` instead of pre-filling, and clarify the message to "Add to your pending.nvim config". * ci: format * ci(sync): normalize log prefix to `backend:` across all sync backends Problem: Sync log messages used inconsistent prefixes like `s3 push:`, `gtasks pull:`, `gtasks sync —` instead of the `backend: action` pattern used by auth messages. Solution: Normalize all sync backend logs to `backend: action ...` format across `s3.lua`, `gcal.lua`, and `gtasks.lua`. * ci: fix linter warnings in archive spec and s3 bucket creation
This commit is contained in:
parent
9672af7c08
commit
c37cf7cc3a
4 changed files with 18 additions and 20 deletions
|
|
@@ -88,9 +88,7 @@ local function create_bucket()
|
|||
end
|
||||
|
||||
local region = util.input({ prompt = 'AWS region (' .. default_region .. '): ' })
|
||||
if not region then
|
||||
region = default_region
|
||||
elseif region == '' then
|
||||
if not region or region == '' then
|
||||
region = default_region
|
||||
end
|
||||
|
||||
|
|
@@ -220,12 +218,12 @@ function M.push()
|
|||
os.remove(tmpfile)
|
||||
|
||||
if result.code ~= 0 then
|
||||
log.error('s3 push: ' .. (result.stderr or 'unknown error'))
|
||||
log.error('s3: push failed — ' .. (result.stderr or 'unknown error'))
|
||||
return
|
||||
end
|
||||
|
||||
util.finish(s)
|
||||
log.info('s3 push: uploaded to s3://' .. s3cfg.bucket .. '/' .. key)
|
||||
log.info('s3: push uploaded to s3://' .. s3cfg.bucket .. '/' .. key)
|
||||
end)
|
||||
end)
|
||||
end
|
||||
|
|
@@ -247,7 +245,7 @@ function M.pull()
|
|||
|
||||
if result.code ~= 0 then
|
||||
os.remove(tmpfile)
|
||||
log.error('s3 pull: ' .. (result.stderr or 'unknown error'))
|
||||
log.error('s3: pull failed — ' .. (result.stderr or 'unknown error'))
|
||||
return
|
||||
end
|
||||
|
||||
|
|
@@ -258,7 +256,7 @@ function M.pull()
|
|||
end)
|
||||
if not load_ok then
|
||||
os.remove(tmpfile)
|
||||
log.error('s3 pull: failed to parse remote store')
|
||||
log.error('s3: pull failed — could not parse remote store')
|
||||
return
|
||||
end
|
||||
|
||||
|
|
@@ -320,7 +318,7 @@ function M.pull()
|
|||
|
||||
os.remove(tmpfile)
|
||||
util.finish(s)
|
||||
log.info('s3 pull: ' .. util.fmt_counts({
|
||||
log.info('s3: pull ' .. util.fmt_counts({
|
||||
{ created, 'added' },
|
||||
{ updated, 'updated' },
|
||||
{ unchanged, 'unchanged' },
|
||||
|
|
@@ -416,7 +414,7 @@ function M.sync()
|
|||
|
||||
local f = io.open(s.path, 'r')
|
||||
if not f then
|
||||
log.error('s3 sync: failed to read store file')
|
||||
log.error('s3: sync failed — could not read store file')
|
||||
return
|
||||
end
|
||||
local content = f:read('*a')
|
||||
|
|
@@ -425,7 +423,7 @@ function M.sync()
|
|||
local push_tmpfile = vim.fn.tempname() .. '.json'
|
||||
local tf = io.open(push_tmpfile, 'w')
|
||||
if not tf then
|
||||
log.error('s3 sync: failed to create temp file')
|
||||
log.error('s3: sync failed — could not create temp file')
|
||||
return
|
||||
end
|
||||
tf:write(content)
|
||||
|
|
@@ -437,13 +435,13 @@ function M.sync()
|
|||
os.remove(push_tmpfile)
|
||||
|
||||
if push_result.code ~= 0 then
|
||||
log.error('s3 sync push: ' .. (push_result.stderr or 'unknown error'))
|
||||
log.error('s3: sync push failed — ' .. (push_result.stderr or 'unknown error'))
|
||||
util.finish(s)
|
||||
return
|
||||
end
|
||||
|
||||
util.finish(s)
|
||||
log.info('s3 sync: pull ' .. util.fmt_counts({
|
||||
log.info('s3: sync ' .. util.fmt_counts({
|
||||
{ created, 'added' },
|
||||
{ updated, 'updated' },
|
||||
}) .. ' | push uploaded')
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue