feat: auth backend (#111)

* refactor(types): extract inline anonymous types into named classes

Problem: several functions used inline `{...}` table types in their
`@param` and `@return` annotations, making them hard to read and
impossible to reference from other modules.

Solution: extract each into a named `---@class`: `pending.Metadata`,
`pending.TaskFields`, `pending.CompletionItem`, `pending.SystemResult`,
and `pending.OAuthClientOpts`.

* refactor(sync): extract shared utilities into `sync/util.lua`

Problem: sync epilogue code (`s:save()`, `_recompute_counts()`,
`buffer.render()`) and `fmt_counts` were duplicated across `gcal.lua`
and `gtasks.lua`. The concurrency guard lived in `oauth.lua`, coupling
non-OAuth backends to the OAuth module.

Solution: create `sync/util.lua` with `async`, `system`, `with_guard`,
`finish`, and `fmt_counts`. Delegate from `oauth.lua` and replace
duplicated code in both backends. Add per-backend `auth()` and
`auth_complete()` methods to `gcal.lua` and `gtasks.lua`.

* feat(sync): auto-discover backends, per-backend auth, S3 backend

Problem: sync backends were hardcoded in `SYNC_BACKENDS` list in
`init.lua`, auth routed directly through `oauth.google_client`, and
adding a non-OAuth backend required editing multiple files.

Solution: replace hardcoded list with `discover_backends()` that globs
`lua/pending/sync/*.lua` at runtime. Rewrite `M.auth()` to dispatch
to per-backend `auth()` methods with `vim.ui.select` fallback. Add
`lua/pending/sync/s3.lua` with push/pull/sync via AWS CLI, per-task
merge by `_s3_sync_id` (UUID), and `pending.S3Config` type.
This commit is contained in:
Barrett Ruth 2026-03-08 19:53:42 -04:00
parent 34a68db6d0
commit d12838abbf
13 changed files with 1173 additions and 107 deletions

407
lua/pending/sync/s3.lua Normal file
View file

@@ -0,0 +1,407 @@
--- S3 sync backend for pending.nvim.
--- Pushes/pulls the task store to a single S3 object via the AWS CLI and
--- merges local and remote tasks by their `_s3_sync_id` UUID.
local log = require('pending.log')
local util = require('pending.sync.util')
local M = {}
M.name = 's3'
--- Fetch the user's S3 sync settings from the plugin config, if any.
---@return pending.S3Config?
local function get_config()
  local cfg = require('pending.config').get()
  local sync_cfg = cfg.sync
  return sync_cfg and sync_cfg.s3
end
--- Build the argv prefix shared by every AWS CLI invocation, appending the
--- optional `--profile`/`--region` flags from config (in that order).
---@return string[]
local function base_cmd()
  local opts = get_config() or {}
  local argv = { 'aws' }
  for _, pair in ipairs({ { '--profile', opts.profile }, { '--region', opts.region } }) do
    if pair[2] then
      argv[#argv + 1] = pair[1]
      argv[#argv + 1] = pair[2]
    end
  end
  return argv
end
-- Seed the PRNG once per module load instead of on every call: the original
-- re-seeded from vim.uv.hrtime() per task (M.push calls this in a loop over
-- all tasks), which is wasteful and ties consecutive IDs to timer resolution.
local rng_seeded = false

--- Ensure `task` carries a stable `_s3_sync_id` (RFC 4122 version-4 UUID),
--- generating one — and bumping `modified` — when absent.
---@param task pending.Task
---@return string sync_id the task's (possibly new) sync id
local function ensure_sync_id(task)
  if not task._extra then
    task._extra = {}
  end
  local sync_id = task._extra['_s3_sync_id']
  if sync_id then
    return sync_id
  end
  if not rng_seeded then
    math.randomseed(vim.uv.hrtime())
    rng_seeded = true
  end
  local bytes = {}
  for i = 1, 16 do
    bytes[i] = math.random(0, 255)
  end
  -- RFC 4122: byte 7 carries the version nibble (0100 = v4), byte 9 the
  -- variant bits (10xx).
  bytes[7] = bit.bor(bit.band(bytes[7], 0x0f), 0x40)
  bytes[9] = bit.bor(bit.band(bytes[9], 0x3f), 0x80)
  sync_id = string.format(
    '%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x',
    bytes[1], bytes[2], bytes[3], bytes[4],
    bytes[5], bytes[6],
    bytes[7], bytes[8],
    bytes[9], bytes[10],
    bytes[11], bytes[12], bytes[13], bytes[14], bytes[15], bytes[16]
  )
  task._extra['_s3_sync_id'] = sync_id
  -- Assigning an id counts as a modification so the next merge propagates it.
  task.modified = os.date('!%Y-%m-%dT%H:%M:%SZ') --[[@as string]]
  return sync_id
end
--- Verify AWS credentials, or with `args == 'profile'` inspect/explain how
--- to set the AWS profile. On an SSO-related failure, attempts `aws sso login`.
---@param args? string
---@return nil
function M.auth(args)
  if args == 'profile' then
    vim.ui.input({ prompt = 'AWS profile name: ' }, function(input)
      if input and input ~= '' then
        log.info('s3: set profile in your config: sync = { s3 = { profile = "' .. input .. '" } }')
        return
      end
      -- Empty input: report the currently configured profile instead.
      local s3cfg = get_config()
      if s3cfg and s3cfg.profile then
        log.info('s3: current profile: ' .. s3cfg.profile)
      else
        log.info('s3: no profile configured (using default)')
      end
    end)
    return
  end
  util.async(function()
    local identity_cmd = base_cmd()
    vim.list_extend(identity_cmd, { 'sts', 'get-caller-identity', '--output', 'json' })
    local res = util.system(identity_cmd, { text = true })
    if res.code == 0 then
      local decoded_ok, identity = pcall(vim.json.decode, res.stdout or '')
      if decoded_ok and identity then
        log.info('s3: authenticated as ' .. (identity.Arn or identity.Account or 'unknown'))
      else
        log.info('s3: credentials valid')
      end
      return
    end
    local stderr = res.stderr or ''
    if stderr:find('SSO') or stderr:find('sso') then
      log.info('s3: SSO session expired — running login...')
      local login_cmd = base_cmd()
      vim.list_extend(login_cmd, { 'sso', 'login' })
      local login_res = util.system(login_cmd, { text = true })
      if login_res.code == 0 then
        log.info('s3: SSO login successful')
      else
        log.error('s3: SSO login failed — ' .. (login_res.stderr or ''))
      end
    elseif stderr:find('Unable to locate credentials') or stderr:find('NoCredentialProviders') then
      log.error('s3: no AWS credentials configured. See :h pending-s3')
    else
      log.error('s3: ' .. stderr)
    end
  end)
end
--- Command-line completion candidates for the `auth` subcommand.
---@return string[]
function M.auth_complete()
  local candidates = { 'profile' }
  return candidates
end
--- Upload the local task store to s3://<bucket>/<key>.
--- Every task is tagged with a sync id first so later pulls can merge.
function M.push()
  util.async(function()
    util.with_guard('s3', function()
      local s3cfg = get_config()
      if not s3cfg or not s3cfg.bucket then
        log.error('s3: bucket is required. Set sync.s3.bucket in config.')
        return
      end
      local key = s3cfg.key or 'pending.json'
      local s = require('pending').store()
      for _, task in ipairs(s:tasks()) do
        ensure_sync_id(task)
      end
      s:save()
      -- NOTE: the original also built and loaded a second store from s.path
      -- here (`store.new(s.path); :load()`) without ever using it — dead
      -- disk I/O, removed.
      -- Copy the saved store to a temp file so the upload reads a stable
      -- snapshot even if the store file is rewritten mid-transfer.
      local f = io.open(s.path, 'r')
      if not f then
        log.error('s3: failed to read store file')
        return
      end
      local content = f:read('*a')
      f:close()
      local tmpfile = vim.fn.tempname() .. '.json'
      local tf = io.open(tmpfile, 'w')
      if not tf then
        log.error('s3: failed to create temp file')
        return
      end
      tf:write(content)
      tf:close()
      local cmd = base_cmd()
      vim.list_extend(cmd, { 's3', 'cp', tmpfile, 's3://' .. s3cfg.bucket .. '/' .. key })
      local result = util.system(cmd, { text = true })
      os.remove(tmpfile)
      if result.code ~= 0 then
        log.error('s3 push: ' .. (result.stderr or 'unknown error'))
        return
      end
      util.finish(s)
      log.info('s3 push: uploaded to s3://' .. s3cfg.bucket .. '/' .. key)
    end)
  end)
end
--- Download the remote store and merge it into the local one.
--- Merge policy: tasks are matched by `_s3_sync_id`; on a match the remote
--- copy wins only if its `modified` timestamp is strictly newer; unmatched
--- remote tasks are added locally. Remote tasks without a sync id are skipped.
function M.pull()
util.async(function()
util.with_guard('s3', function()
local s3cfg = get_config()
if not s3cfg or not s3cfg.bucket then
log.error('s3: bucket is required. Set sync.s3.bucket in config.')
return
end
local key = s3cfg.key or 'pending.json'
-- Download the remote object to a temp file, then parse it as a store.
local tmpfile = vim.fn.tempname() .. '.json'
local cmd = base_cmd()
vim.list_extend(cmd, { 's3', 'cp', 's3://' .. s3cfg.bucket .. '/' .. key, tmpfile })
local result = util.system(cmd, { text = true })
if result.code ~= 0 then
os.remove(tmpfile)
log.error('s3 pull: ' .. (result.stderr or 'unknown error'))
return
end
local store = require('pending.store')
local s_remote = store.new(tmpfile)
local load_ok = pcall(function()
s_remote:load()
end)
if not load_ok then
os.remove(tmpfile)
log.error('s3 pull: failed to parse remote store')
return
end
local s = require('pending').store()
local created, updated, unchanged = 0, 0, 0
-- Index local tasks by sync id for O(1) matching against remote tasks.
local local_by_sync_id = {}
for _, task in ipairs(s:tasks()) do
local extra = task._extra or {}
local sid = extra['_s3_sync_id']
if sid then
local_by_sync_id[sid] = task
end
end
for _, remote_task in ipairs(s_remote:tasks()) do
local r_extra = remote_task._extra or {}
local r_sid = r_extra['_s3_sync_id']
-- A remote task without a sync id cannot be matched; skip it.
if not r_sid then
goto continue
end
local local_task = local_by_sync_id[r_sid]
if local_task then
-- Last-writer-wins: `modified` is an ISO-8601 UTC string (see
-- ensure_sync_id), so lexicographic `>` orders chronologically.
local r_mod = remote_task.modified or ''
local l_mod = local_task.modified or ''
if r_mod > l_mod then
local_task.description = remote_task.description
local_task.status = remote_task.status
local_task.category = remote_task.category
local_task.priority = remote_task.priority
local_task.due = remote_task.due
local_task.recur = remote_task.recur
local_task.recur_mode = remote_task.recur_mode
local_task['end'] = remote_task['end']
local_task._extra = local_task._extra or {}
local_task._extra['_s3_sync_id'] = r_sid
local_task.modified = remote_task.modified
updated = updated + 1
else
unchanged = unchanged + 1
end
else
-- No local match: create the task locally with the remote sync id.
-- NOTE(review): unlike the update branch, this drops the remote
-- `end` and `modified` fields — confirm s:add supports/derives them.
s:add({
description = remote_task.description,
status = remote_task.status,
category = remote_task.category,
priority = remote_task.priority,
due = remote_task.due,
recur = remote_task.recur,
recur_mode = remote_task.recur_mode,
_extra = { _s3_sync_id = r_sid },
})
created = created + 1
end
::continue::
end
os.remove(tmpfile)
util.finish(s)
log.info('s3 pull: ' .. util.fmt_counts({
{ created, 'added' },
{ updated, 'updated' },
{ unchanged, 'unchanged' },
}))
end)
end)
end
--- Bidirectional sync: pull-and-merge the remote store (same policy as
--- M.pull: match by `_s3_sync_id`, remote wins when strictly newer), then
--- upload the merged local store back to s3://<bucket>/<key>.
--- A failed/missing download is tolerated — the push still happens, making
--- this usable for first-time upload.
function M.sync()
util.async(function()
util.with_guard('s3', function()
local s3cfg = get_config()
if not s3cfg or not s3cfg.bucket then
log.error('s3: bucket is required. Set sync.s3.bucket in config.')
return
end
local key = s3cfg.key or 'pending.json'
-- Phase 1: download and merge (best-effort; errors fall through to push).
local tmpfile = vim.fn.tempname() .. '.json'
local cmd = base_cmd()
vim.list_extend(cmd, { 's3', 'cp', 's3://' .. s3cfg.bucket .. '/' .. key, tmpfile })
local result = util.system(cmd, { text = true })
local s = require('pending').store()
local created, updated = 0, 0
if result.code == 0 then
local store = require('pending.store')
local s_remote = store.new(tmpfile)
local load_ok = pcall(function()
s_remote:load()
end)
if load_ok then
-- Index local tasks by sync id for O(1) matching.
local local_by_sync_id = {}
for _, task in ipairs(s:tasks()) do
local extra = task._extra or {}
local sid = extra['_s3_sync_id']
if sid then
local_by_sync_id[sid] = task
end
end
for _, remote_task in ipairs(s_remote:tasks()) do
local r_extra = remote_task._extra or {}
local r_sid = r_extra['_s3_sync_id']
-- Remote tasks without a sync id cannot be matched; skip.
if not r_sid then
goto continue
end
local local_task = local_by_sync_id[r_sid]
if local_task then
-- Last-writer-wins on the ISO-8601 `modified` string; lexicographic
-- `>` orders chronologically for this format.
local r_mod = remote_task.modified or ''
local l_mod = local_task.modified or ''
if r_mod > l_mod then
local_task.description = remote_task.description
local_task.status = remote_task.status
local_task.category = remote_task.category
local_task.priority = remote_task.priority
local_task.due = remote_task.due
local_task.recur = remote_task.recur
local_task.recur_mode = remote_task.recur_mode
local_task['end'] = remote_task['end']
local_task._extra = local_task._extra or {}
local_task._extra['_s3_sync_id'] = r_sid
local_task.modified = remote_task.modified
updated = updated + 1
end
else
-- NOTE(review): as in M.pull, the add branch drops remote `end`
-- and `modified` — confirm s:add handles these.
s:add({
description = remote_task.description,
status = remote_task.status,
category = remote_task.category,
priority = remote_task.priority,
due = remote_task.due,
recur = remote_task.recur,
recur_mode = remote_task.recur_mode,
_extra = { _s3_sync_id = r_sid },
})
created = created + 1
end
::continue::
end
end
end
os.remove(tmpfile)
-- Phase 2: tag every task (including freshly merged ones), persist, and
-- upload a snapshot copy of the saved store.
for _, task in ipairs(s:tasks()) do
ensure_sync_id(task)
end
s:save()
local f = io.open(s.path, 'r')
if not f then
log.error('s3 sync: failed to read store file')
return
end
local content = f:read('*a')
f:close()
local push_tmpfile = vim.fn.tempname() .. '.json'
local tf = io.open(push_tmpfile, 'w')
if not tf then
log.error('s3 sync: failed to create temp file')
return
end
tf:write(content)
tf:close()
local push_cmd = base_cmd()
vim.list_extend(push_cmd, { 's3', 'cp', push_tmpfile, 's3://' .. s3cfg.bucket .. '/' .. key })
local push_result = util.system(push_cmd, { text = true })
os.remove(push_tmpfile)
if push_result.code ~= 0 then
log.error('s3 sync push: ' .. (push_result.stderr or 'unknown error'))
-- Still run the epilogue so merged pull results are rendered/saved.
util.finish(s)
return
end
util.finish(s)
log.info('s3 sync: pull ' .. util.fmt_counts({
{ created, 'added' },
{ updated, 'updated' },
}) .. ' | push uploaded')
end)
end)
end
--- :checkhealth report for the S3 backend: AWS CLI presence and bucket config.
---@return nil
function M.health()
  local has_cli = vim.fn.executable('aws') == 1
  if has_cli then
    vim.health.ok('aws CLI found')
  else
    vim.health.error('aws CLI not found (required for S3 sync)')
  end
  local cfg = get_config()
  local bucket = cfg and cfg.bucket
  if bucket then
    vim.health.ok('S3 bucket configured: ' .. bucket)
  else
    vim.health.warn('S3 bucket not configured — set sync.s3.bucket')
  end
end
-- NOTE(review): underscore prefix suggests this is exposed for tests only,
-- not part of the public backend interface — confirm.
M._ensure_sync_id = ensure_sync_id
return M