feat: auth backend (#111)
* refactor(types): extract inline anonymous types into named classes
Problem: several functions used inline `{...}` table types in their
`@param` and `@return` annotations, making them hard to read and
impossible to reference from other modules.
Solution: extract each into a named `---@class`: `pending.Metadata`,
`pending.TaskFields`, `pending.CompletionItem`, `pending.SystemResult`,
and `pending.OAuthClientOpts`.
* refactor(sync): extract shared utilities into `sync/util.lua`
Problem: sync epilogue code (`s:save()`, `_recompute_counts()`,
`buffer.render()`) and `fmt_counts` were duplicated across `gcal.lua`
and `gtasks.lua`. The concurrency guard lived in `oauth.lua`, coupling
non-OAuth backends to the OAuth module.
Solution: create `sync/util.lua` with `async`, `system`, `with_guard`,
`finish`, and `fmt_counts`. Delegate from `oauth.lua` and replace
duplicated code in both backends. Add per-backend `auth()` and
`auth_complete()` methods to `gcal.lua` and `gtasks.lua`.
* feat(sync): auto-discover backends, per-backend auth, S3 backend
Problem: sync backends were hardcoded in `SYNC_BACKENDS` list in
`init.lua`, auth routed directly through `oauth.google_client`, and
adding a non-OAuth backend required editing multiple files.
Solution: replace hardcoded list with `discover_backends()` that globs
`lua/pending/sync/*.lua` at runtime. Rewrite `M.auth()` to dispatch
to per-backend `auth()` methods with `vim.ui.select` fallback. Add
`lua/pending/sync/s3.lua` with push/pull/sync via AWS CLI, per-task
merge by `_s3_sync_id` (UUID), and `pending.S3Config` type.
This commit is contained in:
parent
ac02526cf1
commit
fe4c1d0e31
13 changed files with 1173 additions and 107 deletions
|
|
@ -27,10 +27,17 @@
|
|||
---@field client_id? string
|
||||
---@field client_secret? string
|
||||
|
||||
---@class pending.S3Config
|
||||
---@field bucket string
|
||||
---@field key? string
|
||||
---@field profile? string
|
||||
---@field region? string
|
||||
|
||||
---@class pending.SyncConfig
|
||||
---@field remote_delete? boolean
|
||||
---@field gcal? pending.GcalConfig
|
||||
---@field gtasks? pending.GtasksConfig
|
||||
---@field s3? pending.S3Config
|
||||
|
||||
---@class pending.Keymaps
|
||||
---@field close? string|false
|
||||
|
|
|
|||
|
|
@ -933,13 +933,30 @@ function M.add(text)
|
|||
log.info('Task added: ' .. description)
|
||||
end
|
||||
|
||||
---@type string[]
|
||||
local SYNC_BACKENDS = { 'gcal', 'gtasks' }
|
||||
---@type string[]?
|
||||
local _sync_backends = nil
|
||||
|
||||
---@type table<string, true>
|
||||
local SYNC_BACKEND_SET = {}
|
||||
for _, b in ipairs(SYNC_BACKENDS) do
|
||||
SYNC_BACKEND_SET[b] = true
|
||||
---@type table<string, true>?
|
||||
local _sync_backend_set = nil
|
||||
|
||||
---Discover sync backends by globbing `lua/pending/sync/*.lua` on the
---runtimepath. A module qualifies when it loads cleanly and exposes a
---`name` field. Results are cached in `_sync_backends`/`_sync_backend_set`;
---subsequent calls return the cache.
---@return string[], table<string, true>
local function discover_backends()
  if _sync_backends then
    return _sync_backends, _sync_backend_set --[[@as table<string, true>]]
  end
  _sync_backends = {}
  _sync_backend_set = {}
  local paths = vim.fn.globpath(vim.o.runtimepath, 'lua/pending/sync/*.lua', false, true)
  for _, path in ipairs(paths) do
    local name = vim.fn.fnamemodify(path, ':t:r')
    local ok, mod = pcall(require, 'pending.sync.' .. name)
    -- Guard against duplicates: the same plugin directory can appear more
    -- than once on 'runtimepath', in which case globpath yields the same
    -- module twice. Without the set check the list would carry duplicates.
    if ok and type(mod) == 'table' and mod.name and not _sync_backend_set[mod.name] then
      table.insert(_sync_backends, mod.name)
      _sync_backend_set[mod.name] = true
    end
  end
  table.sort(_sync_backends)
  return _sync_backends, _sync_backend_set
end
|
||||
|
||||
---@param backend_name string
|
||||
|
|
@ -954,7 +971,13 @@ local function run_sync(backend_name, action)
|
|||
if not action or action == '' then
|
||||
local actions = {}
|
||||
for k, v in pairs(backend) do
|
||||
if type(v) == 'function' and k:sub(1, 1) ~= '_' and k ~= 'health' then
|
||||
if
|
||||
type(v) == 'function'
|
||||
and k:sub(1, 1) ~= '_'
|
||||
and k ~= 'health'
|
||||
and k ~= 'auth'
|
||||
and k ~= 'auth_complete'
|
||||
then
|
||||
table.insert(actions, k)
|
||||
end
|
||||
end
|
||||
|
|
@ -1246,29 +1269,55 @@ end
|
|||
---@param args? string
|
||||
---@return nil
|
||||
function M.auth(args)
|
||||
local oauth = require('pending.sync.oauth')
|
||||
local parts = {}
|
||||
for w in (args or ''):gmatch('%S+') do
|
||||
table.insert(parts, w)
|
||||
end
|
||||
local action = parts[#parts]
|
||||
if action == parts[1] and (action == 'gtasks' or action == 'gcal') then
|
||||
action = nil
|
||||
|
||||
local backend_name = parts[1]
|
||||
local sub_action = parts[2]
|
||||
|
||||
local backends_list = discover_backends()
|
||||
local auth_backends = {}
|
||||
for _, name in ipairs(backends_list) do
|
||||
local ok, mod = pcall(require, 'pending.sync.' .. name)
|
||||
if ok and type(mod.auth) == 'function' then
|
||||
table.insert(auth_backends, { name = name, mod = mod })
|
||||
end
|
||||
end
|
||||
|
||||
if action == 'clear' then
|
||||
oauth.google_client:clear_tokens()
|
||||
log.info('OAuth tokens cleared — run :Pending auth to re-authenticate.')
|
||||
elseif action == 'reset' then
|
||||
oauth.google_client:_wipe()
|
||||
log.info('OAuth tokens and credentials cleared — run :Pending auth to set up from scratch.')
|
||||
else
|
||||
local creds = oauth.google_client:resolve_credentials()
|
||||
if creds.client_id == oauth.BUNDLED_CLIENT_ID then
|
||||
oauth.google_client:setup()
|
||||
else
|
||||
oauth.google_client:auth()
|
||||
if backend_name then
|
||||
local found = false
|
||||
for _, b in ipairs(auth_backends) do
|
||||
if b.name == backend_name then
|
||||
b.mod.auth(sub_action)
|
||||
found = true
|
||||
break
|
||||
end
|
||||
end
|
||||
if not found then
|
||||
log.error('No auth method for backend: ' .. backend_name)
|
||||
end
|
||||
elseif #auth_backends == 1 then
|
||||
auth_backends[1].mod.auth()
|
||||
elseif #auth_backends > 1 then
|
||||
local names = {}
|
||||
for _, b in ipairs(auth_backends) do
|
||||
table.insert(names, b.name)
|
||||
end
|
||||
vim.ui.select(names, { prompt = 'Authenticate backend: ' }, function(choice)
|
||||
if not choice then
|
||||
return
|
||||
end
|
||||
for _, b in ipairs(auth_backends) do
|
||||
if b.name == choice then
|
||||
b.mod.auth()
|
||||
break
|
||||
end
|
||||
end
|
||||
end)
|
||||
else
|
||||
log.warn('No sync backends with auth support found.')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -1289,7 +1338,7 @@ function M.command(args)
|
|||
M.edit(id_str, edit_rest)
|
||||
elseif cmd == 'auth' then
|
||||
M.auth(rest)
|
||||
elseif SYNC_BACKEND_SET[cmd] then
|
||||
elseif select(2, discover_backends())[cmd] then
|
||||
local action = rest:match('^(%S+)')
|
||||
run_sync(cmd, action)
|
||||
elseif cmd == 'archive' then
|
||||
|
|
@ -1307,12 +1356,13 @@ end
|
|||
|
||||
---List the names of all discovered sync backends (sorted).
---@return string[]
function M.sync_backends()
  -- discover_backends() returns (names, set); keep only the name list.
  local names = discover_backends()
  return names
end
|
||||
|
||||
---Membership set of discovered backend names, for O(1) lookups.
---@return table<string, true>
function M.sync_backend_set()
  -- select(2, ...) picks the set; the extra parens truncate to one value.
  return (select(2, discover_backends()))
end
|
||||
|
||||
return M
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
local config = require('pending.config')
|
||||
local log = require('pending.log')
|
||||
local oauth = require('pending.sync.oauth')
|
||||
local util = require('pending.sync.util')
|
||||
|
||||
local M = {}
|
||||
|
||||
|
|
@ -154,21 +155,6 @@ local function unlink_remote(task, extra, now_ts)
|
|||
task.modified = now_ts
|
||||
end
|
||||
|
||||
---@param parts {[1]: integer, [2]: string}[]
|
||||
---@return string
|
||||
local function fmt_counts(parts)
|
||||
local items = {}
|
||||
for _, p in ipairs(parts) do
|
||||
if p[1] > 0 then
|
||||
table.insert(items, p[1] .. ' ' .. p[2])
|
||||
end
|
||||
end
|
||||
if #items == 0 then
|
||||
return 'nothing to do'
|
||||
end
|
||||
return table.concat(items, ' | ')
|
||||
end
|
||||
|
||||
function M.push()
|
||||
oauth.with_token(oauth.google_client, 'gcal', function(access_token)
|
||||
local calendars, cal_err = get_all_calendars(access_token)
|
||||
|
|
@ -246,13 +232,8 @@ function M.push()
|
|||
end
|
||||
end
|
||||
|
||||
s:save()
|
||||
require('pending')._recompute_counts()
|
||||
local buffer = require('pending.buffer')
|
||||
if buffer.bufnr() and vim.api.nvim_buf_is_valid(buffer.bufnr()) then
|
||||
buffer.render(buffer.bufnr())
|
||||
end
|
||||
log.info('gcal push: ' .. fmt_counts({
|
||||
util.finish(s)
|
||||
log.info('gcal push: ' .. util.fmt_counts({
|
||||
{ created, 'added' },
|
||||
{ updated, 'updated' },
|
||||
{ deleted, 'removed' },
|
||||
|
|
@ -261,6 +242,32 @@ function M.push()
|
|||
end)
|
||||
end
|
||||
|
||||
---Handle `:Pending auth gcal [clear|reset]`.
---'clear' drops tokens only, 'reset' wipes tokens and credentials; with no
---subcommand it runs setup (bundled client id) or the normal auth flow.
---@param args? string
---@return nil
function M.auth(args)
  local client = oauth.google_client
  if args == 'clear' then
    client:clear_tokens()
    log.info('gcal: OAuth tokens cleared — run :Pending auth gcal to re-authenticate.')
    return
  end
  if args == 'reset' then
    client:_wipe()
    log.info(
      'gcal: OAuth tokens and credentials cleared — run :Pending auth gcal to set up from scratch.'
    )
    return
  end
  -- Default path: a bundled client id means the user never configured their
  -- own OAuth app, so walk them through setup instead of plain auth.
  local creds = client:resolve_credentials()
  if creds.client_id == oauth.BUNDLED_CLIENT_ID then
    client:setup()
  else
    client:auth()
  end
end
|
||||
|
||||
---Completion candidates for `:Pending auth gcal <Tab>`.
---@return string[]
function M.auth_complete()
  local subcommands = { 'clear', 'reset' }
  return subcommands
end
|
||||
|
||||
---@return nil
|
||||
function M.health()
|
||||
oauth.health(M.name)
|
||||
|
|
@ -268,7 +275,7 @@ function M.health()
|
|||
if tokens and tokens.refresh_token then
|
||||
vim.health.ok('gcal tokens found')
|
||||
else
|
||||
vim.health.info('no gcal tokens — run :Pending auth')
|
||||
vim.health.info('no gcal tokens — run :Pending auth gcal')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
local config = require('pending.config')
|
||||
local log = require('pending.log')
|
||||
local oauth = require('pending.sync.oauth')
|
||||
local util = require('pending.sync.util')
|
||||
|
||||
local M = {}
|
||||
|
||||
|
|
@ -195,21 +196,6 @@ local function unlink_remote(task, now_ts)
|
|||
task.modified = now_ts
|
||||
end
|
||||
|
||||
---@param parts {[1]: integer, [2]: string}[]
|
||||
---@return string
|
||||
local function fmt_counts(parts)
|
||||
local items = {}
|
||||
for _, p in ipairs(parts) do
|
||||
if p[1] > 0 then
|
||||
table.insert(items, p[1] .. ' ' .. p[2])
|
||||
end
|
||||
end
|
||||
if #items == 0 then
|
||||
return 'nothing to do'
|
||||
end
|
||||
return table.concat(items, ' | ')
|
||||
end
|
||||
|
||||
---@param task pending.Task
|
||||
---@return table
|
||||
local function task_to_gtask(task)
|
||||
|
|
@ -447,13 +433,8 @@ function M.push()
|
|||
local by_gtasks_id = build_id_index(s)
|
||||
local created, updated, deleted, failed =
|
||||
push_pass(access_token, tasklists, s, now_ts, by_gtasks_id)
|
||||
s:save()
|
||||
require('pending')._recompute_counts()
|
||||
local buffer = require('pending.buffer')
|
||||
if buffer.bufnr() and vim.api.nvim_buf_is_valid(buffer.bufnr()) then
|
||||
buffer.render(buffer.bufnr())
|
||||
end
|
||||
log.info('gtasks push: ' .. fmt_counts({
|
||||
util.finish(s)
|
||||
log.info('gtasks push: ' .. util.fmt_counts({
|
||||
{ created, 'added' },
|
||||
{ updated, 'updated' },
|
||||
{ deleted, 'deleted' },
|
||||
|
|
@ -474,13 +455,8 @@ function M.pull()
|
|||
local created, updated, failed, seen_remote_ids, fetched_list_ids =
|
||||
pull_pass(access_token, tasklists, s, now_ts, by_gtasks_id)
|
||||
local unlinked = detect_remote_deletions(s, seen_remote_ids, fetched_list_ids, now_ts)
|
||||
s:save()
|
||||
require('pending')._recompute_counts()
|
||||
local buffer = require('pending.buffer')
|
||||
if buffer.bufnr() and vim.api.nvim_buf_is_valid(buffer.bufnr()) then
|
||||
buffer.render(buffer.bufnr())
|
||||
end
|
||||
log.info('gtasks pull: ' .. fmt_counts({
|
||||
util.finish(s)
|
||||
log.info('gtasks pull: ' .. util.fmt_counts({
|
||||
{ created, 'added' },
|
||||
{ updated, 'updated' },
|
||||
{ unlinked, 'unlinked' },
|
||||
|
|
@ -503,18 +479,13 @@ function M.sync()
|
|||
local pulled_create, pulled_update, pulled_failed, seen_remote_ids, fetched_list_ids =
|
||||
pull_pass(access_token, tasklists, s, now_ts, by_gtasks_id)
|
||||
local unlinked = detect_remote_deletions(s, seen_remote_ids, fetched_list_ids, now_ts)
|
||||
s:save()
|
||||
require('pending')._recompute_counts()
|
||||
local buffer = require('pending.buffer')
|
||||
if buffer.bufnr() and vim.api.nvim_buf_is_valid(buffer.bufnr()) then
|
||||
buffer.render(buffer.bufnr())
|
||||
end
|
||||
log.info('gtasks sync — push: ' .. fmt_counts({
|
||||
util.finish(s)
|
||||
log.info('gtasks sync — push: ' .. util.fmt_counts({
|
||||
{ pushed_create, 'added' },
|
||||
{ pushed_update, 'updated' },
|
||||
{ pushed_delete, 'deleted' },
|
||||
{ pushed_failed, 'failed' },
|
||||
}) .. ' pull: ' .. fmt_counts({
|
||||
}) .. ' pull: ' .. util.fmt_counts({
|
||||
{ pulled_create, 'added' },
|
||||
{ pulled_update, 'updated' },
|
||||
{ unlinked, 'unlinked' },
|
||||
|
|
@ -533,6 +504,32 @@ M._push_pass = push_pass
|
|||
M._pull_pass = pull_pass
|
||||
M._detect_remote_deletions = detect_remote_deletions
|
||||
|
||||
---Handle `:Pending auth gtasks [clear|reset]`.
---'clear' drops tokens only, 'reset' wipes tokens and credentials; with no
---subcommand it runs setup (bundled client id) or the normal auth flow.
---@param args? string
---@return nil
function M.auth(args)
  local client = oauth.google_client
  if args == 'clear' then
    client:clear_tokens()
    log.info('gtasks: OAuth tokens cleared — run :Pending auth gtasks to re-authenticate.')
  elseif args == 'reset' then
    client:_wipe()
    log.info(
      'gtasks: OAuth tokens and credentials cleared — run :Pending auth gtasks to set up from scratch.'
    )
  else
    -- A bundled client id means the user never configured their own OAuth
    -- app — run interactive setup rather than the plain auth flow.
    local creds = client:resolve_credentials()
    local needs_setup = creds.client_id == oauth.BUNDLED_CLIENT_ID
    if needs_setup then
      client:setup()
    else
      client:auth()
    end
  end
end
|
||||
|
||||
---Completion candidates for `:Pending auth gtasks <Tab>`.
---@return string[]
function M.auth_complete()
  local subcommands = { 'clear', 'reset' }
  return subcommands
end
|
||||
|
||||
---@return nil
|
||||
function M.health()
|
||||
oauth.health(M.name)
|
||||
|
|
@ -540,7 +537,7 @@ function M.health()
|
|||
if tokens and tokens.refresh_token then
|
||||
vim.health.ok('gtasks tokens found')
|
||||
else
|
||||
vim.health.info('no gtasks tokens — run :Pending auth')
|
||||
vim.health.info('no gtasks tokens — run :Pending auth gtasks')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
407
lua/pending/sync/s3.lua
Normal file
407
lua/pending/sync/s3.lua
Normal file
|
|
@ -0,0 +1,407 @@
|
|||
local log = require('pending.log')
|
||||
local util = require('pending.sync.util')
|
||||
|
||||
local M = {}
|
||||
|
||||
M.name = 's3'
|
||||
|
||||
---Read the user's S3 sync configuration from the plugin config.
---@return pending.S3Config? # the `sync.s3` table, or nil when unset
local function get_config()
  local cfg = require('pending.config').get()
  if cfg.sync == nil then
    return nil
  end
  return cfg.sync.s3
end
|
||||
|
||||
---Build the argv prefix shared by every AWS CLI invocation, appending
---`--profile`/`--region` when configured.
---@return string[]
local function base_cmd()
  local s3cfg = get_config() or {}
  local cmd = { 'aws' }
  -- Append each optional flag/value pair only when the value is set.
  for _, opt in ipairs({ { '--profile', s3cfg.profile }, { '--region', s3cfg.region } }) do
    if opt[2] then
      cmd[#cmd + 1] = opt[1]
      cmd[#cmd + 1] = opt[2]
    end
  end
  return cmd
end
|
||||
|
||||
---Return the task's stable S3 sync id, minting a UUIDv4 and bumping
---`task.modified` when the task does not have one yet.
---@param task pending.Task
---@return string
local function ensure_sync_id(task)
  if not task._extra then
    task._extra = {}
  end
  local sync_id = task._extra['_s3_sync_id']
  if not sync_id then
    local bytes
    -- Prefer the OS CSPRNG (libuv uv.random): it needs no seeding and
    -- cannot hand out correlated ids. The previous code re-seeded
    -- math.random with hrtime on every call, which risks duplicate ids
    -- for tasks processed in the same timer tick.
    local ok, raw = pcall(vim.uv.random, 16)
    if ok and type(raw) == 'string' and #raw == 16 then
      bytes = { raw:byte(1, 16) }
    else
      -- Fallback for luv builds without uv.random().
      bytes = {}
      math.randomseed(vim.uv.hrtime())
      for i = 1, 16 do
        bytes[i] = math.random(0, 255)
      end
    end
    -- Stamp the RFC 4122 version (4) and variant (10xx) bits.
    bytes[7] = bit.bor(bit.band(bytes[7], 0x0f), 0x40)
    bytes[9] = bit.bor(bit.band(bytes[9], 0x3f), 0x80)
    sync_id = string.format(
      '%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x',
      bytes[1], bytes[2], bytes[3], bytes[4],
      bytes[5], bytes[6],
      bytes[7], bytes[8],
      bytes[9], bytes[10],
      bytes[11], bytes[12], bytes[13], bytes[14], bytes[15], bytes[16]
    )
    task._extra['_s3_sync_id'] = sync_id
    -- Bump modified so the freshly-minted id propagates on the next push.
    task.modified = os.date('!%Y-%m-%dT%H:%M:%SZ') --[[@as string]]
  end
  return sync_id
end
|
||||
|
||||
---Verify AWS credentials (`:Pending auth s3`); with 'profile', prompt for
---and report the configured AWS profile instead.
---@param args? string
---@return nil
function M.auth(args)
  if args == 'profile' then
    vim.ui.input({ prompt = 'AWS profile name: ' }, function(input)
      if not input or input == '' then
        -- Empty input: just report what is currently configured.
        local s3cfg = get_config()
        if s3cfg and s3cfg.profile then
          log.info('s3: current profile: ' .. s3cfg.profile)
        else
          log.info('s3: no profile configured (using default)')
        end
        return
      end
      log.info('s3: set profile in your config: sync = { s3 = { profile = "' .. input .. '" } }')
    end)
    return
  end

  util.async(function()
    -- Probe credentials with a cheap STS identity call.
    local probe = base_cmd()
    vim.list_extend(probe, { 'sts', 'get-caller-identity', '--output', 'json' })
    local result = util.system(probe, { text = true })

    if result.code == 0 then
      local ok, identity = pcall(vim.json.decode, result.stdout or '')
      if ok and identity then
        log.info('s3: authenticated as ' .. (identity.Arn or identity.Account or 'unknown'))
      else
        log.info('s3: credentials valid')
      end
      return
    end

    local stderr = result.stderr or ''
    if stderr:find('SSO') or stderr:find('sso') then
      -- Expired SSO session: attempt an interactive login on the user's behalf.
      log.info('s3: SSO session expired — running login...')
      local login_cmd = base_cmd()
      vim.list_extend(login_cmd, { 'sso', 'login' })
      local login_result = util.system(login_cmd, { text = true })
      if login_result.code == 0 then
        log.info('s3: SSO login successful')
      else
        log.error('s3: SSO login failed — ' .. (login_result.stderr or ''))
      end
    elseif stderr:find('Unable to locate credentials') or stderr:find('NoCredentialProviders') then
      log.error('s3: no AWS credentials configured. See :h pending-s3')
    else
      log.error('s3: ' .. stderr)
    end
  end)
end
|
||||
|
||||
---Completion candidates for `:Pending auth s3 <Tab>`.
---@return string[]
function M.auth_complete()
  local subcommands = { 'profile' }
  return subcommands
end
|
||||
|
||||
---Upload the local store to `s3://<bucket>/<key>` via the AWS CLI,
---ensuring every task carries a `_s3_sync_id` first. Runs asynchronously
---under the shared per-backend concurrency guard.
function M.push()
  util.async(function()
    util.with_guard('s3', function()
      local s3cfg = get_config()
      if not s3cfg or not s3cfg.bucket then
        log.error('s3: bucket is required. Set sync.s3.bucket in config.')
        return
      end
      local key = s3cfg.key or 'pending.json'
      local s = require('pending').store()

      -- Make every task addressable by a stable sync id so pulls on other
      -- machines can merge per task.
      for _, task in ipairs(s:tasks()) do
        ensure_sync_id(task)
      end

      -- Persist first, then upload a snapshot copy so the on-disk store can
      -- keep changing while `aws s3 cp` runs.
      -- (Removed dead code: a second store was constructed via
      -- store.new(s.path) and :load()'ed but never used.)
      s:save()

      local f = io.open(s.path, 'r')
      if not f then
        log.error('s3: failed to read store file')
        return
      end
      local content = f:read('*a')
      f:close()

      local tmpfile = vim.fn.tempname() .. '.json'
      local tf = io.open(tmpfile, 'w')
      if not tf then
        log.error('s3: failed to create temp file')
        return
      end
      tf:write(content)
      tf:close()

      local cmd = base_cmd()
      vim.list_extend(cmd, { 's3', 'cp', tmpfile, 's3://' .. s3cfg.bucket .. '/' .. key })
      local result = util.system(cmd, { text = true })
      os.remove(tmpfile)

      if result.code ~= 0 then
        log.error('s3 push: ' .. (result.stderr or 'unknown error'))
        return
      end

      util.finish(s)
      log.info('s3 push: uploaded to s3://' .. s3cfg.bucket .. '/' .. key)
    end)
  end)
end
|
||||
|
||||
---Download the remote store and merge it into the local one, matching
---tasks by `_s3_sync_id` and letting the newer `modified` timestamp win.
---Runs asynchronously under the shared per-backend concurrency guard.
function M.pull()
  util.async(function()
    util.with_guard('s3', function()
      local s3cfg = get_config()
      if not s3cfg or not s3cfg.bucket then
        log.error('s3: bucket is required. Set sync.s3.bucket in config.')
        return
      end
      local key = s3cfg.key or 'pending.json'
      local tmpfile = vim.fn.tempname() .. '.json'

      -- Fetch the remote store into a temp file.
      local cmd = base_cmd()
      vim.list_extend(cmd, { 's3', 'cp', 's3://' .. s3cfg.bucket .. '/' .. key, tmpfile })
      local result = util.system(cmd, { text = true })

      if result.code ~= 0 then
        os.remove(tmpfile)
        log.error('s3 pull: ' .. (result.stderr or 'unknown error'))
        return
      end

      -- Parse the downloaded file through the normal store loader.
      local store = require('pending.store')
      local s_remote = store.new(tmpfile)
      local load_ok = pcall(function()
        s_remote:load()
      end)
      if not load_ok then
        os.remove(tmpfile)
        log.error('s3 pull: failed to parse remote store')
        return
      end

      local s = require('pending').store()
      local created, updated, unchanged = 0, 0, 0

      -- Index local tasks by sync id for O(1) merge lookups. Local tasks
      -- without an id are simply not merge candidates.
      local local_by_sync_id = {}
      for _, task in ipairs(s:tasks()) do
        local extra = task._extra or {}
        local sid = extra['_s3_sync_id']
        if sid then
          local_by_sync_id[sid] = task
        end
      end

      for _, remote_task in ipairs(s_remote:tasks()) do
        local r_extra = remote_task._extra or {}
        local r_sid = r_extra['_s3_sync_id']
        -- Remote tasks without a sync id cannot be matched; skip them.
        if not r_sid then
          goto continue
        end

        local local_task = local_by_sync_id[r_sid]
        if local_task then
          -- ISO-8601 UTC timestamps compare correctly as plain strings;
          -- a missing timestamp sorts oldest.
          local r_mod = remote_task.modified or ''
          local l_mod = local_task.modified or ''
          if r_mod > l_mod then
            -- Remote is newer: copy every synced field onto the local task.
            local_task.description = remote_task.description
            local_task.status = remote_task.status
            local_task.category = remote_task.category
            local_task.priority = remote_task.priority
            local_task.due = remote_task.due
            local_task.recur = remote_task.recur
            local_task.recur_mode = remote_task.recur_mode
            local_task['end'] = remote_task['end']
            local_task._extra = local_task._extra or {}
            local_task._extra['_s3_sync_id'] = r_sid
            local_task.modified = remote_task.modified
            updated = updated + 1
          else
            unchanged = unchanged + 1
          end
        else
          -- Unknown sync id: create the task locally.
          -- NOTE(review): unlike the update branch, this omits `end` and
          -- `modified` from the new task — confirm that is intentional.
          s:add({
            description = remote_task.description,
            status = remote_task.status,
            category = remote_task.category,
            priority = remote_task.priority,
            due = remote_task.due,
            recur = remote_task.recur,
            recur_mode = remote_task.recur_mode,
            _extra = { _s3_sync_id = r_sid },
          })
          created = created + 1
        end

        ::continue::
      end

      os.remove(tmpfile)
      -- finish() saves the store and refreshes counts/rendering.
      util.finish(s)
      log.info('s3 pull: ' .. util.fmt_counts({
        { created, 'added' },
        { updated, 'updated' },
        { unchanged, 'unchanged' },
      }))
    end)
  end)
end
|
||||
|
||||
---Two-way sync: pull the remote store, merge per task by `_s3_sync_id`
---(newer `modified` wins), then upload the merged store back to S3.
---Runs asynchronously under the shared per-backend concurrency guard.
function M.sync()
  util.async(function()
    util.with_guard('s3', function()
      local s3cfg = get_config()
      if not s3cfg or not s3cfg.bucket then
        log.error('s3: bucket is required. Set sync.s3.bucket in config.')
        return
      end
      local key = s3cfg.key or 'pending.json'
      local tmpfile = vim.fn.tempname() .. '.json'

      -- Download the remote store. A download failure is tolerated here —
      -- presumably so the very first sync (no remote object yet) can still
      -- seed the bucket via the push below; confirm intended.
      local cmd = base_cmd()
      vim.list_extend(cmd, { 's3', 'cp', 's3://' .. s3cfg.bucket .. '/' .. key, tmpfile })
      local result = util.system(cmd, { text = true })

      local s = require('pending').store()
      local created, updated = 0, 0

      if result.code == 0 then
        local store = require('pending.store')
        local s_remote = store.new(tmpfile)
        local load_ok = pcall(function()
          s_remote:load()
        end)

        if load_ok then
          -- Index local tasks by sync id for O(1) merge lookups.
          local local_by_sync_id = {}
          for _, task in ipairs(s:tasks()) do
            local extra = task._extra or {}
            local sid = extra['_s3_sync_id']
            if sid then
              local_by_sync_id[sid] = task
            end
          end

          for _, remote_task in ipairs(s_remote:tasks()) do
            local r_extra = remote_task._extra or {}
            local r_sid = r_extra['_s3_sync_id']
            -- Remote tasks without a sync id cannot be matched; skip them.
            if not r_sid then
              goto continue
            end

            local local_task = local_by_sync_id[r_sid]
            if local_task then
              -- ISO-8601 UTC timestamps compare correctly as plain strings;
              -- a missing timestamp sorts oldest.
              local r_mod = remote_task.modified or ''
              local l_mod = local_task.modified or ''
              if r_mod > l_mod then
                -- Remote is newer: copy every synced field onto the local task.
                local_task.description = remote_task.description
                local_task.status = remote_task.status
                local_task.category = remote_task.category
                local_task.priority = remote_task.priority
                local_task.due = remote_task.due
                local_task.recur = remote_task.recur
                local_task.recur_mode = remote_task.recur_mode
                local_task['end'] = remote_task['end']
                local_task._extra = local_task._extra or {}
                local_task._extra['_s3_sync_id'] = r_sid
                local_task.modified = remote_task.modified
                updated = updated + 1
              end
            else
              -- Unknown sync id: create the task locally.
              -- NOTE(review): omits `end`/`modified` from the new task, same
              -- as M.pull — confirm intentional.
              s:add({
                description = remote_task.description,
                status = remote_task.status,
                category = remote_task.category,
                priority = remote_task.priority,
                due = remote_task.due,
                recur = remote_task.recur,
                recur_mode = remote_task.recur_mode,
                _extra = { _s3_sync_id = r_sid },
              })
              created = created + 1
            end

            ::continue::
          end
        end
      end
      os.remove(tmpfile)

      -- Push phase: make sure every task (including freshly created ones)
      -- has a sync id, persist, then upload a snapshot copy of the store.
      for _, task in ipairs(s:tasks()) do
        ensure_sync_id(task)
      end
      s:save()

      local f = io.open(s.path, 'r')
      if not f then
        log.error('s3 sync: failed to read store file')
        return
      end
      local content = f:read('*a')
      f:close()

      local push_tmpfile = vim.fn.tempname() .. '.json'
      local tf = io.open(push_tmpfile, 'w')
      if not tf then
        log.error('s3 sync: failed to create temp file')
        return
      end
      tf:write(content)
      tf:close()

      local push_cmd = base_cmd()
      vim.list_extend(push_cmd, { 's3', 'cp', push_tmpfile, 's3://' .. s3cfg.bucket .. '/' .. key })
      local push_result = util.system(push_cmd, { text = true })
      os.remove(push_tmpfile)

      if push_result.code ~= 0 then
        log.error('s3 sync push: ' .. (push_result.stderr or 'unknown error'))
        -- Even on push failure, finish() so pulled changes are rendered.
        util.finish(s)
        return
      end

      util.finish(s)
      log.info('s3 sync: pull ' .. util.fmt_counts({
        { created, 'added' },
        { updated, 'updated' },
      }) .. ' | push uploaded')
    end)
  end)
end
|
||||
|
||||
---Report S3 backend health: AWS CLI availability and bucket configuration.
---@return nil
function M.health()
  local has_cli = vim.fn.executable('aws') == 1
  if has_cli then
    vim.health.ok('aws CLI found')
  else
    vim.health.error('aws CLI not found (required for S3 sync)')
  end

  local s3cfg = get_config()
  local bucket = s3cfg and s3cfg.bucket
  if bucket then
    vim.health.ok('S3 bucket configured: ' .. bucket)
  else
    vim.health.warn('S3 bucket not configured — set sync.s3.bucket')
  end
end
|
||||
|
||||
M._ensure_sync_id = ensure_sync_id
|
||||
|
||||
return M
|
||||
|
|
@ -5,6 +5,10 @@ local log = require('pending.log')
|
|||
---@field stdout string
|
||||
---@field stderr string
|
||||
|
||||
---@class pending.CountPart
|
||||
---@field [1] integer
|
||||
---@field [2] string
|
||||
|
||||
---@class pending.sync.util
|
||||
local M = {}
|
||||
|
||||
|
|
@ -61,7 +65,7 @@ function M.finish(s)
|
|||
end
|
||||
end
|
||||
|
||||
---@param parts [integer, string][]
|
||||
---@param parts pending.CountPart[]
|
||||
---@return string
|
||||
function M.fmt_counts(parts)
|
||||
local items = {}
|
||||
|
|
|
|||
|
|
@ -185,8 +185,7 @@ function M.category_view(tasks)
|
|||
status = task.status,
|
||||
category = cat,
|
||||
priority = task.priority,
|
||||
overdue = task.status ~= 'done' and task.due ~= nil and parse.is_overdue(task.due)
|
||||
or nil,
|
||||
overdue = task.status ~= 'done' and task.due ~= nil and parse.is_overdue(task.due) or nil,
|
||||
recur = task.recur,
|
||||
})
|
||||
end
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue