refactor: organize tests and DRY (#49)
* refactor(store): convert singleton to Store.new() factory
Problem: store.lua used module-level _data singleton, making
project-local stores impossible and creating hidden global state.
Solution: introduce Store metatable with all operations as instance
methods. M.new(path) constructs an instance; M.resolve_path()
searches upward for .pending.json and falls back to
config.get().data_path. Singleton module API is removed.
* refactor(diff): accept store instance as parameter
Problem: diff.apply called store singleton methods directly, coupling
it to global state and preventing use with project-local stores.
Solution: change signature to apply(lines, s, hidden_ids?) where s is
a pending.Store instance. All store operations now go through s.
* refactor(buffer): add set_store/store accessors, drop singleton dep
Problem: buffer.lua imported store directly and called singleton
methods, preventing it from working with per-project store instances.
Solution: add module-level _store, M.set_store(s), and M.store()
accessors. open() and render() use _store instead of the singleton.
init.lua will call buffer.set_store(s) before buffer.open().
* refactor(complete,health,sync,plugin): update callers to store instance API
Problem: complete.lua, health.lua, sync/gcal.lua, and plugin/pending.lua
all called singleton store methods directly.
Solution: complete.lua uses buffer.store() for category lookups;
health.lua uses store.new(store.resolve_path()) and reports the
resolved path; gcal.lua calls require('pending').store() for task
access; plugin tab-completion creates ephemeral store instances via
store.new(store.resolve_path()). Add 'init' to the subcommands list.
* feat(init): thread Store instance through init, add :Pending init
Problem: init.lua called singleton store methods throughout, and there
was no way to create a project-local .pending.json file.
Solution: add module-level _store and private get_store() that
lazy-constructs via store.new(store.resolve_path()). Add public
M.store() accessor used by specs and sync backends. M.open() calls
buffer.set_store(get_store()) before buffer.open(). All store
callsites converted to get_store():method(). goto_file() and
add_here() derive the data directory from get_store().path.
Add M.init() which creates .pending.json in cwd and dispatches from
M.command() as ':Pending init'.
* test: update all specs for Store instance API
Problem: every spec used the old singleton API (store.unload(),
store.load(), store.add(), etc.) and diff.apply(lines, hidden).
Solution: lower-level specs (store, diff, views, complete, file) use
s = store.new(path); s:load() directly. Higher-level specs (archive,
edit, filter, status, sync) reset package.loaded['pending'] in
before_each and use pending.store() to access the live instance.
diff.apply calls updated to diff.apply(lines, s, hidden_ids).
* docs(pending): document :Pending init and store resolution
Add *pending-store-resolution* section explaining upward .pending.json
discovery and fallback to the global data_path. Document :Pending init
under COMMANDS. Add a cross-reference from the data_path config field.
* ci: format
* ci: remove unused variable
This commit is contained in:
parent
dbd76d6759
commit
41bda24570
19 changed files with 819 additions and 703 deletions
|
|
@ -5,31 +5,30 @@ local store = require('pending.store')
|
|||
|
||||
describe('store', function()
|
||||
local tmpdir
|
||||
local s
|
||||
|
||||
before_each(function()
|
||||
tmpdir = vim.fn.tempname()
|
||||
vim.fn.mkdir(tmpdir, 'p')
|
||||
vim.g.pending = { data_path = tmpdir .. '/tasks.json' }
|
||||
config.reset()
|
||||
store.unload()
|
||||
s = store.new(tmpdir .. '/tasks.json')
|
||||
s:load()
|
||||
end)
|
||||
|
||||
after_each(function()
|
||||
vim.fn.delete(tmpdir, 'rf')
|
||||
vim.g.pending = nil
|
||||
config.reset()
|
||||
end)
|
||||
|
||||
describe('load', function()
|
||||
it('returns empty data when no file exists', function()
|
||||
local data = store.load()
|
||||
local data = s:load()
|
||||
assert.are.equal(1, data.version)
|
||||
assert.are.equal(1, data.next_id)
|
||||
assert.are.same({}, data.tasks)
|
||||
end)
|
||||
|
||||
it('loads existing data', function()
|
||||
local path = config.get().data_path
|
||||
local path = tmpdir .. '/tasks.json'
|
||||
local f = io.open(path, 'w')
|
||||
f:write(vim.json.encode({
|
||||
version = 1,
|
||||
|
|
@ -52,7 +51,7 @@ describe('store', function()
|
|||
},
|
||||
}))
|
||||
f:close()
|
||||
local data = store.load()
|
||||
local data = s:load()
|
||||
assert.are.equal(3, data.next_id)
|
||||
assert.are.equal(2, #data.tasks)
|
||||
assert.are.equal('Pending one', data.tasks[1].description)
|
||||
|
|
@ -60,7 +59,7 @@ describe('store', function()
|
|||
end)
|
||||
|
||||
it('preserves unknown fields', function()
|
||||
local path = config.get().data_path
|
||||
local path = tmpdir .. '/tasks.json'
|
||||
local f = io.open(path, 'w')
|
||||
f:write(vim.json.encode({
|
||||
version = 1,
|
||||
|
|
@ -77,8 +76,8 @@ describe('store', function()
|
|||
},
|
||||
}))
|
||||
f:close()
|
||||
store.load()
|
||||
local task = store.get(1)
|
||||
s:load()
|
||||
local task = s:get(1)
|
||||
assert.is_not_nil(task._extra)
|
||||
assert.are.equal('hello', task._extra.custom_field)
|
||||
end)
|
||||
|
|
@ -86,9 +85,8 @@ describe('store', function()
|
|||
|
||||
describe('add', function()
|
||||
it('creates a task with incremented id', function()
|
||||
store.load()
|
||||
local t1 = store.add({ description = 'First' })
|
||||
local t2 = store.add({ description = 'Second' })
|
||||
local t1 = s:add({ description = 'First' })
|
||||
local t2 = s:add({ description = 'Second' })
|
||||
assert.are.equal(1, t1.id)
|
||||
assert.are.equal(2, t2.id)
|
||||
assert.are.equal('pending', t1.status)
|
||||
|
|
@ -96,60 +94,54 @@ describe('store', function()
|
|||
end)
|
||||
|
||||
it('uses provided category', function()
|
||||
store.load()
|
||||
local t = store.add({ description = 'Test', category = 'Work' })
|
||||
local t = s:add({ description = 'Test', category = 'Work' })
|
||||
assert.are.equal('Work', t.category)
|
||||
end)
|
||||
end)
|
||||
|
||||
describe('update', function()
|
||||
it('updates fields and sets modified', function()
|
||||
store.load()
|
||||
local t = store.add({ description = 'Original' })
|
||||
local t = s:add({ description = 'Original' })
|
||||
t.modified = '2025-01-01T00:00:00Z'
|
||||
store.update(t.id, { description = 'Updated' })
|
||||
local updated = store.get(t.id)
|
||||
s:update(t.id, { description = 'Updated' })
|
||||
local updated = s:get(t.id)
|
||||
assert.are.equal('Updated', updated.description)
|
||||
assert.is_not.equal('2025-01-01T00:00:00Z', updated.modified)
|
||||
end)
|
||||
|
||||
it('sets end timestamp on completion', function()
|
||||
store.load()
|
||||
local t = store.add({ description = 'Test' })
|
||||
local t = s:add({ description = 'Test' })
|
||||
assert.is_nil(t['end'])
|
||||
store.update(t.id, { status = 'done' })
|
||||
local updated = store.get(t.id)
|
||||
s:update(t.id, { status = 'done' })
|
||||
local updated = s:get(t.id)
|
||||
assert.is_not_nil(updated['end'])
|
||||
end)
|
||||
|
||||
it('does not overwrite id or entry', function()
|
||||
store.load()
|
||||
local t = store.add({ description = 'Immutable fields' })
|
||||
local t = s:add({ description = 'Immutable fields' })
|
||||
local original_id = t.id
|
||||
local original_entry = t.entry
|
||||
store.update(t.id, { id = 999, entry = 'x' })
|
||||
local updated = store.get(original_id)
|
||||
s:update(t.id, { id = 999, entry = 'x' })
|
||||
local updated = s:get(original_id)
|
||||
assert.are.equal(original_id, updated.id)
|
||||
assert.are.equal(original_entry, updated.entry)
|
||||
end)
|
||||
|
||||
it('does not overwrite end on second completion', function()
|
||||
store.load()
|
||||
local t = store.add({ description = 'Complete twice' })
|
||||
store.update(t.id, { status = 'done', ['end'] = '2026-01-15T10:00:00Z' })
|
||||
local first_end = store.get(t.id)['end']
|
||||
store.update(t.id, { status = 'done' })
|
||||
local task = store.get(t.id)
|
||||
local t = s:add({ description = 'Complete twice' })
|
||||
s:update(t.id, { status = 'done', ['end'] = '2026-01-15T10:00:00Z' })
|
||||
local first_end = s:get(t.id)['end']
|
||||
s:update(t.id, { status = 'done' })
|
||||
local task = s:get(t.id)
|
||||
assert.are.equal(first_end, task['end'])
|
||||
end)
|
||||
end)
|
||||
|
||||
describe('delete', function()
|
||||
it('marks task as deleted', function()
|
||||
store.load()
|
||||
local t = store.add({ description = 'To delete' })
|
||||
store.delete(t.id)
|
||||
local deleted = store.get(t.id)
|
||||
local t = s:add({ description = 'To delete' })
|
||||
s:delete(t.id)
|
||||
local deleted = s:get(t.id)
|
||||
assert.are.equal('deleted', deleted.status)
|
||||
assert.is_not_nil(deleted['end'])
|
||||
end)
|
||||
|
|
@ -157,12 +149,10 @@ describe('store', function()
|
|||
|
||||
describe('save and round-trip', function()
|
||||
it('persists and reloads correctly', function()
|
||||
store.load()
|
||||
store.add({ description = 'Persisted', category = 'Work', priority = 1 })
|
||||
store.save()
|
||||
store.unload()
|
||||
store.load()
|
||||
local tasks = store.active_tasks()
|
||||
s:add({ description = 'Persisted', category = 'Work', priority = 1 })
|
||||
s:save()
|
||||
s:load()
|
||||
local tasks = s:active_tasks()
|
||||
assert.are.equal(1, #tasks)
|
||||
assert.are.equal('Persisted', tasks[1].description)
|
||||
assert.are.equal('Work', tasks[1].category)
|
||||
|
|
@ -170,7 +160,7 @@ describe('store', function()
|
|||
end)
|
||||
|
||||
it('round-trips unknown fields', function()
|
||||
local path = config.get().data_path
|
||||
local path = tmpdir .. '/tasks.json'
|
||||
local f = io.open(path, 'w')
|
||||
f:write(vim.json.encode({
|
||||
version = 1,
|
||||
|
|
@ -187,45 +177,38 @@ describe('store', function()
|
|||
},
|
||||
}))
|
||||
f:close()
|
||||
store.load()
|
||||
store.save()
|
||||
store.unload()
|
||||
store.load()
|
||||
local task = store.get(1)
|
||||
s:load()
|
||||
s:save()
|
||||
s:load()
|
||||
local task = s:get(1)
|
||||
assert.are.equal('abc123', task._extra._gcal_event_id)
|
||||
end)
|
||||
end)
|
||||
|
||||
describe('recurrence fields', function()
|
||||
it('persists recur and recur_mode through round-trip', function()
|
||||
store.load()
|
||||
store.add({ description = 'Recurring', recur = 'weekly', recur_mode = 'scheduled' })
|
||||
store.save()
|
||||
store.unload()
|
||||
store.load()
|
||||
local task = store.get(1)
|
||||
s:add({ description = 'Recurring', recur = 'weekly', recur_mode = 'scheduled' })
|
||||
s:save()
|
||||
s:load()
|
||||
local task = s:get(1)
|
||||
assert.are.equal('weekly', task.recur)
|
||||
assert.are.equal('scheduled', task.recur_mode)
|
||||
end)
|
||||
|
||||
it('persists recur without recur_mode', function()
|
||||
store.load()
|
||||
store.add({ description = 'Simple recur', recur = 'daily' })
|
||||
store.save()
|
||||
store.unload()
|
||||
store.load()
|
||||
local task = store.get(1)
|
||||
s:add({ description = 'Simple recur', recur = 'daily' })
|
||||
s:save()
|
||||
s:load()
|
||||
local task = s:get(1)
|
||||
assert.are.equal('daily', task.recur)
|
||||
assert.is_nil(task.recur_mode)
|
||||
end)
|
||||
|
||||
it('omits recur fields when not set', function()
|
||||
store.load()
|
||||
store.add({ description = 'No recur' })
|
||||
store.save()
|
||||
store.unload()
|
||||
store.load()
|
||||
local task = store.get(1)
|
||||
s:add({ description = 'No recur' })
|
||||
s:save()
|
||||
s:load()
|
||||
local task = s:get(1)
|
||||
assert.is_nil(task.recur)
|
||||
assert.is_nil(task.recur_mode)
|
||||
end)
|
||||
|
|
@ -233,11 +216,10 @@ describe('store', function()
|
|||
|
||||
describe('active_tasks', function()
|
||||
it('excludes deleted tasks', function()
|
||||
store.load()
|
||||
store.add({ description = 'Active' })
|
||||
local t2 = store.add({ description = 'To delete' })
|
||||
store.delete(t2.id)
|
||||
local active = store.active_tasks()
|
||||
s:add({ description = 'Active' })
|
||||
local t2 = s:add({ description = 'To delete' })
|
||||
s:delete(t2.id)
|
||||
local active = s:active_tasks()
|
||||
assert.are.equal(1, #active)
|
||||
assert.are.equal('Active', active[1].description)
|
||||
end)
|
||||
|
|
@ -245,27 +227,24 @@ describe('store', function()
|
|||
|
||||
describe('snapshot', function()
|
||||
it('returns a table of tasks', function()
|
||||
store.load()
|
||||
store.add({ description = 'Snap one' })
|
||||
store.add({ description = 'Snap two' })
|
||||
local snap = store.snapshot()
|
||||
s:add({ description = 'Snap one' })
|
||||
s:add({ description = 'Snap two' })
|
||||
local snap = s:snapshot()
|
||||
assert.are.equal(2, #snap)
|
||||
end)
|
||||
|
||||
it('returns a copy that does not affect the store', function()
|
||||
store.load()
|
||||
local t = store.add({ description = 'Original' })
|
||||
local snap = store.snapshot()
|
||||
local t = s:add({ description = 'Original' })
|
||||
local snap = s:snapshot()
|
||||
snap[1].description = 'Mutated'
|
||||
local live = store.get(t.id)
|
||||
local live = s:get(t.id)
|
||||
assert.are.equal('Original', live.description)
|
||||
end)
|
||||
|
||||
it('excludes deleted tasks', function()
|
||||
store.load()
|
||||
local t = store.add({ description = 'Will be deleted' })
|
||||
store.delete(t.id)
|
||||
local snap = store.snapshot()
|
||||
local t = s:add({ description = 'Will be deleted' })
|
||||
s:delete(t.id)
|
||||
local snap = s:snapshot()
|
||||
assert.are.equal(0, #snap)
|
||||
end)
|
||||
end)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue