fix: detect duplicate filenames in buffer

This commit is contained in:
Steven Arcangeli 2023-01-11 23:54:51 -08:00
parent 9a03af7cb7
commit bcb99ae95a
3 changed files with 55 additions and 3 deletions

View file

@@ -3,6 +3,8 @@ local M = {}
---@type boolean
M.is_windows = vim.loop.os_uname().version:match("Windows")
-- NOTE(review): `match()` returns string|nil, not a strict boolean — callers rely on truthiness only.
---@type boolean
M.is_mac = vim.loop.os_uname().sysname == "Darwin"
-- Path separator for the host OS (backslash on Windows, forward slash elsewhere).
---@type string
M.sep = M.is_windows and "\\" or "/"

View file

@@ -1,5 +1,6 @@
local cache = require("oil.cache")
local columns = require("oil.columns")
local fs = require("oil.fs")
local util = require("oil.util")
local view = require("oil.view")
local FIELD = require("oil.constants").FIELD
@@ -104,7 +105,7 @@ M.parse = function(bufnr)
local adapter = util.get_adapter(bufnr)
if not adapter then
table.insert(errors, {
lnum = 1,
lnum = 0,
col = 0,
message = string.format("Cannot parse buffer '%s': No adapter", bufname),
})
@@ -121,6 +122,18 @@ M.parse = function(bufnr)
original_entries[child[FIELD.name]] = child[FIELD.id]
end
end
-- Tracks every filename seen so far in this buffer (lowercased on
-- case-insensitive filesystems) so duplicates can be reported.
local seen_names = {}
-- Record `name` from buffer line `i`; append a "Duplicate filename"
-- diagnostic (0-indexed lnum) if it was already seen.
local function check_dupe(name, i)
  local key = name
  if fs.is_mac or fs.is_windows then
    -- mac and windows use case-insensitive filesystems
    key = key:lower()
  end
  if not seen_names[key] then
    seen_names[key] = true
  else
    table.insert(errors, { message = "Duplicate filename", lnum = i - 1, col = 0 })
  end
end
for i, line in ipairs(lines) do
if line:match("^/%d+") then
local parsed_entry, entry, err = M.parse_line(adapter, line, column_defs)
@@ -143,11 +156,12 @@ M.parse = function(bufnr)
end
table.insert(errors, {
message = message,
lnum = i,
lnum = i - 1,
col = 0,
})
goto continue
end
check_dupe(parsed_entry.name, i)
local meta = entry[FIELD.meta]
if original_entries[parsed_entry.name] == parsed_entry.id then
if entry[FIELD.type] == "link" and (not meta or meta.link ~= parsed_entry.link_target) then
@@ -187,7 +201,7 @@ M.parse = function(bufnr)
if vim.startswith(name, "/") then
table.insert(errors, {
message = "Paths cannot start with '/'",
lnum = i,
lnum = i - 1,
col = 0,
})
goto continue
@@ -200,6 +214,7 @@ M.parse = function(bufnr)
entry_type = "link"
name, link = unpack(link_pieces)
end
check_dupe(name, i)
table.insert(diffs, {
type = "new",
name = name,