feat: new adapter for S3 buckets (#677)
* Added s3 support * Save work * Various bug fixes * Minor cleanup * Minor bug fixes * Fix typo * Update following feedback + minor bug fix * Fix CI * Cleanup and remove bucket entry_type * Make suggested changes * Better aws existence check * Fix typo * refactor: don't bother caching aws executable status --------- Co-authored-by: Steven Arcangeli <506791+stevearc@users.noreply.github.com>
This commit is contained in:
parent
01cb3a8ad7
commit
e5bd931edb
6 changed files with 565 additions and 4 deletions
12
README.md
12
README.md
|
|
@ -242,6 +242,8 @@ require("oil").setup({
|
||||||
},
|
},
|
||||||
-- Extra arguments to pass to SCP when moving/copying files over SSH
|
-- Extra arguments to pass to SCP when moving/copying files over SSH
|
||||||
extra_scp_args = {},
|
extra_scp_args = {},
|
||||||
|
-- Extra arguments to pass to aws s3 when creating/deleting/moving/copying files using aws s3
|
||||||
|
extra_s3_args = {},
|
||||||
-- EXPERIMENTAL support for performing file operations with git
|
-- EXPERIMENTAL support for performing file operations with git
|
||||||
git = {
|
git = {
|
||||||
-- Return true to automatically git add/mv/rm files
|
-- Return true to automatically git add/mv/rm files
|
||||||
|
|
@ -355,6 +357,16 @@ This may look familiar. In fact, this is the same url format that netrw uses.
|
||||||
|
|
||||||
Note that at the moment the ssh adapter does not support Windows machines, and it requires the server to have a `/bin/sh` binary as well as standard unix commands (`ls`, `rm`, `mv`, `mkdir`, `chmod`, `cp`, `touch`, `ln`, `echo`).
|
Note that at the moment the ssh adapter does not support Windows machines, and it requires the server to have a `/bin/sh` binary as well as standard unix commands (`ls`, `rm`, `mv`, `mkdir`, `chmod`, `cp`, `touch`, `ln`, `echo`).
|
||||||
|
|
||||||
|
### S3
|
||||||
|
|
||||||
|
This adapter allows you to browse files stored in AWS S3. To use it, make sure the `aws` CLI is set up correctly, then open a buffer using the following name template:
|
||||||
|
|
||||||
|
```
|
||||||
|
nvim oil-s3://[bucket]/[path]
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that older versions of Neovim do not handle numbers in the url scheme correctly, so on Neovim 0.11 and older the url starts with `oil-sss` instead.
|
||||||
|
|
||||||
## Recipes
|
## Recipes
|
||||||
|
|
||||||
- [Toggle file detail view](doc/recipes.md#toggle-file-detail-view)
|
- [Toggle file detail view](doc/recipes.md#toggle-file-detail-view)
|
||||||
|
|
|
||||||
|
|
@ -124,6 +124,8 @@ CONFIG *oil-confi
|
||||||
},
|
},
|
||||||
-- Extra arguments to pass to SCP when moving/copying files over SSH
|
-- Extra arguments to pass to SCP when moving/copying files over SSH
|
||||||
extra_scp_args = {},
|
extra_scp_args = {},
|
||||||
|
-- Extra arguments to pass to aws s3 when creating/deleting/moving/copying files using aws s3
|
||||||
|
extra_s3_args = {},
|
||||||
-- EXPERIMENTAL support for performing file operations with git
|
-- EXPERIMENTAL support for performing file operations with git
|
||||||
git = {
|
git = {
|
||||||
-- Return true to automatically git add/mv/rm files
|
-- Return true to automatically git add/mv/rm files
|
||||||
|
|
@ -428,7 +430,7 @@ icon *column-ico
|
||||||
the icon
|
the icon
|
||||||
|
|
||||||
size *column-size*
|
size *column-size*
|
||||||
Adapters: files, ssh
|
Adapters: files, ssh, s3
|
||||||
Sortable: this column can be used in view_props.sort
|
Sortable: this column can be used in view_props.sort
|
||||||
The size of the file
|
The size of the file
|
||||||
|
|
||||||
|
|
@ -476,7 +478,7 @@ atime *column-atim
|
||||||
{format} `string` Format string (see :help strftime)
|
{format} `string` Format string (see :help strftime)
|
||||||
|
|
||||||
birthtime *column-birthtime*
|
birthtime *column-birthtime*
|
||||||
Adapters: files
|
Adapters: files, s3
|
||||||
Sortable: this column can be used in view_props.sort
|
Sortable: this column can be used in view_props.sort
|
||||||
The time the file was created
|
The time the file was created
|
||||||
|
|
||||||
|
|
|
||||||
389
lua/oil/adapters/s3.lua
Normal file
389
lua/oil/adapters/s3.lua
Normal file
|
|
@ -0,0 +1,389 @@
|
||||||
|
local config = require("oil.config")
|
||||||
|
local constants = require("oil.constants")
|
||||||
|
local files = require("oil.adapters.files")
|
||||||
|
local fs = require("oil.fs")
|
||||||
|
local loading = require("oil.loading")
|
||||||
|
local pathutil = require("oil.pathutil")
|
||||||
|
local s3fs = require("oil.adapters.s3.s3fs")
|
||||||
|
local util = require("oil.util")
|
||||||
|
local M = {}
|
||||||
|
|
||||||
|
local FIELD_META = constants.FIELD_META
|
||||||
|
|
||||||
|
---@class (exact) oil.s3Url
|
||||||
|
---@field scheme string
|
||||||
|
---@field bucket nil|string
|
||||||
|
---@field path nil|string
|
||||||
|
|
||||||
|
---Parse an oil buffer name ("oil-s3://bucket/key") into its components.
---@param oil_url string
---@return oil.s3Url
M.parse_url = function(oil_url)
  local scheme, rest = util.parse_url(oil_url)
  assert(scheme and rest, string.format("Malformed input url '%s'", oil_url))
  local parsed = { scheme = scheme }
  local bucket, path = rest:match("^([^/]+)/?(.*)$")
  parsed.bucket = bucket
  if path ~= "" then
    parsed.path = path
  end
  -- Sanity check: a path without a bucket should be impossible given the
  -- pattern above, but fail loudly if it ever happens
  if parsed.path and not parsed.bucket then
    error(string.format("Parsing error for s3 url: %s", oil_url))
  end
  ---@cast parsed oil.s3Url
  return parsed
end
|
||||||
|
|
||||||
|
---Serialize a parsed s3 url back into its oil buffer-name form.
---@param url oil.s3Url
---@return string
local function url_to_str(url)
  local str = url.scheme
  if url.bucket then
    assert(url.bucket ~= "")
    str = str .. url.bucket .. "/"
  end
  if url.path then
    assert(url.path ~= "")
    str = str .. url.path
  end
  return str
end
|
||||||
|
|
||||||
|
---Convert a parsed url into the "s3://bucket/key" form the aws CLI expects.
---@param url oil.s3Url
---@param is_folder boolean
---@return string
local function url_to_s3(url, is_folder)
  local str = "s3://"
  if url.bucket then
    assert(url.bucket ~= "")
    str = str .. url.bucket .. "/"
  end
  if url.path then
    assert(url.path ~= "")
    str = str .. url.path
    -- aws s3 addresses a "folder" (key prefix) with a trailing slash
    if is_folder and str:sub(-1) ~= "/" then
      str = str .. "/"
    end
  end
  return str
end
|
||||||
|
|
||||||
|
---A url refers to the bucket itself when it has a bucket but no path.
---@param url oil.s3Url
---@return boolean
local function is_bucket(url)
  assert(url.bucket and url.bucket ~= "")
  if not url.path then
    return true
  end
  assert(url.path ~= "")
  return false
end
|
||||||
|
|
||||||
|
-- Column definitions supported by the s3 adapter. Each entry follows the
-- oil.ColumnDefinition shape: render/parse/get_sort_value.
local s3_columns = {}

-- Object size in bytes (meta.size, parsed from `aws s3 ls` output),
-- rendered in human-readable k/M/G units.
s3_columns.size = {
  render = function(entry, conf)
    local meta = entry[FIELD_META]
    if not meta or not meta.size then
      return ""
    elseif meta.size >= 1e9 then
      return string.format("%.1fG", meta.size / 1e9)
    elseif meta.size >= 1e6 then
      return string.format("%.1fM", meta.size / 1e6)
    elseif meta.size >= 1e3 then
      return string.format("%.1fk", meta.size / 1e3)
    else
      return string.format("%d", meta.size)
    end
  end,

  parse = function(line, conf)
    -- Matches the rendered size token ("12", "1.5M", ...) followed by the rest
    return line:match("^(%d+%S*)%s+(.*)$")
  end,

  get_sort_value = function(entry)
    local meta = entry[FIELD_META]
    if meta and meta.size then
      return meta.size
    else
      return 0
    end
  end,
}

-- Object timestamp as reported by `aws s3 ls` ("YYYY-MM-DD HH:MM:SS" string
-- stored in meta.date).
s3_columns.birthtime = {
  render = function(entry, conf)
    local meta = entry[FIELD_META]
    if not meta or not meta.date then
      return ""
    else
      return meta.date
    end
  end,

  parse = function(line, conf)
    return line:match("^(%d+%-%d+%-%d+%s%d+:%d+:%d+)%s+(.*)$")
  end,

  get_sort_value = function(entry)
    local meta = entry[FIELD_META]
    if meta and meta.date then
      -- Convert the date string to epoch seconds so sorting is chronological
      local year, month, day, hour, min, sec =
        meta.date:match("^(%d+)%-(%d+)%-(%d+)%s(%d+):(%d+):(%d+)$")
      local time =
        os.time({ year = year, month = month, day = day, hour = hour, min = min, sec = sec })
      return time
    else
      return 0
    end
  end,
}
|
||||||
|
|
||||||
|
---Look up a column definition supported by this adapter ("size", "birthtime").
---Returns nil for columns s3 cannot provide.
---@param name string
---@return nil|oil.ColumnDefinition
M.get_column = function(name)
  return s3_columns[name]
end
|
||||||
|
|
||||||
|
---Compute the url of the parent "directory" of an oil-s3 buffer name.
---Stripping the last path segment; at bucket level, the parent is the
---top-level bucket listing.
---@param bufname string
---@return string
M.get_parent = function(bufname)
  local parsed = M.parse_url(bufname)
  if not parsed.path then
    -- Already at a bucket; parent is the list of all buckets
    parsed.bucket = nil
  else
    assert(parsed.path ~= "")
    local parent = pathutil.parent(parsed.path)
    if parent == "" then
      parsed.path = nil
    else
      parsed.path = parent
    end
  end
  return url_to_str(parsed)
end
|
||||||
|
|
||||||
|
---Canonicalize a url by round-tripping it through the parser/serializer.
---@param url string
---@param callback fun(url: string)
M.normalize_url = function(url, callback)
  callback(url_to_str(M.parse_url(url)))
end
|
||||||
|
|
||||||
|
---List entries under a url: buckets at the top level, objects/prefixes below.
---@param url string
---@param column_defs string[]
---@param callback fun(err?: string, entries?: oil.InternalEntry[], fetch_more?: fun())
M.list = function(url, column_defs, callback)
  -- Checked on every call rather than cached; cheap, and reflects PATH
  -- changes made while Neovim is running
  if vim.fn.executable("aws") ~= 1 then
    callback("`aws` is not executable. Can you run `aws s3 ls`?")
    return
  end

  local res = M.parse_url(url)
  -- is_folder=true appends a trailing "/" so `aws s3 ls` lists the prefix contents
  s3fs.list_dir(url, url_to_s3(res, true), callback)
end
|
||||||
|
|
||||||
|
---Whether the oil buffer may be edited.
---@param bufnr integer
---@return boolean
M.is_modifiable = function(bufnr)
  -- Default assumption: every s3 location is modifiable
  return true
end
|
||||||
|
|
||||||
|
---Produce the one-line human-readable description of a pending action, shown
---in the confirmation window before changes are applied.
---@param action oil.Action
---@return string
M.render_action = function(action)
  local is_folder = action.entry_type == "directory"
  if action.type == "create" then
    local res = M.parse_url(action.url)
    -- Call out bucket-level operations explicitly; the "BUCKET" token is
    -- matched by the oilCreate/oilDelete syntax rules
    local extra = is_bucket(res) and "BUCKET " or ""
    return string.format("CREATE %s%s", extra, url_to_s3(res, is_folder))
  elseif action.type == "delete" then
    local res = M.parse_url(action.url)
    local extra = is_bucket(res) and "BUCKET " or ""
    return string.format("DELETE %s%s", extra, url_to_s3(res, is_folder))
  elseif action.type == "move" or action.type == "copy" then
    local src = action.src_url
    local dest = action.dest_url
    -- For cross-adapter transfers, render the local side as a short OS path
    -- and the s3 side as an s3:// uri
    if config.get_adapter_by_scheme(src) ~= M then
      local _, path = util.parse_url(src)
      assert(path)
      src = files.to_short_os_path(path, action.entry_type)
      dest = url_to_s3(M.parse_url(dest), is_folder)
    elseif config.get_adapter_by_scheme(dest) ~= M then
      local _, path = util.parse_url(dest)
      assert(path)
      dest = files.to_short_os_path(path, action.entry_type)
      src = url_to_s3(M.parse_url(src), is_folder)
    end
    -- Leading space matches the oilMove/oilCopy syntax patterns (/^ MOVE /)
    return string.format(" %s %s -> %s", action.type:upper(), src, dest)
  else
    error(string.format("Bad action type: '%s'", action.type))
  end
end
|
||||||
|
|
||||||
|
---Execute a confirmed action against s3 via the aws CLI.
---@param action oil.Action
---@param cb fun(err: nil|string)
M.perform_action = function(action, cb)
  local is_folder = action.entry_type == "directory"

  ---Resolve an oil url into the string the aws CLI expects: an s3:// uri for
  ---urls owned by this adapter, or a plain path for the local files adapter.
  ---@param url string
  ---@return string
  local function resolve(url)
    if config.get_adapter_by_scheme(url) == M then
      return url_to_s3(M.parse_url(url), is_folder)
    else
      local _, path = util.parse_url(url)
      assert(path)
      return path
    end
  end

  if action.type == "create" then
    local res = M.parse_url(action.url)
    local bucket = is_bucket(res)

    if action.entry_type == "directory" and bucket then
      -- Creating a top-level "directory" means creating a new bucket
      s3fs.mb(url_to_s3(res, true), cb)
    elseif action.entry_type == "directory" or action.entry_type == "file" then
      s3fs.touch(url_to_s3(res, is_folder), cb)
    else
      cb(string.format("Bad entry type on s3 create action: %s", action.entry_type))
    end
  elseif action.type == "delete" then
    local res = M.parse_url(action.url)
    local bucket = is_bucket(res)

    if action.entry_type == "directory" and bucket then
      s3fs.rb(url_to_s3(res, true), cb)
    elseif action.entry_type == "directory" or action.entry_type == "file" then
      s3fs.rm(url_to_s3(res, is_folder), is_folder, cb)
    else
      cb(string.format("Bad entry type on s3 delete action: %s", action.entry_type))
    end
  elseif action.type == "move" then
    local src_adapter = assert(config.get_adapter_by_scheme(action.src_url))
    local dest_adapter = assert(config.get_adapter_by_scheme(action.dest_url))
    if
      (src_adapter ~= M and src_adapter ~= files) or (dest_adapter ~= M and dest_adapter ~= files)
    then
      cb(
        string.format(
          "We should never attempt to move from the %s adapter to the %s adapter.",
          src_adapter.name,
          dest_adapter.name
        )
      )
      -- BUGFIX: previously execution fell through and performed the move
      -- anyway after reporting this error
      return
    end

    s3fs.mv(resolve(action.src_url), resolve(action.dest_url), is_folder, cb)
  elseif action.type == "copy" then
    local src_adapter = assert(config.get_adapter_by_scheme(action.src_url))
    local dest_adapter = assert(config.get_adapter_by_scheme(action.dest_url))
    if
      (src_adapter ~= M and src_adapter ~= files) or (dest_adapter ~= M and dest_adapter ~= files)
    then
      cb(
        string.format(
          "We should never attempt to copy from the %s adapter to the %s adapter.",
          src_adapter.name,
          dest_adapter.name
        )
      )
      -- BUGFIX: bail out instead of falling through to the copy below
      return
    end

    s3fs.cp(resolve(action.src_url), resolve(action.dest_url), is_folder, cb)
  else
    cb(string.format("Bad action type: %s", action.type))
  end
end
|
||||||
|
|
||||||
|
-- Cross-adapter operations this adapter advertises: moving between the local
-- "files" adapter and s3
M.supported_cross_adapter_actions = { files = "move" }
|
||||||
|
|
||||||
|
---Download the s3 object into a local scratch file and load it into the
---buffer. Called when the user opens an oil-s3:// url that points at a file.
---@param bufnr integer
M.read_file = function(bufnr)
  loading.set_loading(bufnr, true)
  local bufname = vim.api.nvim_buf_get_name(bufnr)
  local url = M.parse_url(bufname)
  local basename = pathutil.basename(bufname)
  local cache_dir = vim.fn.stdpath("cache")
  assert(type(cache_dir) == "string")
  local tmpdir = fs.join(cache_dir, "oil")
  fs.mkdirp(tmpdir)
  -- mkstemp opens the file; we only need the name, so close the fd right away
  local fd, tmpfile = vim.loop.fs_mkstemp(fs.join(tmpdir, "s3_XXXXXX"))
  if fd then
    vim.loop.fs_close(fd)
  end
  local tmp_bufnr = vim.fn.bufadd(tmpfile)

  s3fs.cp(url_to_s3(url, false), tmpfile, false, function(err)
    loading.set_loading(bufnr, false)
    vim.bo[bufnr].modifiable = true
    vim.cmd.doautocmd({ args = { "BufReadPre", bufname }, mods = { silent = true } })
    if err then
      -- Surface the aws error message in the buffer itself
      vim.api.nvim_buf_set_lines(bufnr, 0, -1, true, vim.split(err, "\n"))
    else
      vim.api.nvim_buf_set_lines(bufnr, 0, -1, true, {})
      vim.api.nvim_buf_call(bufnr, function()
        vim.cmd.read({ args = { tmpfile }, mods = { silent = true } })
      end)
      vim.loop.fs_unlink(tmpfile)
      -- :read inserts below line 1, leaving a stray empty first line; drop it
      vim.api.nvim_buf_set_lines(bufnr, 0, 1, true, {})
    end
    vim.bo[bufnr].modified = false
    -- Detect filetype from the original basename, not the tmpfile name
    local filetype = vim.filetype.match({ buf = bufnr, filename = basename })
    if filetype then
      vim.bo[bufnr].filetype = filetype
    end
    vim.cmd.doautocmd({ args = { "BufReadPost", bufname }, mods = { silent = true } })
    vim.api.nvim_buf_delete(tmp_bufnr, { force = true })
  end)
end
|
||||||
|
|
||||||
|
---Upload the buffer contents to s3 when an oil-s3 file buffer is :written.
---Writes the buffer to a local scratch file, then copies it up with aws s3.
---@param bufnr integer
M.write_file = function(bufnr)
  local bufname = vim.api.nvim_buf_get_name(bufnr)
  local url = M.parse_url(bufname)
  local cache_dir = vim.fn.stdpath("cache")
  assert(type(cache_dir) == "string")
  local tmpdir = fs.join(cache_dir, "oil")
  -- BUGFIX: ensure the scratch directory exists (read_file does this; a write
  -- issued before any read would otherwise fail inside fs_mkstemp).
  -- Template also normalized to "s3_XXXXXX" for consistency with read_file.
  fs.mkdirp(tmpdir)
  local fd, tmpfile = vim.loop.fs_mkstemp(fs.join(tmpdir, "s3_XXXXXX"))
  if fd then
    vim.loop.fs_close(fd)
  end
  vim.cmd.doautocmd({ args = { "BufWritePre", bufname }, mods = { silent = true } })
  -- Lock the buffer while the async upload is in flight
  vim.bo[bufnr].modifiable = false
  vim.cmd.write({ args = { tmpfile }, bang = true, mods = { silent = true, noautocmd = true } })
  local tmp_bufnr = vim.fn.bufadd(tmpfile)

  s3fs.cp(tmpfile, url_to_s3(url, false), false, function(err)
    vim.bo[bufnr].modifiable = true
    if err then
      vim.notify(string.format("Error writing file: %s", err), vim.log.levels.ERROR)
    else
      vim.bo[bufnr].modified = false
      vim.cmd.doautocmd({ args = { "BufWritePost", bufname }, mods = { silent = true } })
    end
    -- Clean up the local scratch copy regardless of success
    vim.loop.fs_unlink(tmpfile)
    vim.api.nvim_buf_delete(tmp_bufnr, { force = true })
  end)
end
|
||||||
|
|
||||||
|
return M
|
||||||
149
lua/oil/adapters/s3/s3fs.lua
Normal file
149
lua/oil/adapters/s3/s3fs.lua
Normal file
|
|
@ -0,0 +1,149 @@
|
||||||
|
local cache = require("oil.cache")
|
||||||
|
local config = require("oil.config")
|
||||||
|
local constants = require("oil.constants")
|
||||||
|
local shell = require("oil.shell")
|
||||||
|
local util = require("oil.util")
|
||||||
|
|
||||||
|
local M = {}
|
||||||
|
|
||||||
|
local FIELD_META = constants.FIELD_META
|
||||||
|
|
||||||
|
---Parse one line of top-level `aws s3 ls` output (a bucket listing line,
---e.g. "2023-01-02 03:04:05 bucket-name").
---@param line string
---@return string Name of entry
---@return oil.EntryType
---@return table Metadata for entry
local function parse_ls_line_bucket(line)
  local date, name = line:match("^(%d+%-%d+%-%d+%s%d+:%d+:%d+)%s+(.*)$")
  if date and name then
    return name, "directory", { date = date }
  end
  error(string.format("Could not parse '%s'", line))
end
|
||||||
|
|
||||||
|
---Parse one line of `aws s3 ls` output within a bucket. Prefix (folder)
---lines look like "   PRE name/"; object lines look like
---"2023-01-02 03:04:05   123 name".
---@param line string
---@return string Name of entry
---@return oil.EntryType
---@return table Metadata for entry
local function parse_ls_line_file(line)
  local prefix = line:match("^%s+PRE%s+(.*)/$")
  if prefix then
    -- Key prefixes carry no size/date metadata
    return prefix, "directory", {}
  end
  local date, size, name = line:match("^(%d+%-%d+%-%d+%s%d+:%d+:%d+)%s+(%d+)%s+(.*)$")
  if not name then
    error(string.format("Could not parse '%s'", line))
  end
  return name, "file", { date = date, size = tonumber(size) }
end
|
||||||
|
|
||||||
|
---Build the full argv for an aws s3 invocation, appending any
---user-configured extra arguments from config.extra_s3_args.
---@param cmd string[] cmd and flags
---@return string[] Shell command to run
local function create_s3_command(cmd)
  local argv = { "aws", "s3" }
  for _, arg in ipairs(cmd) do
    argv[#argv + 1] = arg
  end
  for _, arg in ipairs(config.extra_s3_args) do
    argv[#argv + 1] = arg
  end
  return argv
end
|
||||||
|
|
||||||
|
---Run `aws s3 ls` against a path and convert its output into cache entries.
---@param url string Oil url of the directory being listed (used for cache keys)
---@param path string The "s3://..." path passed to the aws CLI
---@param callback fun(err?: string, entries?: oil.InternalEntry[], fetch_more?: fun())
function M.list_dir(url, path, callback)
  local cmd = create_s3_command({ "ls", path, "--color=off", "--no-cli-pager" })
  shell.run(cmd, function(err, lines)
    if err then
      return callback(err)
    end
    assert(lines)
    local cache_entries = {}
    local url_path, _
    _, url_path = util.parse_url(url)
    -- Top level (bucket listing) when the url has no path or the path
    -- contains no "/" separator; the two levels use different ls formats.
    -- NOTE(review): assumes bucket-level urls are normalized with a trailing
    -- "/" so they contain a separator — verify against normalize_url callers.
    local is_top_level = url_path == nil or url_path:match("/") == nil
    local parse_ls_line = is_top_level and parse_ls_line_bucket or parse_ls_line_file
    for _, line in ipairs(lines) do
      if line ~= "" then
        local name, type, meta = parse_ls_line(line)
        -- in s3 '-' can be used to create an "empty folder"
        if name ~= "-" then
          local cache_entry = cache.create_entry(url, name, type)
          table.insert(cache_entries, cache_entry)
          cache_entry[FIELD_META] = meta
        end
      end
    end
    callback(nil, cache_entries)
  end)
end
|
||||||
|
|
||||||
|
--- Create files
--- Creates an empty object at `path` by copying from an empty stdin.
---@param path string
---@param callback fun(err: nil|string)
function M.touch(path, callback)
  -- here "-" means that we copy from stdin
  local cmd = create_s3_command({ "cp", "-", path })
  shell.run(cmd, { stdin = "null" }, callback)
end
|
||||||
|
|
||||||
|
--- Remove files
---@param path string
---@param is_folder boolean
---@param callback fun(err: nil|string)
function M.rm(path, is_folder, callback)
  local args = { "rm", path }
  if is_folder then
    -- A "folder" is a key prefix; delete everything beneath it
    args[#args + 1] = "--recursive"
  end
  shell.run(create_s3_command(args), callback)
end
|
||||||
|
|
||||||
|
--- Remove bucket
---@param bucket string
---@param callback fun(err: nil|string)
function M.rb(bucket, callback)
  shell.run(create_s3_command({ "rb", bucket }), callback)
end
|
||||||
|
|
||||||
|
--- Make bucket
---@param bucket string
---@param callback fun(err: nil|string)
function M.mb(bucket, callback)
  shell.run(create_s3_command({ "mb", bucket }), callback)
end
|
||||||
|
|
||||||
|
--- Move files
---@param src string
---@param dest string
---@param is_folder boolean
---@param callback fun(err: nil|string)
function M.mv(src, dest, is_folder, callback)
  local args = { "mv", src, dest }
  if is_folder then
    args[#args + 1] = "--recursive"
  end
  shell.run(create_s3_command(args), callback)
end
|
||||||
|
|
||||||
|
--- Copy files
---@param src string
---@param dest string
---@param is_folder boolean
---@param callback fun(err: nil|string)
function M.cp(src, dest, is_folder, callback)
  local args = { "cp", src, dest }
  if is_folder then
    args[#args + 1] = "--recursive"
  end
  shell.run(create_s3_command(args), callback)
end
|
||||||
|
|
||||||
|
return M
|
||||||
|
|
@ -107,6 +107,8 @@ local default_config = {
|
||||||
},
|
},
|
||||||
-- Extra arguments to pass to SCP when moving/copying files over SSH
|
-- Extra arguments to pass to SCP when moving/copying files over SSH
|
||||||
extra_scp_args = {},
|
extra_scp_args = {},
|
||||||
|
-- Extra arguments to pass to aws s3 when creating/deleting/moving/copying files using aws s3
|
||||||
|
extra_s3_args = {},
|
||||||
-- EXPERIMENTAL support for performing file operations with git
|
-- EXPERIMENTAL support for performing file operations with git
|
||||||
git = {
|
git = {
|
||||||
-- Return true to automatically git add/mv/rm files
|
-- Return true to automatically git add/mv/rm files
|
||||||
|
|
@ -204,9 +206,14 @@ local default_config = {
|
||||||
-- The adapter API hasn't really stabilized yet. We're not ready to advertise or encourage people to
|
-- The adapter API hasn't really stabilized yet. We're not ready to advertise or encourage people to
|
||||||
-- write their own adapters, and so there's no real reason to edit these config options. For that
|
-- write their own adapters, and so there's no real reason to edit these config options. For that
|
||||||
-- reason, I'm taking them out of the section above so they won't show up in the autogen docs.
|
-- reason, I'm taking them out of the section above so they won't show up in the autogen docs.
|
||||||
|
|
||||||
|
-- not "oil-s3://" on older neovim versions, since it doesn't open buffers correctly with a number
|
||||||
|
-- in the name
|
||||||
|
local oil_s3_string = vim.fn.has("nvim-0.12") == 1 and "oil-s3://" or "oil-sss://"
|
||||||
default_config.adapters = {
|
default_config.adapters = {
|
||||||
["oil://"] = "files",
|
["oil://"] = "files",
|
||||||
["oil-ssh://"] = "ssh",
|
["oil-ssh://"] = "ssh",
|
||||||
|
[oil_s3_string] = "s3",
|
||||||
["oil-trash://"] = "trash",
|
["oil-trash://"] = "trash",
|
||||||
}
|
}
|
||||||
default_config.adapter_aliases = {}
|
default_config.adapter_aliases = {}
|
||||||
|
|
@ -234,6 +241,7 @@ default_config.view_options.highlight_filename = nil
|
||||||
---@field use_default_keymaps boolean
|
---@field use_default_keymaps boolean
|
||||||
---@field view_options oil.ViewOptions
|
---@field view_options oil.ViewOptions
|
||||||
---@field extra_scp_args string[]
|
---@field extra_scp_args string[]
|
||||||
|
---@field extra_s3_args string[]
|
||||||
---@field git oil.GitOptions
|
---@field git oil.GitOptions
|
||||||
---@field float oil.FloatWindowConfig
|
---@field float oil.FloatWindowConfig
|
||||||
---@field preview_win oil.PreviewWindowConfig
|
---@field preview_win oil.PreviewWindowConfig
|
||||||
|
|
@ -262,6 +270,7 @@ local M = {}
|
||||||
---@field use_default_keymaps? boolean Set to false to disable all of the above keymaps
|
---@field use_default_keymaps? boolean Set to false to disable all of the above keymaps
|
||||||
---@field view_options? oil.SetupViewOptions Configure which files are shown and how they are shown.
|
---@field view_options? oil.SetupViewOptions Configure which files are shown and how they are shown.
|
||||||
---@field extra_scp_args? string[] Extra arguments to pass to SCP when moving/copying files over SSH
|
---@field extra_scp_args? string[] Extra arguments to pass to SCP when moving/copying files over SSH
|
||||||
|
---@field extra_s3_args? string[] Extra arguments to pass to aws s3 when moving/copying files using aws s3
|
||||||
---@field git? oil.SetupGitOptions EXPERIMENTAL support for performing file operations with git
|
---@field git? oil.SetupGitOptions EXPERIMENTAL support for performing file operations with git
|
||||||
---@field float? oil.SetupFloatWindowConfig Configuration for the floating window in oil.open_float
|
---@field float? oil.SetupFloatWindowConfig Configuration for the floating window in oil.open_float
|
||||||
---@field preview_win? oil.SetupPreviewWindowConfig Configuration for the file preview window
|
---@field preview_win? oil.SetupPreviewWindowConfig Configuration for the file preview window
|
||||||
|
|
|
||||||
|
|
@ -2,9 +2,9 @@ if exists("b:current_syntax")
|
||||||
finish
|
finish
|
||||||
endif
|
endif
|
||||||
|
|
||||||
syn match oilCreate /^CREATE /
|
syn match oilCreate /^CREATE\( BUCKET\)\? /
|
||||||
syn match oilMove /^ MOVE /
|
syn match oilMove /^ MOVE /
|
||||||
syn match oilDelete /^DELETE /
|
syn match oilDelete /^DELETE\( BUCKET\)\? /
|
||||||
syn match oilCopy /^ COPY /
|
syn match oilCopy /^ COPY /
|
||||||
syn match oilChange /^CHANGE /
|
syn match oilChange /^CHANGE /
|
||||||
" Trash operations
|
" Trash operations
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue