mirror of
https://github.com/L3MON4D3/LuaSnip
synced 2024-09-16 21:54:03 +02:00
feat: major overhaul of loaders. Check DOC.md-changes for the gist.
Previously, we could not * add files that were not present when `load/lazy_load` was called to the collection. This is pretty annoying if one wants to add project-local snippets, or snippets for a new filetype (ofc). * load collections whose directory/package.json(c) did not exist when `load` was called. This is also an annoyance when creating project-local snippets, since a re-`load()` is required for the snippets to be picked up. * pick up on changes to the snippet-files from another neovim-instance (due to reloading on BufWritePost) This patch fixes all of these by modularizing the loaders a bit more, into one component ("Collection") which takes care of all the logic of loading different files, and another ("fswatchers") which notify the collections when a file-change is detected. This allows, first of all, a better design where the first concern can be nullified, and secondly, us to use libuvs api for file-watching, to implement the last two (if a potentially non-existing collection should be loaded, we can use libuv to wait for the collection-root/manifest-file, and create the collection once that exists). Another cool addition is the loader-snippet-cache, which makes it so that the snippet files (for vscode and snipmate) are only loaded once for all filetypes, and not once for each filetype. That's probably not noticeable though, except if a collection with many extends/languages for one json-file is loaded :D
This commit is contained in:
parent
d0d6456fa8
commit
ce400352e6
19 changed files with 2536 additions and 776 deletions
19
DOC.md
19
DOC.md
|
@ -2544,12 +2544,28 @@ where `opts` can contain the following keys:
|
|||
- `snipmate`: similar to lua, but the directory has to be `"snippets"`.
|
||||
- `vscode`: any directory in `runtimepath` that contains a
|
||||
`package.json` contributing snippets.
|
||||
- `lazy_paths`: behaves essentially like `paths`, with two exceptions: if it is
|
||||
`nil`, it does not default to `runtimepath`, and the paths listed here do not
|
||||
need to exist, and will be loaded on creation.
|
||||
LuaSnip will do its best to determine the path that this should resolve to,
|
||||
but since the resolving we do is not very sophisticated it may produce
|
||||
incorrect paths. Definitely check the log if snippets are not loaded as
|
||||
expected.
|
||||
- `exclude`: List of languages to exclude, empty by default.
|
||||
- `include`: List of languages to include, includes everything by default.
|
||||
- `{override,default}_priority`: These keys are passed straight to the
|
||||
`add_snippets`-calls (documented in [API](#api)) and can therefore change the
|
||||
priority of snippets loaded from some collection (or, in combination with
|
||||
`{in,ex}clude`, only some of its snippets).
|
||||
- `fs_event_providers`: `table<string, boolean>?`, specifies which mechanisms
|
||||
should be used to watch files for updates/creation.
|
||||
If `autocmd` is set to `true`, a `BufWritePost`-hook watches files of this
|
||||
collection, if `libuv` is set, the file-watcher-api exposed by libuv is used
|
||||
to watch for updates.
|
||||
Use `libuv` if you want snippets to update from other neovim-instances, and
|
||||
`autocmd` if the collection resides on a filesystem where the libuv-watchers
|
||||
may not work correctly. Or, of course, just enable both :D
|
||||
By default, only `autocmd` is enabled.
|
||||
|
||||
While `load` will immediately load the snippets, `lazy_load` will defer loading until
|
||||
the snippets are actually needed (whenever a new buffer is created, or the
|
||||
|
@ -2723,6 +2739,9 @@ If `scope` is not set, the snippet will be added to the global filetype (`all`).
|
|||
- `{override,default}_priority`: These keys are passed straight to the
|
||||
`add_snippets`-calls (documented in [API](#api)) and can be used to change
|
||||
the priority of the loaded snippets.
|
||||
- `lazy`: `boolean`, if it is set, the file does not have to exist when
|
||||
`load_standalone` is called, and it will be loaded on creation.
|
||||
`false` by default.
|
||||
|
||||
**Example**:
|
||||
`a.code-snippets`:
|
||||
|
|
|
@ -3,3 +3,6 @@
|
|||
---@class LuaSnip.MatchRegion 0-based region
|
||||
---@field row integer 0-based row
|
||||
---@field col_range { [1]: integer, [2]: integer } 0-based column range, from-in, to-exclusive
|
||||
|
||||
---@alias LuaSnip.Addable table
|
||||
---Anything that can be passed to ls.add_snippets().
|
||||
|
|
|
@ -1,77 +0,0 @@
|
|||
local Cache = {}
|
||||
|
||||
function Cache:clean()
|
||||
self.lazy_load_paths = {}
|
||||
self.ft_paths = {}
|
||||
self.path_snippets = {}
|
||||
-- We do not clean lazy_loaded_ft!!
|
||||
--
|
||||
-- It is preserved to accommodate a workflow where the luasnip-config
|
||||
-- begins with `ls.cleanup()`, which should make it completely reloadable.
|
||||
-- This would not be the case if lazy_loaded_ft was cleaned:
|
||||
-- the autocommands for loading lazy_load-snippets will not necessarily be
|
||||
-- triggered before the next expansion occurs, at which point the snippets
|
||||
-- should be available (but won't be, because the actual load wasn't
|
||||
-- triggered).
|
||||
-- As the list is not cleaned, the snippets will be loaded when
|
||||
-- `lazy_load()` is called (where a check for already-loaded filetypes is
|
||||
-- done explicitly).
|
||||
end
|
||||
|
||||
local function new_cache()
|
||||
-- returns the table the metatable was set on.
|
||||
return setmetatable({
|
||||
-- maps ft to list of files. Each file provides snippets for the given
|
||||
-- filetype.
|
||||
-- The file-paths are normalized.
|
||||
-- In snipmate:
|
||||
-- {
|
||||
-- lua = {"~/snippets/lua.snippets"},
|
||||
-- c = {"~/snippets/c.snippets", "/othersnippets/c.snippets"}
|
||||
-- }
|
||||
lazy_load_paths = {},
|
||||
|
||||
-- ft -> {true, nil}.
|
||||
-- Keep track of which filetypes were already lazy_loaded to prevent
|
||||
-- duplicates.
|
||||
--
|
||||
-- load "all" by default, makes no sense to include it again and again,
|
||||
-- just once on startup seems nicer.
|
||||
lazy_loaded_ft = { all = true },
|
||||
|
||||
-- key is file type, value are normalized!! paths of .snippets files.
|
||||
-- shall contain all files loaded by any loader.
|
||||
ft_paths = {},
|
||||
|
||||
-- key is _normalized!!!!_ file path, value are loader-specific.
|
||||
-- Might contain the snippets from the file, or the filetype(s) it
|
||||
-- contributes to.
|
||||
--
|
||||
-- for vscode:
|
||||
-- stores {
|
||||
-- snippets, -- the snippets provided by the file
|
||||
-- filetype_add_opts, -- add_opts for some filetype
|
||||
-- filetypes -- filetypes for which this file is active (important for
|
||||
-- reload).
|
||||
-- }
|
||||
path_snippets = {},
|
||||
}, {
|
||||
__index = Cache,
|
||||
})
|
||||
end
|
||||
|
||||
local M = {
|
||||
vscode_packages = new_cache(),
|
||||
vscode_standalone = new_cache(),
|
||||
snipmate = new_cache(),
|
||||
lua = new_cache(),
|
||||
}
|
||||
|
||||
function M.cleanup()
|
||||
M.vscode_packages:clean()
|
||||
M.vscode_standalone:clean()
|
||||
M.snipmate:clean()
|
||||
M.lua:clean()
|
||||
end
|
||||
|
||||
return M
|
27
lua/luasnip/loaders/data.lua
Normal file
27
lua/luasnip/loaders/data.lua
Normal file
|
@ -0,0 +1,27 @@
|
|||
--- This module stores all files loaded by any of the loaders, ordered by their
|
||||
--- filetype, and other data.
|
||||
--- This is to facilitate luasnip.loaders.edit_snippets, and to handle
|
||||
--- persistency of data, which is not given if it is stored in the module-file,
|
||||
--- since the module-name we use (luasnip.loaders.*) is not necessarily the one
|
||||
--- used by the user (luasnip/loader/*, for example), and the returned modules
|
||||
--- are different tables.
|
||||
|
||||
local autotable = require("luasnip.util.auto_table").autotable
|
||||
|
||||
local M = {
|
||||
lua_collections = {},
|
||||
lua_ft_paths = autotable(2),
|
||||
|
||||
snipmate_collections = {},
|
||||
snipmate_ft_paths = autotable(2),
|
||||
-- set by loader.
|
||||
snipmate_cache = nil,
|
||||
|
||||
vscode_package_collections = {},
|
||||
vscode_standalone_watchers = {},
|
||||
vscode_ft_paths = autotable(2),
|
||||
-- set by loader.
|
||||
vscode_cache = nil,
|
||||
}
|
||||
|
||||
return M
|
|
@ -21,13 +21,18 @@
|
|||
-- all files for some ft since add_opts might be different (they might be from
|
||||
-- different lazy_load-calls).
|
||||
|
||||
local cache = require("luasnip.loaders._caches").lua
|
||||
local path_mod = require("luasnip.util.path")
|
||||
local loader_util = require("luasnip.loaders.util")
|
||||
local ls = require("luasnip")
|
||||
local log = require("luasnip.util.log").new("lua-loader")
|
||||
local session = require("luasnip.session")
|
||||
local util = require("luasnip.util.util")
|
||||
local autotable = require("luasnip.util.auto_table").autotable
|
||||
local tree_watcher = require("luasnip.loaders.fs_watchers").tree
|
||||
local path_watcher = require("luasnip.loaders.fs_watchers").path
|
||||
local digraph = require("luasnip.util.directed_graph")
|
||||
local refresh_notify = require("luasnip.session.enqueueable_operations").refresh_notify
|
||||
local clean_invalidated = require("luasnip.session.enqueueable_operations").clean_invalidated
|
||||
|
||||
local Data = require("luasnip.loaders.data")
|
||||
|
||||
local M = {}
|
||||
|
||||
|
@ -44,20 +49,20 @@ local function get_loaded_file_debuginfo()
|
|||
-- ... (here anything is going on, could be 0 stackframes, could be many)
|
||||
-- n-2 (at least 3) is the loaded file
|
||||
-- n-1 (at least 4) is pcall
|
||||
-- n (at least 5) is _luasnip_load_files
|
||||
-- n (at least 5) is _luasnip_load_file
|
||||
local current_call_depth = 4
|
||||
local debuginfo
|
||||
|
||||
repeat
|
||||
current_call_depth = current_call_depth + 1
|
||||
debuginfo = debug.getinfo(current_call_depth, "n")
|
||||
until debuginfo.name == "_luasnip_load_files"
|
||||
until debuginfo.name == "_luasnip_load_file"
|
||||
|
||||
-- ret is stored into a local, and not returned immediately to prevent tail
|
||||
-- call optimization, which seems to invalidate the stackframe-numbers
|
||||
-- determined earlier.
|
||||
--
|
||||
-- current_call_depth-0 is _luasnip_load_files,
|
||||
-- current_call_depth-0 is _luasnip_load_file,
|
||||
-- current_call_depth-1 is pcall, and
|
||||
-- current_call_depth-2 is the lua-loaded file.
|
||||
-- "Sl": get only source-file and current line.
|
||||
|
@ -65,211 +70,328 @@ local function get_loaded_file_debuginfo()
|
|||
return ret
|
||||
end
|
||||
|
||||
local function _luasnip_load_files(ft, files, add_opts)
|
||||
for _, file in ipairs(files) do
|
||||
local function search_lua_rtp(modulename)
|
||||
-- essentially stolen from vim.loader.
|
||||
local rtp_lua_path = package.path
|
||||
for _, path in ipairs(vim.api.nvim_get_runtime_file("", true)) do
|
||||
rtp_lua_path = rtp_lua_path .. (";%s/lua/?.lua;%s/lua/?/init.lua"):format(path, path)
|
||||
end
|
||||
|
||||
return package.searchpath(modulename, rtp_lua_path)
|
||||
end
|
||||
|
||||
local function _luasnip_load_file(file)
|
||||
-- vim.loader.enabled does not seem to be official api, so always reset
|
||||
-- if the loader is available.
|
||||
-- To be sure, even pcall it, in case there are conditions under which
|
||||
-- it might error.
|
||||
if vim.loader then
|
||||
-- pcall, not sure if this can fail in some way..
|
||||
-- Does not seem like it though
|
||||
local ok, res = pcall(vim.loader.reset, file)
|
||||
if not ok then
|
||||
log.warn("Could not reset cache for file %s\n: %s", file, res)
|
||||
end
|
||||
if vim.loader then
|
||||
-- pcall, not sure if this can fail in some way..
|
||||
-- Does not seem like it though
|
||||
local ok, res = pcall(vim.loader.reset, file)
|
||||
if not ok then
|
||||
log.warn("Could not reset cache for file %s\n: %s", file, res)
|
||||
end
|
||||
|
||||
local func, error_msg = loadfile(file)
|
||||
if error_msg then
|
||||
log.error("Failed to load %s\n: %s", file, error_msg)
|
||||
error(string.format("Failed to load %s\n: %s", file, error_msg))
|
||||
end
|
||||
|
||||
-- the loaded file may add snippets to these tables, they'll be
|
||||
-- combined with the snippets returned regularly.
|
||||
local file_added_snippets = {}
|
||||
local file_added_autosnippets = {}
|
||||
|
||||
-- setup snip_env in func
|
||||
local func_env = vim.tbl_extend(
|
||||
"force",
|
||||
-- extend the current(expected!) globals with the snip_env, and the
|
||||
-- two tables.
|
||||
_G,
|
||||
ls.get_snip_env(),
|
||||
{
|
||||
ls_file_snippets = file_added_snippets,
|
||||
ls_file_autosnippets = file_added_autosnippets,
|
||||
}
|
||||
)
|
||||
-- defaults snip-env requires metatable for resolving
|
||||
-- lazily-initialized keys. If we have to combine this with an eventual
|
||||
-- metatable of _G, look into unifying ls.setup_snip_env and this.
|
||||
setmetatable(func_env, getmetatable(ls.get_snip_env()))
|
||||
setfenv(func, func_env)
|
||||
|
||||
-- Since this function has to reach the snippet-constructor, and fenvs
|
||||
-- aren't inherited by called functions, we have to set it in the global
|
||||
-- environment.
|
||||
_G.__luasnip_get_loaded_file_frame_debuginfo = util.ternary(
|
||||
session.config.loaders_store_source,
|
||||
get_loaded_file_debuginfo,
|
||||
nil
|
||||
)
|
||||
local run_ok, file_snippets, file_autosnippets = pcall(func)
|
||||
-- immediately nil it.
|
||||
_G.__luasnip_get_loaded_file_frame_debuginfo = nil
|
||||
|
||||
if not run_ok then
|
||||
log.error("Failed to execute\n: %s", file, file_snippets)
|
||||
error("Failed to execute " .. file .. "\n: " .. file_snippets)
|
||||
end
|
||||
|
||||
-- make sure these aren't nil.
|
||||
file_snippets = file_snippets or {}
|
||||
file_autosnippets = file_autosnippets or {}
|
||||
|
||||
vim.list_extend(file_snippets, file_added_snippets)
|
||||
vim.list_extend(file_autosnippets, file_added_autosnippets)
|
||||
|
||||
-- keep track of snippet-source.
|
||||
cache.path_snippets[file] = {
|
||||
add_opts = add_opts,
|
||||
ft = ft,
|
||||
}
|
||||
|
||||
ls.add_snippets(
|
||||
ft,
|
||||
file_snippets,
|
||||
vim.tbl_extend("keep", {
|
||||
type = "snippets",
|
||||
key = "__snippets_" .. file,
|
||||
-- prevent refresh here, will be done outside loop.
|
||||
refresh_notify = false,
|
||||
}, add_opts)
|
||||
)
|
||||
ls.add_snippets(
|
||||
ft,
|
||||
file_autosnippets,
|
||||
vim.tbl_extend("keep", {
|
||||
type = "autosnippets",
|
||||
key = "__autosnippets_" .. file,
|
||||
-- prevent refresh here, will be done outside loop.
|
||||
refresh_notify = false,
|
||||
}, add_opts)
|
||||
)
|
||||
log.info(
|
||||
"Adding %s snippets and %s autosnippets from %s to ft `%s`",
|
||||
#file_snippets,
|
||||
#file_autosnippets,
|
||||
file,
|
||||
ft
|
||||
)
|
||||
end
|
||||
|
||||
ls.refresh_notify(ft)
|
||||
local func, error_msg = loadfile(file)
|
||||
if error_msg then
|
||||
log.error("Failed to load %s\n: %s", file, error_msg)
|
||||
error(string.format("Failed to load %s\n: %s", file, error_msg))
|
||||
end
|
||||
|
||||
-- the loaded file may add snippets to these tables, they'll be
|
||||
-- combined with the snippets returned regularly.
|
||||
local file_added_snippets = {}
|
||||
local file_added_autosnippets = {}
|
||||
|
||||
local dependent_files = {}
|
||||
|
||||
-- setup snip_env in func
|
||||
local func_env
|
||||
local function ls_tracked_dofile(filename)
|
||||
local package_func, err_msg = loadfile(filename)
|
||||
if package_func then
|
||||
setfenv(package_func, func_env)
|
||||
table.insert(dependent_files, filename)
|
||||
else
|
||||
error(("File %s could not be loaded: %s"):format(filename, err_msg))
|
||||
end
|
||||
|
||||
return package_func()
|
||||
end
|
||||
func_env = vim.tbl_extend(
|
||||
"force",
|
||||
-- extend the current(expected!) globals with the snip_env, and the
|
||||
-- two tables.
|
||||
_G,
|
||||
session.get_snip_env(),
|
||||
{
|
||||
ls_file_snippets = file_added_snippets,
|
||||
ls_file_autosnippets = file_added_autosnippets,
|
||||
ls_tracked_dofile = ls_tracked_dofile,
|
||||
ls_tracked_dopackage = function(package_name)
|
||||
local package_file = search_lua_rtp(package_name)
|
||||
if not package_file then
|
||||
error(("Could not find package %s in rtp and package.path"):format(package_name))
|
||||
end
|
||||
return ls_tracked_dofile(package_file)
|
||||
end
|
||||
}
|
||||
)
|
||||
-- defaults snip-env requires metatable for resolving
|
||||
-- lazily-initialized keys. If we have to combine this with an eventual
|
||||
-- metatable of _G, look into unifying ls.setup_snip_env and this.
|
||||
setmetatable(func_env, getmetatable(session.get_snip_env()))
|
||||
setfenv(func, func_env)
|
||||
|
||||
-- Since this function has to reach the snippet-constructor, and fenvs
|
||||
-- aren't inherited by called functions, we have to set it in the global
|
||||
-- environment.
|
||||
_G.__luasnip_get_loaded_file_frame_debuginfo = util.ternary(
|
||||
session.config.loaders_store_source,
|
||||
get_loaded_file_debuginfo,
|
||||
nil
|
||||
)
|
||||
local run_ok, file_snippets, file_autosnippets = pcall(func)
|
||||
-- immediately nil it.
|
||||
_G.__luasnip_get_loaded_file_frame_debuginfo = nil
|
||||
|
||||
if not run_ok then
|
||||
log.error("Failed to execute\n: %s", file, file_snippets)
|
||||
error("Failed to execute " .. file .. "\n: " .. file_snippets)
|
||||
end
|
||||
|
||||
-- make sure these aren't nil.
|
||||
file_snippets = file_snippets or {}
|
||||
file_autosnippets = file_autosnippets or {}
|
||||
|
||||
vim.list_extend(file_snippets, file_added_snippets)
|
||||
vim.list_extend(file_autosnippets, file_added_autosnippets)
|
||||
|
||||
return file_snippets, file_autosnippets, dependent_files
|
||||
end
|
||||
|
||||
local function lua_package_file_filter(fname)
|
||||
return fname:match("%.lua$")
|
||||
end
|
||||
|
||||
--- Collection watches all files that belong to a collection of snippets below
|
||||
--- some root, and registers new files.
|
||||
local Collection = {}
|
||||
local Collection_mt = {
|
||||
__index = Collection
|
||||
}
|
||||
|
||||
function Collection.new(root, lazy, include_ft, exclude_ft, add_opts, lazy_watcher, fs_event_providers)
|
||||
local ft_filter = loader_util.ft_filter(include_ft, exclude_ft)
|
||||
local o = setmetatable({
|
||||
root = root,
|
||||
file_filter = function(path)
|
||||
if not path:sub(1, #root) == root then
|
||||
log.warn("Tried to filter file `%s`, which is not inside the root `%s`.", path, root)
|
||||
return false
|
||||
end
|
||||
return lua_package_file_filter(path) and ft_filter(path)
|
||||
end,
|
||||
add_opts = add_opts,
|
||||
lazy = lazy,
|
||||
-- store ft -> set of files that should be lazy-loaded.
|
||||
lazy_files = autotable(2, {warn = false}),
|
||||
-- store, for all files in this collection, their filetype.
|
||||
-- No need to always recompute it, and we can use this to store which
|
||||
-- files belong to the collection.
|
||||
loaded_path_ft = {},
|
||||
file_dependencies = digraph.new_labeled(),
|
||||
-- store fs_watchers for files the snippets-files depend on.
|
||||
dependency_watchers = {},
|
||||
fs_event_providers = fs_event_providers
|
||||
}, Collection_mt)
|
||||
|
||||
-- only register files up to a depth of 2.
|
||||
local ok, err_or_watcher = pcall(tree_watcher, root, 2, {
|
||||
-- don't handle removals for now.
|
||||
new_file = function(path)
|
||||
-- detected new file, make sure it is allowed by our filters.
|
||||
if o.file_filter(path) then
|
||||
o:add_file(path, loader_util.collection_file_ft(o.root, path))
|
||||
end
|
||||
end,
|
||||
change_file = function(path)
|
||||
o:reload(path)
|
||||
end
|
||||
}, {lazy = lazy_watcher, fs_event_providers = fs_event_providers})
|
||||
|
||||
if not ok then
|
||||
error(("Could not create watcher: %s"):format(err_or_watcher))
|
||||
end
|
||||
|
||||
o.watcher = err_or_watcher
|
||||
|
||||
log.info("Initialized snippet-collection at `%s`", root)
|
||||
|
||||
return o
|
||||
end
|
||||
|
||||
-- Add file with some filetype to collection.
|
||||
function Collection:add_file(path, ft)
|
||||
Data.lua_ft_paths[ft][path] = true
|
||||
|
||||
if self.lazy then
|
||||
if not session.loaded_fts[ft] then
|
||||
log.info("Registering lazy-load-snippets for ft `%s` from file `%s`", ft, path)
|
||||
|
||||
-- only register to load later.
|
||||
self.lazy_files[ft][path] = true
|
||||
return
|
||||
else
|
||||
log.info(
|
||||
"Filetype `%s` is already active, loading immediately.",
|
||||
ft
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
self:load_file(path, ft)
|
||||
end
|
||||
function Collection:load_file(path, ft)
|
||||
log.info(
|
||||
"Adding snippets for filetype `%s` from file `%s`",
|
||||
ft,
|
||||
path
|
||||
)
|
||||
self.loaded_path_ft[path] = ft
|
||||
|
||||
local snippets, autosnippets, dependent_files = _luasnip_load_file(path)
|
||||
|
||||
-- ignored if it already exists.
|
||||
self.file_dependencies:set_vertex(path)
|
||||
-- make sure we don't retain any old dependencies.
|
||||
self.file_dependencies:clear_edges(path)
|
||||
|
||||
for _, file_dependency in ipairs(dependent_files) do
|
||||
-- ignored if it already exists.
|
||||
self.file_dependencies:set_vertex(file_dependency)
|
||||
-- path depends on dependent_file => if dependent_file is changed, path
|
||||
-- should be updated.
|
||||
self.file_dependencies:set_edge(file_dependency, path, path)
|
||||
|
||||
if not self.dependency_watchers[file_dependency] then
|
||||
self.dependency_watchers[file_dependency] = path_watcher(file_dependency, {
|
||||
change = function(_)
|
||||
local depending_files = self.file_dependencies:connected_component(file_dependency, "Forward")
|
||||
for _, file in ipairs(depending_files) do
|
||||
-- Prevent loading one of the utility-files as a snippet-file.
|
||||
-- This will not reject any snippet-file in
|
||||
-- depending_files. This is because since they are in
|
||||
-- depending_files, we have their dependency-information,
|
||||
-- which can only be obtained by loading them, and so there
|
||||
-- can't be any unloaded files in there.
|
||||
if self.loaded_path_ft[file] then
|
||||
self:load_file(file, self.loaded_path_ft[file])
|
||||
end
|
||||
end
|
||||
end
|
||||
}, {lazy = false, fs_event_providers = self.fs_event_providers})
|
||||
end
|
||||
end
|
||||
|
||||
loader_util.add_file_snippets(ft, path, snippets, autosnippets, self.add_opts)
|
||||
|
||||
refresh_notify(ft)
|
||||
end
|
||||
function Collection:do_lazy_load(ft)
|
||||
for file, _ in pairs(self.lazy_files[ft]) do
|
||||
if not self.loaded_path_ft[file] then
|
||||
self:load_file(file, ft)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- will only do something, if the file at `path` was loaded previously.
|
||||
function Collection:reload(path)
|
||||
local path_ft = self.loaded_path_ft[path]
|
||||
if not path_ft then
|
||||
-- file not yet loaded.
|
||||
return
|
||||
end
|
||||
|
||||
-- will override previously-loaded snippets from this path.
|
||||
self:load_file(path, path_ft)
|
||||
|
||||
-- clean snippets if enough were removed.
|
||||
clean_invalidated()
|
||||
end
|
||||
|
||||
function Collection:stop()
|
||||
self.watcher:stop()
|
||||
for _, watcher in pairs(self.dependency_watchers) do
|
||||
watcher:stop()
|
||||
end
|
||||
end
|
||||
|
||||
function M._load_lazy_loaded_ft(ft)
|
||||
for _, load_call_paths in ipairs(cache.lazy_load_paths) do
|
||||
_luasnip_load_files(
|
||||
ft,
|
||||
load_call_paths[ft] or {},
|
||||
load_call_paths.add_opts
|
||||
)
|
||||
log.info("Loading lazy-load-snippets for filetype `%s`", ft)
|
||||
|
||||
for _, collection in ipairs(Data.lua_collections) do
|
||||
collection:do_lazy_load(ft)
|
||||
end
|
||||
end
|
||||
|
||||
function M._load_lazy_loaded(bufnr)
|
||||
local fts = loader_util.get_load_fts(bufnr)
|
||||
local function _load(lazy, opts)
|
||||
local o = loader_util.normalize_opts(opts)
|
||||
|
||||
for _, ft in ipairs(fts) do
|
||||
if not cache.lazy_loaded_ft[ft] then
|
||||
log.info("Loading lazy-load-snippets for filetype `%s`", ft)
|
||||
M._load_lazy_loaded_ft(ft)
|
||||
cache.lazy_loaded_ft[ft] = true
|
||||
end
|
||||
local collection_roots = loader_util.resolve_root_paths(o.paths, "luasnippets")
|
||||
local lazy_roots = loader_util.resolve_lazy_root_paths(o.lazy_paths)
|
||||
|
||||
log.info("Found roots `%s` for paths `%s`.", vim.inspect(collection_roots), vim.inspect(o.paths))
|
||||
if o.paths and #o.paths ~= #collection_roots then
|
||||
log.warn("Could not resolve all collection-roots for paths `%s`: only found `%s`", vim.inspect(o.paths), vim.inspect(collection_roots))
|
||||
end
|
||||
end
|
||||
|
||||
function M.load(opts)
|
||||
opts = opts or {}
|
||||
|
||||
local add_opts = loader_util.add_opts(opts)
|
||||
|
||||
local collections =
|
||||
loader_util.get_load_paths_snipmate_like(opts, "luasnippets", "lua")
|
||||
for _, collection in ipairs(collections) do
|
||||
local load_paths = collection.load_paths
|
||||
log.info("Loading snippet-collection:\n%s", vim.inspect(load_paths))
|
||||
|
||||
-- also add files from collection to cache (collection of all loaded
|
||||
-- files by filetype, useful for editing files for some filetype).
|
||||
loader_util.extend_ft_paths(cache.ft_paths, load_paths)
|
||||
|
||||
for ft, files in pairs(load_paths) do
|
||||
_luasnip_load_files(ft, files, add_opts)
|
||||
end
|
||||
log.info("Determined roots `%s` for lazy_paths `%s`.", vim.inspect(lazy_roots), vim.inspect(o.lazy_paths))
|
||||
if o.lazy_paths and #o.lazy_paths ~= #lazy_roots then
|
||||
log.warn("Could not resolve all collection-roots for lazy_paths `%s`: only found `%s`", vim.inspect(o.lazy_paths), vim.inspect(lazy_roots))
|
||||
end
|
||||
end
|
||||
|
||||
function M.lazy_load(opts)
|
||||
opts = opts or {}
|
||||
for paths_lazy, roots in pairs({[true] = lazy_roots, [false] = collection_roots}) do
|
||||
for _, collection_root in ipairs(roots) do
|
||||
local ok, coll_or_err = pcall(Collection.new, collection_root, lazy, o.include, o.exclude, o.add_opts, paths_lazy, o.fs_event_providers)
|
||||
|
||||
local add_opts = loader_util.add_opts(opts)
|
||||
|
||||
local collections =
|
||||
loader_util.get_load_paths_snipmate_like(opts, "luasnippets", "lua")
|
||||
for _, collection in ipairs(collections) do
|
||||
local load_paths = collection.load_paths
|
||||
|
||||
loader_util.extend_ft_paths(cache.ft_paths, load_paths)
|
||||
|
||||
for ft, files in pairs(load_paths) do
|
||||
if cache.lazy_loaded_ft[ft] then
|
||||
-- instantly load snippets if they were already loaded...
|
||||
log.info(
|
||||
"Immediately loading lazy-load-snippets for already-active filetype `%s` from files:\n%s",
|
||||
ft,
|
||||
vim.inspect(files)
|
||||
)
|
||||
_luasnip_load_files(ft, files, add_opts)
|
||||
|
||||
-- don't load these files again.
|
||||
load_paths[ft] = nil
|
||||
if not ok then
|
||||
log.error("Could not create collection at %s: %s", collection_root, coll_or_err)
|
||||
else
|
||||
table.insert(Data.lua_collections, coll_or_err)
|
||||
end
|
||||
end
|
||||
|
||||
log.info("Registering lazy-load-snippets:\n%s", vim.inspect(load_paths))
|
||||
|
||||
load_paths.add_opts = add_opts
|
||||
table.insert(cache.lazy_load_paths, load_paths)
|
||||
end
|
||||
end
|
||||
|
||||
--- Load lua-snippet-collections immediately.
|
||||
--- @param opts LuaSnip.Loaders.LoadOpts?
|
||||
function M.load(opts)
|
||||
_load(false, opts)
|
||||
end
|
||||
|
||||
--- Load lua-snippet-collections on demand.
|
||||
--- @param opts LuaSnip.Loaders.LoadOpts?
|
||||
function M.lazy_load(opts)
|
||||
_load(true, opts)
|
||||
|
||||
-- load for current buffer on startup.
|
||||
M._load_lazy_loaded(vim.api.nvim_get_current_buf())
|
||||
end
|
||||
|
||||
-- Make sure filename is normalized
|
||||
function M._reload_file(filename)
|
||||
local file_cache = cache.path_snippets[filename]
|
||||
-- only clear and load(!!! snippets may not actually be loaded, lazy_load)
|
||||
-- if the snippets were really loaded.
|
||||
-- normally file_cache should exist if the autocommand was registered, just
|
||||
-- be safe here.
|
||||
if file_cache then
|
||||
local add_opts = file_cache.add_opts
|
||||
local ft = file_cache.ft
|
||||
|
||||
log.info("Re-loading snippets contributed by %s", filename)
|
||||
_luasnip_load_files(ft, { filename }, add_opts)
|
||||
ls.clean_invalidated({ inv_limit = 100 })
|
||||
for _, ft in ipairs(loader_util.get_load_fts(vim.api.nvim_get_current_buf())) do
|
||||
M._load_lazy_loaded_ft(ft)
|
||||
end
|
||||
end
|
||||
|
||||
function M.edit_snippet_files()
|
||||
loader_util.edit_snippet_files(cache.ft_paths)
|
||||
function M.clean()
|
||||
for _, collection in ipairs(Data.lua_collections) do
|
||||
collection:stop()
|
||||
end
|
||||
-- bit ugly, keep in sync with defaults in data.lua.
|
||||
-- Don't anticipate those changing, so fine I guess.
|
||||
Data.lua_collections = {}
|
||||
Data.lua_ft_paths = autotable(2)
|
||||
end
|
||||
|
||||
return M
|
||||
|
|
|
@ -1,15 +1,36 @@
|
|||
local ls = require("luasnip")
|
||||
local cache = require("luasnip.loaders._caches").snipmate
|
||||
local loader_util = require("luasnip.loaders.util")
|
||||
local util = require("luasnip.util.util")
|
||||
local tbl_util = require("luasnip.util.table")
|
||||
local Path = require("luasnip.util.path")
|
||||
local autotable = require("luasnip.util.auto_table").autotable
|
||||
local digraph = require("luasnip.util.directed_graph")
|
||||
local tree_watcher = require("luasnip.loaders.fs_watchers").tree
|
||||
local Data = require("luasnip.loaders.data")
|
||||
local session = require("luasnip.session")
|
||||
local snippetcache = require("luasnip.loaders.snippet_cache")
|
||||
local refresh_notify = require("luasnip.session.enqueueable_operations").refresh_notify
|
||||
local clean_invalidated = require("luasnip.session.enqueueable_operations").clean_invalidated
|
||||
|
||||
local log = require("luasnip.util.log").new("snipmate-loader")
|
||||
|
||||
local function parse_snipmate(buffer, filename)
|
||||
--- Load data from any snippet-file.
|
||||
--- @param filename string
|
||||
--- @return LuaSnip.Loaders.SnippetFileData
|
||||
local function load_snipmate(filename)
|
||||
local buffer_ok, buffer = pcall(Path.read_file, filename)
|
||||
if not buffer_ok then
|
||||
log.error(("Could not read file %s: %s"):format(filename, buffer))
|
||||
-- return dummy-data.
|
||||
return {
|
||||
snippets = {},
|
||||
autosnippets = {},
|
||||
misc = {}
|
||||
}
|
||||
end
|
||||
|
||||
local sp = require("luasnip.nodes.snippetProxy")
|
||||
local snipmate_parse_fn = require("luasnip.util.parser").parse_snipmate
|
||||
local source = require("luasnip.session.snippet_collection.source")
|
||||
local session = require("luasnip.session")
|
||||
|
||||
-- could also be separate variables, but easier to access this way.
|
||||
local snippets = {
|
||||
|
@ -85,7 +106,7 @@ local function parse_snipmate(buffer, filename)
|
|||
_parse("autosnippet", snipmate_opts)
|
||||
snipmate_opts = {}
|
||||
elseif vim.startswith(line, "extends") then
|
||||
extends = vim.split(vim.trim(line:sub(8)), "[,%s]+")
|
||||
vim.list_extend(extends, vim.split(vim.trim(line:sub(8)), "[,%s]+"))
|
||||
i = i + 1
|
||||
elseif vim.startswith(line, "#") or line:find("^%s*$") then
|
||||
-- comment and blank line
|
||||
|
@ -99,197 +120,315 @@ local function parse_snipmate(buffer, filename)
|
|||
end
|
||||
end
|
||||
|
||||
return snippets.snippet, snippets.autosnippet, extends
|
||||
return {
|
||||
snippets = snippets.snippet,
|
||||
autosnippets = snippets.autosnippet,
|
||||
misc = extends
|
||||
}
|
||||
end
|
||||
|
||||
local function load_snippet_files(add_ft, paths, collection_files, add_opts)
|
||||
for _, path in ipairs(paths) do
|
||||
if not Path.exists(path) then
|
||||
return
|
||||
end
|
||||
-- cache snippets without filetype-association for reuse.
|
||||
Data.snipmate_cache = snippetcache.new(load_snipmate)
|
||||
|
||||
local snippet, autosnippet, extends
|
||||
--- Collection watches all files that belong to a collection of snippets below
|
||||
--- some root, and registers new files.
|
||||
local Collection = {}
|
||||
local Collection_mt = {
|
||||
__index = Collection
|
||||
}
|
||||
|
||||
if cache.path_snippets[path] then
|
||||
snippet = vim.deepcopy(cache.path_snippets[path].snippet)
|
||||
autosnippet = vim.deepcopy(cache.path_snippets[path].autosnippet)
|
||||
extends = cache.path_snippets[path].extends
|
||||
cache.path_snippets[path].fts[add_ft] = true
|
||||
else
|
||||
local buffer_ok, buffer = pcall(Path.read_file, path)
|
||||
if buffer_ok then
|
||||
snippet, autosnippet, extends = parse_snipmate(buffer, path)
|
||||
cache.path_snippets[path] = {
|
||||
snippet = vim.deepcopy(snippet),
|
||||
autosnippet = vim.deepcopy(autosnippet),
|
||||
extends = extends,
|
||||
-- store for reload.
|
||||
add_opts = add_opts,
|
||||
fts = { [add_ft] = true },
|
||||
}
|
||||
else
|
||||
log.error(("Could not read file %s: %s"):format(path, buffer))
|
||||
local function snipmate_package_file_filter(fname)
|
||||
return fname:match("%.snippets$")
|
||||
end
|
||||
|
||||
function Collection.new(root, lazy, include_ft, exclude_ft, add_opts, lazy_watcher, fs_event_providers)
|
||||
local ft_filter = loader_util.ft_filter(include_ft, exclude_ft)
|
||||
local o = setmetatable({
|
||||
root = root,
|
||||
|
||||
--- @alias LuaSnip.Loaders.Snipmate.FileCategory
|
||||
--- | '"collection"' File only belongs to the collection
|
||||
--- | '"load"' File should be loaded
|
||||
|
||||
--- Determine whether a file should be loaded, belongs to the
|
||||
--- collection, or doesn't.
|
||||
--- This distinction is important because we need to know about all
|
||||
--- files to correctly resolve `extend <someft>`, but only want to load
|
||||
--- the filetypes allowed by in/exclude.
|
||||
--- @param path string
|
||||
---@return LuaSnip.Loaders.Snipmate.FileCategory?
|
||||
categorize_file = function(path)
|
||||
if not path:sub(1, #root) == root then
|
||||
log.warn("Tried to filter file `%s`, which is not inside the root `%s`.", path, root)
|
||||
return nil
|
||||
end
|
||||
if snipmate_package_file_filter(path) then
|
||||
if ft_filter(path) then
|
||||
return "load"
|
||||
end
|
||||
return "collection"
|
||||
end
|
||||
return nil
|
||||
end,
|
||||
|
||||
add_opts = add_opts,
|
||||
lazy = lazy,
|
||||
-- store ft -> set of files that should be lazy-loaded.
|
||||
lazy_files = autotable(2, {warn = false}),
|
||||
-- store for each path the set of filetypes it has been loaded with.
|
||||
loaded_path_fts = autotable(2, {warn = false}),
|
||||
-- model filetype-extensions (`extends <someft>` in `ft.snippets`).
|
||||
-- Better than a flat table with t[ft] = {someft=true, somotherft=true}
|
||||
-- since transitive dependencies are easier to understand/query.
|
||||
-- There is an edge with source src to destination dst, if snippets for
|
||||
-- filetype src also contribute to filetype dst.
|
||||
-- Since we respect transitive `extends`, we can get all filetypes a
|
||||
-- snippet-file for some filetype A contributes to by querying the
|
||||
-- connected component of A (all filetype-vertices reachable from A).
|
||||
ft_extensions = digraph.new_labeled(),
|
||||
-- store all files in the collection, by their filetype.
|
||||
-- This information is necessary to handle `extends` even for files
|
||||
-- that are not actually loaded (due to in/exclude).
|
||||
collection_files_by_ft = autotable(2, {warn = false}),
|
||||
-- set if creation successful.
|
||||
watcher = nil,
|
||||
}, Collection_mt)
|
||||
|
||||
-- only register files up to a depth of 2.
|
||||
local ok, err_or_watcher = pcall(tree_watcher, root, 2, {
|
||||
-- don't handle removals for now.
|
||||
new_file = function(path)
|
||||
---@as LuaSnip.Loaders.Snipmate.FileCategory
|
||||
local file_category = o.categorize_file(path)
|
||||
|
||||
if file_category then
|
||||
-- know it's at least in the collection -> can register it.
|
||||
local file_ft = loader_util.collection_file_ft(o.root, path)
|
||||
o:register_file(path, file_ft)
|
||||
if file_category == "load" then
|
||||
-- actually load if allowed by in/exclude.
|
||||
o:add_file(path, file_ft)
|
||||
end
|
||||
end
|
||||
end,
|
||||
change_file = function(path)
|
||||
vim.schedule_wrap(function()
|
||||
o:reload(path)
|
||||
end)()
|
||||
end
|
||||
}, {lazy = lazy_watcher, fs_event_providers = fs_event_providers})
|
||||
|
||||
ls.add_snippets(
|
||||
add_ft,
|
||||
snippet,
|
||||
vim.tbl_extend("keep", {
|
||||
type = "snippets",
|
||||
-- key has to include the filetype since one file may be loaded in
|
||||
-- multiple filetypes (via `extends`).
|
||||
key = string.format("__%s_snippets_%s", add_ft, path),
|
||||
}, add_opts)
|
||||
)
|
||||
ls.add_snippets(
|
||||
add_ft,
|
||||
autosnippet,
|
||||
vim.tbl_extend("keep", {
|
||||
type = "autosnippets",
|
||||
key = string.format("__%s_autosnippets_%s", add_ft, path),
|
||||
}, add_opts)
|
||||
)
|
||||
log.info(
|
||||
"Adding %s snippets and %s autosnippets for filetype `%s` from %s",
|
||||
#snippet,
|
||||
#autosnippet,
|
||||
add_ft,
|
||||
path
|
||||
)
|
||||
if not ok then
|
||||
error(("Could not create watcher: %s"):format(err_or_watcher))
|
||||
end
|
||||
|
||||
for _, ft in ipairs(extends) do
|
||||
load_snippet_files(
|
||||
add_ft,
|
||||
-- "or {}" because the ft might (if the extended filetype is not
|
||||
-- actually present in the collection) be nil.
|
||||
collection_files[ft] or {},
|
||||
collection_files,
|
||||
add_opts
|
||||
o.watcher = err_or_watcher
|
||||
|
||||
log.info("Initialized snippet-collection at `%s`", root)
|
||||
|
||||
return o
|
||||
end
|
||||
|
||||
--- Makes the file known to the collection, but does not load its snippets.
|
||||
--- This is important because `extends` may require loading a file excluded by
|
||||
--- `file_filter`, ie `include` and `exclude`.
|
||||
--- @param path string
|
||||
--- @param ft string
|
||||
function Collection:register_file(path, ft)
|
||||
self.collection_files_by_ft[ft][path] = true
|
||||
end
|
||||
|
||||
--- Register a file-filetype-association with the collection.
|
||||
--- @param path string Path to a file that belongs to this collection.
|
||||
--- @param add_ft string The original filetype this file should be added as.
|
||||
--- Since we have to support filetype-extensions, this may
|
||||
--- add the snippets of the file to several other
|
||||
--- filetypes.
|
||||
function Collection:add_file(path, add_ft)
|
||||
-- register known file.
|
||||
Data.snipmate_ft_paths[add_ft][path] = true
|
||||
|
||||
if self.lazy then
|
||||
if not session.loaded_fts[add_ft] then
|
||||
log.info("Registering lazy-load-snippets for ft `%s` from file `%s`", add_ft, path)
|
||||
|
||||
-- only register to load later.
|
||||
self.lazy_files[add_ft][path] = true
|
||||
return
|
||||
else
|
||||
log.info(
|
||||
"Filetype `%s` is already active, loading immediately.",
|
||||
add_ft
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
-- extended filetypes will be loaded in load_file.
|
||||
self:load_file(path, add_ft, "SkipIfLoaded")
|
||||
end
|
||||
|
||||
--- @alias LuaSnip.Loaders.Snipmate.SkipLoad
|
||||
--- | '"ForceLoad"' Always load, even if it was already loaded.
|
||||
--- | '"SkipIfLoaded"' Skip the load if the file has been loaded already.
|
||||
|
||||
-- loads the fts that extend load_ft as well.
|
||||
-- skip_load_mode allows this code to both prevent unnecessary loads (which
|
||||
-- could be caused if some file is added to the same filetype more than once),
|
||||
-- while still handling reload (where the files has to be loaded again for
|
||||
-- every filetype, even if it already is loaded (since it may have different
|
||||
-- snippets))
|
||||
function Collection:load_file(path, ft, skip_load_mode)
|
||||
if skip_load_mode == "SkipIfLoaded" and self.loaded_path_fts[path][ft] then
|
||||
return
|
||||
end
|
||||
|
||||
log.info(
|
||||
"Adding snippets for filetype `%s` from file `%s`",
|
||||
ft,
|
||||
path
|
||||
)
|
||||
|
||||
-- Set here to skip loads triggered for the same path-file-combination in
|
||||
-- subsequent code, which would trigger and endless loop.
|
||||
self.loaded_path_fts[path][ft] = true
|
||||
|
||||
-- this may already be set, but setting again here ensures that a file is
|
||||
-- certainly associated with each filetype it's loaded for. (for example,
|
||||
-- file-ft-combinations loaded as a dependency from another file may not be
|
||||
-- set already).
|
||||
Data.snipmate_ft_paths[ft][path] = true
|
||||
|
||||
-- snippets may already be loaded -> get them from cache.
|
||||
local data = Data.snipmate_cache:fetch(path)
|
||||
local snippets = data.snippets
|
||||
local autosnippets = data.autosnippets
|
||||
-- data.misc is user-input, clean it here.
|
||||
local extended_fts = util.deduplicate(data.misc)
|
||||
|
||||
-- ignored if it already exists.
|
||||
self.ft_extensions:set_vertex(ft)
|
||||
-- make sure we don't retain any old dependencies.
|
||||
self.ft_extensions:clear_edges(path)
|
||||
|
||||
for _, extended_ft in pairs(extended_fts) do
|
||||
-- ignored if it already exists.
|
||||
self.ft_extensions:set_vertex(extended_ft)
|
||||
-- snippets for extended_ft should also be loaded if ft is loaded
|
||||
-- label edge with path, so all edges from this file can be updated on
|
||||
-- reload.
|
||||
self.ft_extensions:set_edge(extended_ft, ft, path)
|
||||
end
|
||||
|
||||
loader_util.add_file_snippets(ft, path, snippets, autosnippets, self.add_opts)
|
||||
|
||||
-- get all filetypes this one extends (directly or transitively), and load
|
||||
-- their files.
|
||||
local load_fts = self.ft_extensions:connected_component(ft, "Backward")
|
||||
for _, extended_ft in ipairs(load_fts) do
|
||||
for file, _ in pairs(self.collection_files_by_ft[extended_ft]) do
|
||||
for _, file_ft in ipairs(self.ft_extensions:connected_component(extended_ft, "Forward")) do
|
||||
-- skips load if the file is already loaded for the given filetype.
|
||||
-- One bad side-effect of this current implementation is that
|
||||
-- the edges in the graph will be reset/set multiple times,
|
||||
-- until they are retained in the last load_file-call to the
|
||||
-- last filetype.
|
||||
self:load_file(file, file_ft, "SkipIfLoaded")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
refresh_notify(ft)
|
||||
end
|
||||
|
||||
function Collection:do_lazy_load(lazy_ft)
|
||||
for file, _ in pairs(self.lazy_files[lazy_ft]) do
|
||||
for _, ft in ipairs(self.ft_extensions:connected_component(lazy_ft, "Forward")) do
|
||||
-- skips load if the file is already loaded for the given filetype.
|
||||
self:load_file(file, ft, "SkipIfLoaded")
|
||||
end
|
||||
end
|
||||
end
|
||||
-- will only do something, if the file at `path` is actually in the collection.
|
||||
function Collection:reload(path)
|
||||
local loaded_fts = tbl_util.set_to_list(self.loaded_path_fts[path])
|
||||
for _, loaded_ft in ipairs(loaded_fts) do
|
||||
-- will override previously-loaded snippets from this path.
|
||||
self:load_file(path, loaded_ft, "ForceLoad")
|
||||
end
|
||||
|
||||
-- clean snippets if enough were removed.
|
||||
clean_invalidated()
|
||||
end
|
||||
|
||||
function Collection:stop()
|
||||
self.watcher:stop()
|
||||
end
|
||||
|
||||
local M = {}
|
||||
|
||||
function M.load(opts)
|
||||
opts = opts or {}
|
||||
|
||||
local add_opts = loader_util.add_opts(opts)
|
||||
|
||||
-- we need all paths available in the collection for `extends`.
|
||||
-- only load_paths is influenced by in/exclude.
|
||||
local collections_load_paths =
|
||||
loader_util.get_load_paths_snipmate_like(opts, "snippets", "snippets")
|
||||
|
||||
for _, collection in ipairs(collections_load_paths) do
|
||||
local load_paths = collection.load_paths
|
||||
log.info("Loading snippet-collection:\n%s", vim.inspect(load_paths))
|
||||
|
||||
local collection_paths = collection.collection_paths
|
||||
|
||||
-- also add files from load_paths to cache (collection of all loaded
|
||||
-- files by filetype, useful for editing files for some filetype).
|
||||
loader_util.extend_ft_paths(cache.ft_paths, load_paths)
|
||||
|
||||
for ft, paths in pairs(load_paths) do
|
||||
load_snippet_files(ft, paths, collection_paths, add_opts)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function M._load_lazy_loaded_ft(ft)
|
||||
for _, collection_load_paths in ipairs(cache.lazy_load_paths) do
|
||||
-- don't load if this ft wasn't included/was excluded.
|
||||
if collection_load_paths[ft] then
|
||||
load_snippet_files(
|
||||
ft,
|
||||
collection_load_paths[ft],
|
||||
collection_load_paths.collection,
|
||||
collection_load_paths.add_opts
|
||||
)
|
||||
end
|
||||
log.info("Loading lazy-load-snippets for filetype `%s`", ft)
|
||||
|
||||
for _, collection in ipairs(Data.snipmate_collections) do
|
||||
collection:do_lazy_load(ft)
|
||||
end
|
||||
end
|
||||
|
||||
function M._load_lazy_loaded(bufnr)
|
||||
local fts = loader_util.get_load_fts(bufnr)
|
||||
--- Generalized loading of collections.
|
||||
--- @param lazy boolean Whether the collection should be loaded lazily.
|
||||
--- @param opts LuaSnip.Loaders.LoadOpts?
|
||||
local function _load(lazy, opts)
|
||||
local o = loader_util.normalize_opts(opts)
|
||||
|
||||
for _, ft in ipairs(fts) do
|
||||
if not cache.lazy_loaded_ft[ft] then
|
||||
log.info("Loading lazy-load-snippets for filetype `%s`", ft)
|
||||
M._load_lazy_loaded_ft(ft)
|
||||
cache.lazy_loaded_ft[ft] = true
|
||||
end
|
||||
local collection_roots = loader_util.resolve_root_paths(o.paths, "snippets")
|
||||
local lazy_roots = loader_util.resolve_lazy_root_paths(o.lazy_paths)
|
||||
|
||||
log.info("Found roots `%s` for paths `%s`.", vim.inspect(collection_roots), vim.inspect(o.paths))
|
||||
if o.paths and #o.paths ~= #collection_roots then
|
||||
log.warn("Could not resolve all collection-roots for paths `%s`: only found `%s`", vim.inspect(o.paths), vim.inspect(collection_roots))
|
||||
end
|
||||
end
|
||||
|
||||
function M.lazy_load(opts)
|
||||
opts = opts or {}
|
||||
log.info("Determined roots `%s` for lazy_paths `%s`.", vim.inspect(lazy_roots), vim.inspect(o.lazy_paths))
|
||||
if o.lazy_paths and #o.lazy_paths ~= #lazy_roots then
|
||||
log.warn("Could not resolve all collection-roots for lazy_paths `%s`: only found `%s`", vim.inspect(o.lazy_paths), vim.inspect(lazy_roots))
|
||||
end
|
||||
|
||||
local add_opts = loader_util.add_opts(opts)
|
||||
for paths_lazy, roots in pairs({[true] = lazy_roots, [false] = collection_roots}) do
|
||||
for _, collection_root in ipairs(roots) do
|
||||
local ok, coll_or_err = pcall(Collection.new, collection_root, lazy, o.include, o.exclude, o.add_opts, paths_lazy, o.fs_event_providers)
|
||||
|
||||
local collections_load_paths =
|
||||
loader_util.get_load_paths_snipmate_like(opts, "snippets", "snippets")
|
||||
|
||||
for _, collection in ipairs(collections_load_paths) do
|
||||
local load_paths = collection.load_paths
|
||||
local collection_paths = collection.collection_paths
|
||||
|
||||
loader_util.extend_ft_paths(cache.ft_paths, load_paths)
|
||||
|
||||
for ft, paths in pairs(load_paths) do
|
||||
if cache.lazy_loaded_ft[ft] then
|
||||
-- instantly load snippets if the ft is already loaded...
|
||||
log.info(
|
||||
"Immediately loading lazy-load-snippets for already-active filetype `%s` from files:\n%s",
|
||||
ft,
|
||||
vim.inspect(paths)
|
||||
)
|
||||
load_snippet_files(ft, paths, collection_paths, add_opts)
|
||||
-- clear from load_paths to prevent duplicat loads.
|
||||
load_paths[ft] = nil
|
||||
if not ok then
|
||||
log.error("Could not create collection at %s: %s", collection_root, coll_or_err)
|
||||
else
|
||||
table.insert(Data.snipmate_collections, coll_or_err)
|
||||
end
|
||||
end
|
||||
|
||||
log.info("Registering lazy-load-snippets:\n%s", vim.inspect(load_paths))
|
||||
|
||||
load_paths.collection = collection_paths
|
||||
load_paths.add_opts = add_opts
|
||||
table.insert(cache.lazy_load_paths, load_paths)
|
||||
end
|
||||
end
|
||||
|
||||
--- Load snipmate-snippet-collections immediately.
|
||||
--- @param opts LuaSnip.Loaders.LoadOpts?
|
||||
function M.load(opts)
|
||||
_load(false, opts)
|
||||
end
|
||||
|
||||
--- Load snipmate-snippet-collections on demand.
|
||||
--- @param opts LuaSnip.Loaders.LoadOpts?
|
||||
function M.lazy_load(opts)
|
||||
_load(true, opts)
|
||||
-- load for current buffer on startup.
|
||||
M._load_lazy_loaded(vim.api.nvim_get_current_buf())
|
||||
for _, ft in ipairs(loader_util.get_load_fts(vim.api.nvim_get_current_buf())) do
|
||||
M._load_lazy_loaded_ft(ft)
|
||||
end
|
||||
end
|
||||
|
||||
function M.edit_snippet_files()
|
||||
loader_util.edit_snippet_files(cache.ft_paths)
|
||||
end
|
||||
|
||||
-- Make sure filename is normalized.
|
||||
function M._reload_file(filename)
|
||||
local cached_data = cache.path_snippets[filename]
|
||||
if not cached_data then
|
||||
return
|
||||
end
|
||||
|
||||
local add_opts = cached_data.add_opts
|
||||
cache.path_snippets[filename] = nil
|
||||
|
||||
for ft, _ in pairs(cached_data.fts) do
|
||||
log.info(
|
||||
"Re-loading snippets contributed by %s for filetype `%s`",
|
||||
filename,
|
||||
ft
|
||||
)
|
||||
-- we can safely set collection to empty, the `extends` are already
|
||||
-- "set up", eg are included via cached_data.fts.
|
||||
load_snippet_files(ft, { filename }, {}, add_opts)
|
||||
|
||||
ls.clean_invalidated({ inv_limit = 100 })
|
||||
function M.clean()
|
||||
for _, collection in ipairs(Data.snipmate_collections) do
|
||||
collection:stop()
|
||||
end
|
||||
Data.snipmate_ft_paths = autotable(2)
|
||||
-- don't reset cache, snippets are correctly updated on file-change anyway,
|
||||
-- and there is no persistent data passed on.
|
||||
end
|
||||
|
||||
return M
|
||||
|
|
|
@ -1,10 +1,13 @@
|
|||
local ls = require("luasnip")
|
||||
local package_cache = require("luasnip.loaders._caches").vscode_packages
|
||||
local standalone_cache = require("luasnip.loaders._caches").vscode_standalone
|
||||
local util = require("luasnip.util.util")
|
||||
local loader_util = require("luasnip.loaders.util")
|
||||
local Path = require("luasnip.util.path")
|
||||
local log = require("luasnip.util.log").new("vscode-loader")
|
||||
local autotable = require("luasnip.util.auto_table").autotable
|
||||
local path_watcher = require("luasnip.loaders.fs_watchers").path
|
||||
local Data = require("luasnip.loaders.data")
|
||||
local session = require("luasnip.session")
|
||||
local refresh_notify = require("luasnip.session.enqueueable_operations").refresh_notify
|
||||
local clean_invalidated = require("luasnip.session.enqueueable_operations").clean_invalidated
|
||||
|
||||
local json_decoders = {
|
||||
json = util.json_decode,
|
||||
|
@ -38,20 +41,27 @@ local function read_json(fname)
|
|||
end
|
||||
end
|
||||
|
||||
-- return all snippets in `file`.
|
||||
--- Load snippets from vscode-snippet-file.
|
||||
--- @param file string Path to file
|
||||
---@return LuaSnip.Loaders.SnippetFileData
|
||||
local function get_file_snippets(file)
|
||||
local sp = require("luasnip.nodes.snippetProxy")
|
||||
local session = require("luasnip.session")
|
||||
local source = require("luasnip.session.snippet_collection.source")
|
||||
local multisnippet = require("luasnip.nodes.multiSnippet")
|
||||
|
||||
-- since most snippets we load don't have a scope-field, we just insert this here by default.
|
||||
-- since most snippets we load don't have a scope-field, we just insert
|
||||
-- them here by default.
|
||||
local snippets = {}
|
||||
|
||||
local snippet_set_data = read_json(file)
|
||||
if snippet_set_data == nil then
|
||||
log.error("Reading json from file `%s` failed, skipping it.", file)
|
||||
return {}
|
||||
|
||||
return {
|
||||
snippets = {},
|
||||
autosnippets = {},
|
||||
misc = {}
|
||||
}
|
||||
end
|
||||
|
||||
for name, parts in pairs(snippet_set_data) do
|
||||
|
@ -111,80 +121,36 @@ local function get_file_snippets(file)
|
|||
end
|
||||
end
|
||||
|
||||
return snippets
|
||||
return {
|
||||
snippets = snippets,
|
||||
autosnippets = {},
|
||||
misc = {}
|
||||
}
|
||||
end
|
||||
|
||||
-- `refresh` to optionally delay refresh_notify.
|
||||
-- (it has to be called by the caller, for filetype!)
|
||||
-- opts may contain:
|
||||
-- `refresh_notify`: refresh snippets for filetype immediately, default false.
|
||||
-- `force_reload`: don't use cache when reloading, default false
|
||||
local function load_snippet_file(file, filetype, add_opts, opts)
|
||||
local duplicate = require("luasnip.nodes.duplicate")
|
||||
-- has to be set in separate module to allow different module-path-separators
|
||||
-- in `require`.
|
||||
Data.vscode_cache = require("luasnip.loaders.snippet_cache").new(get_file_snippets)
|
||||
|
||||
opts = opts or {}
|
||||
local refresh_notify =
|
||||
util.ternary(opts.refresh_notify ~= nil, opts.refresh_notify, false)
|
||||
local force_reload =
|
||||
util.ternary(opts.force_reload ~= nil, opts.force_reload, false)
|
||||
|
||||
if not Path.exists(file) then
|
||||
log.error(
|
||||
"Trying to read snippets from file %s, but it does not exist.",
|
||||
file
|
||||
)
|
||||
return
|
||||
end
|
||||
|
||||
local file_snippets
|
||||
local cache = package_cache.path_snippets[file]
|
||||
if cache.snippets and not force_reload then
|
||||
file_snippets = vim.tbl_map(duplicate.duplicate_addable, cache.snippets)
|
||||
else
|
||||
file_snippets = get_file_snippets(file)
|
||||
|
||||
-- store snippets as-is (eg. don't copy), they will be copied when read
|
||||
-- from.
|
||||
package_cache.path_snippets[file].snippets = file_snippets
|
||||
end
|
||||
|
||||
ls.add_snippets(
|
||||
filetype,
|
||||
-- only load snippets matching the language set in `package.json`.
|
||||
file_snippets,
|
||||
vim.tbl_extend("keep", {
|
||||
-- include filetype, a file may contribute snippets to multiple
|
||||
-- filetypes, and we don't want to remove snippets for ft1 when
|
||||
-- adding those for ft2.
|
||||
key = string.format("__%s_snippets_%s", filetype, file),
|
||||
refresh_notify = refresh_notify,
|
||||
}, add_opts)
|
||||
)
|
||||
log.info("Adding %s snippets from %s", #file_snippets, file)
|
||||
end
|
||||
|
||||
--- Find all files+associated filetypes in a package.
|
||||
---@param root string, directory of the package (immediate parent of the
|
||||
--- package.json)
|
||||
---@param filter function that filters filetypes, generate from in/exclude-list
|
||||
--- via loader_util.ft_filter.
|
||||
---@return table: string -> string[] (ft -> files)
|
||||
local function package_files(root, filter)
|
||||
local package = Path.join(root, "package.json")
|
||||
--- Parse package.json(c), determine all files that contribute snippets, and
|
||||
--- which filetype is associated with them.
|
||||
--- @param manifest string
|
||||
--- @return table<string, table<string, true|nil>>
|
||||
local function get_snippet_files(manifest)
|
||||
-- if root doesn't contain a package.json, or it contributes no snippets,
|
||||
-- return no snippets.
|
||||
if not Path.exists(package) then
|
||||
if not Path.exists(manifest) then
|
||||
log.warn(
|
||||
"Looked for `package.json` in `root`, does not exist.",
|
||||
package
|
||||
"Manifest %s does not exist",
|
||||
manifest
|
||||
)
|
||||
return {}
|
||||
end
|
||||
|
||||
local package_data = read_json(package)
|
||||
local package_data = read_json(manifest)
|
||||
if not package_data then
|
||||
-- since it is a `.json`, the json not being correct should be an error.
|
||||
log.error("Could not read json from `%s`", package)
|
||||
-- since it is a `.json/jsonc`, the json not being correct should be an error.
|
||||
log.error("Could not read json from `%s`", manifest)
|
||||
return {}
|
||||
end
|
||||
|
||||
|
@ -192,271 +158,390 @@ local function package_files(root, filter)
|
|||
not package_data.contributes or not package_data.contributes.snippets
|
||||
then
|
||||
log.warn(
|
||||
"Package %s does not contribute any snippets, skipping it",
|
||||
package
|
||||
"Manifest %s does not contribute any snippets.",
|
||||
manifest
|
||||
)
|
||||
return {}
|
||||
end
|
||||
|
||||
-- stores ft -> files(string[]).
|
||||
local ft_files = {}
|
||||
-- stores ft -> files -> true|nil, allow iterating files and their
|
||||
-- filetypes while preventing duplicates.
|
||||
local ft_file_set = autotable(2, {warn = false})
|
||||
|
||||
-- parent-directory of package.json(c), all files in the package.json(c)
|
||||
-- are relative to it.
|
||||
local package_parent = Path.parent(manifest)
|
||||
|
||||
for _, snippet_entry in pairs(package_data.contributes.snippets) do
|
||||
local langs = snippet_entry.language
|
||||
local absolute_path = Path.join(package_parent, snippet_entry.path)
|
||||
|
||||
local normalized_snippet_file =
|
||||
Path.normalize(absolute_path)
|
||||
|
||||
if not normalized_snippet_file then
|
||||
-- path does not exist (yet), try and guess the correct path anyway.
|
||||
normalized_snippet_file = Path.normalize_nonexisting(absolute_path)
|
||||
log.warn(
|
||||
"Could not find file %s advertised in %s, guessing %s as the absolute and normalized path.",
|
||||
absolute_path,
|
||||
manifest,
|
||||
normalized_snippet_file
|
||||
)
|
||||
end
|
||||
|
||||
local langs = snippet_entry.language
|
||||
if type(langs) ~= "table" then
|
||||
langs = { langs }
|
||||
end
|
||||
for _, ft in ipairs(langs) do
|
||||
if filter(ft) then
|
||||
if not ft_files[ft] then
|
||||
ft_files[ft] = {}
|
||||
end
|
||||
local normalized_snippet_file =
|
||||
Path.normalize(Path.join(root, snippet_entry.path))
|
||||
ft_file_set[ft][normalized_snippet_file] = true
|
||||
end
|
||||
end
|
||||
|
||||
-- the file might not exist..
|
||||
if normalized_snippet_file then
|
||||
table.insert(ft_files[ft], normalized_snippet_file)
|
||||
else
|
||||
log.warn(
|
||||
"Could not find file %s from advertised in %s",
|
||||
snippet_entry.path,
|
||||
root
|
||||
)
|
||||
return ft_file_set
|
||||
end
|
||||
|
||||
-- Responsible for watching a single json-snippet-file.
|
||||
local SnippetfileWatcher = {}
|
||||
local SnippetfileWatcher_mt = {__index = SnippetfileWatcher}
|
||||
|
||||
function SnippetfileWatcher.new(path, initial_ft, fs_event_providers, lazy, load_cb)
|
||||
local o = setmetatable({
|
||||
path = path,
|
||||
load_cb = load_cb,
|
||||
-- track which filetypes this file has been loaded for, so we can
|
||||
-- reload for all of them.
|
||||
loaded_fts = {[initial_ft] = true}
|
||||
}, SnippetfileWatcher_mt)
|
||||
|
||||
local load_all_fts = function()
|
||||
for ft, _ in pairs(o.loaded_fts) do
|
||||
load_cb(path, ft)
|
||||
refresh_notify(ft)
|
||||
end
|
||||
end
|
||||
local ok, err_or_watcher = pcall(path_watcher, path, {
|
||||
add = load_all_fts,
|
||||
change = function()
|
||||
load_all_fts()
|
||||
|
||||
-- clean snippets if enough were removed.
|
||||
clean_invalidated()
|
||||
end
|
||||
},
|
||||
{ lazy = lazy, fs_event_providers = fs_event_providers})
|
||||
|
||||
if not ok then
|
||||
-- has to be handled by caller, we can't really proceed if the creation
|
||||
-- failed.
|
||||
error(("Could not create path_watcher for path %s: %s"):format(path, err_or_watcher))
|
||||
end
|
||||
|
||||
o.watcher = err_or_watcher
|
||||
|
||||
return o
|
||||
end
|
||||
|
||||
-- called by collection.
|
||||
function SnippetfileWatcher:add_ft(ft)
|
||||
if self.loaded_fts[ft] then
|
||||
-- already loaded.
|
||||
return
|
||||
end
|
||||
self.loaded_fts[ft] = true
|
||||
self.load_cb(self.path, ft)
|
||||
end
|
||||
|
||||
function SnippetfileWatcher:stop()
|
||||
self.watcher:stop()
|
||||
end
|
||||
|
||||
--- Collection watches all files that belong to a collection of snippets below
|
||||
--- some root, and registers new files.
|
||||
local Collection = {}
|
||||
local Collection_mt = {
|
||||
__index = Collection
|
||||
}
|
||||
|
||||
function Collection.new(manifest_path, lazy, include_ft, exclude_ft, add_opts, lazy_watcher, fs_event_providers)
|
||||
local ft_filter = loader_util.ft_filter(include_ft, exclude_ft)
|
||||
local o = setmetatable({
|
||||
lazy = lazy,
|
||||
-- store ft -> set of files that should be lazy-loaded.
|
||||
lazy_files = autotable(2, {warn = false}),
|
||||
fs_event_providers = fs_event_providers,
|
||||
|
||||
-- store path-watchers (so we don't register more than one for one
|
||||
-- path), and so we can disable them.
|
||||
path_watchers = {},
|
||||
-- for really loading a file.
|
||||
-- this is not done in Collection:load itself, since it may have to be
|
||||
-- performed as a callback on file-creation.
|
||||
load_callback = function(path, ft)
|
||||
local data = Data.vscode_cache:fetch(path)
|
||||
-- autosnippets are included in snippets for this loader.
|
||||
local snippets = data.snippets
|
||||
loader_util.add_file_snippets(ft, path, snippets, {}, add_opts)
|
||||
end,
|
||||
-- initialized in a bit, we have to store+reset a watcher for the manifest-file.
|
||||
manifest_watcher = nil
|
||||
}, Collection_mt)
|
||||
|
||||
-- callback for updating the file-filetype-associations from the manifest.
|
||||
local update_manifest = function()
|
||||
local manifest_ft_paths = get_snippet_files(manifest_path)
|
||||
for ft, path_set in pairs(manifest_ft_paths) do
|
||||
if ft_filter(ft) then
|
||||
for path, _ in pairs(path_set) do
|
||||
o:add_file(path, ft)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return ft_files
|
||||
local watcher_ok, err = pcall(path_watcher, manifest_path, {
|
||||
-- don't handle removals for now.
|
||||
add = update_manifest,
|
||||
change = update_manifest
|
||||
}, {lazy = lazy_watcher, fs_event_providers = fs_event_providers})
|
||||
|
||||
if not watcher_ok then
|
||||
error(("Could not create watcher: %s"):format(err))
|
||||
end
|
||||
o.manifest_watcher = watcher_ok
|
||||
|
||||
log.info("Initialized snippet-collection with manifest %s", manifest_path)
|
||||
|
||||
return o
|
||||
end
|
||||
|
||||
local function get_snippet_rtp()
|
||||
return vim.tbl_map(function(itm)
|
||||
return vim.fn.fnamemodify(itm, ":h")
|
||||
end, vim.api.nvim_get_runtime_file("package.json", true))
|
||||
-- Add file with some filetype to collection, load according to lazy_load.
|
||||
function Collection:add_file(path, ft)
|
||||
Data.vscode_ft_paths[ft][path] = true
|
||||
|
||||
if self.lazy then
|
||||
if not session.loaded_fts[ft] then
|
||||
log.info("Registering lazy-load-snippets for ft `%s` from file `%s`", ft, path)
|
||||
|
||||
-- only register to load later.
|
||||
self.lazy_files[ft][path] = true
|
||||
return
|
||||
else
|
||||
log.info(
|
||||
"Filetype `%s` is already active, loading immediately.",
|
||||
ft
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
self:load_file(path, ft)
|
||||
end
|
||||
|
||||
-- sanitizes opts and returns
|
||||
-- * ft -> files-map for `opts` (respects in/exclude).
|
||||
-- * files -> ft-map (need to look up which filetypes a file contributes).
|
||||
local function get_snippet_files(opts)
|
||||
local paths
|
||||
-- list of paths to crawl for loading (could be a table or a comma-separated-list)
|
||||
if not opts.paths then
|
||||
paths = get_snippet_rtp()
|
||||
elseif type(opts.paths) == "string" then
|
||||
paths = vim.split(opts.paths, ",")
|
||||
function Collection:load_file(path, ft)
|
||||
log.info(
|
||||
"Registering file %s with filetype %s for loading.",
|
||||
path,
|
||||
ft
|
||||
)
|
||||
if not self.path_watchers[path] then
|
||||
-- always register these lazily, that way an upate to the package.json
|
||||
-- without the snippet-file existing will work!
|
||||
-- Also make sure we use the same fs_event_providers.
|
||||
local ok, watcher_or_err = pcall(SnippetfileWatcher.new, path, ft, self.fs_event_providers, true, self.load_callback)
|
||||
if not ok then
|
||||
log.error("Could not create SnippetFileWatcher for path %s: %s", path, watcher_or_err)
|
||||
return
|
||||
end
|
||||
self.path_watchers[path] = watcher_or_err
|
||||
else
|
||||
paths = opts.paths
|
||||
-- make new filetype known to existing watcher.
|
||||
self.path_watchers[path]:add_ft(ft)
|
||||
end
|
||||
|
||||
paths = vim.tbl_map(Path.expand, paths) -- Expand before deduping, fake paths will become nil
|
||||
paths = vim.tbl_filter(function(v)
|
||||
return v
|
||||
end, paths) -- ditch nil
|
||||
paths = util.deduplicate(paths) -- Remove doppelgänger paths
|
||||
|
||||
local ft_paths = {}
|
||||
|
||||
local ft_filter = loader_util.ft_filter(opts.exclude, opts.include)
|
||||
for _, root_path in ipairs(paths) do
|
||||
loader_util.extend_ft_paths(
|
||||
ft_paths,
|
||||
package_files(root_path, ft_filter)
|
||||
)
|
||||
end
|
||||
|
||||
return ft_paths
|
||||
end
|
||||
|
||||
-- initializes ft_paths for `file`, and stores the add_opts for the filetype-file combination.
|
||||
-- We can't just store add_opts for a single file, since via in/exclude, they
|
||||
-- may differ for a single file which contributes multiple snippet-filetypes.
|
||||
local function update_cache(cache, file, filetype, add_opts)
|
||||
local filecache = cache.path_snippets[file]
|
||||
if not filecache then
|
||||
filecache = {
|
||||
filetype_add_opts = {},
|
||||
filetypes = {},
|
||||
}
|
||||
cache.path_snippets[file] = filecache
|
||||
-- stop all watchers associated with this collection, to make sure no snippets
-- are added from this collection again.
function Collection:stop()
	self.manifest_watcher:stop()
	for _, file_watcher in pairs(self.path_watchers) do
		file_watcher:stop()
	end
end
|
||||
|
||||
filecache.filetype_add_opts[filetype] = add_opts
|
||||
filecache.filetypes[filetype] = true
|
||||
-- Load all files that were registered for lazy-loading under filetype `ft`.
function Collection:do_lazy_load(ft)
	for lazy_file in pairs(self.lazy_files[ft]) do
		self:load_file(lazy_file, ft)
	end
end
|
||||
|
||||
local M = {}
|
||||
function M.load(opts)
|
||||
opts = opts or {}
|
||||
|
||||
-- applies in/exclude.
|
||||
local ft_files = get_snippet_files(opts)
|
||||
local add_opts = loader_util.add_opts(opts)
|
||||
-- Collect all package.json- and package.jsonc-manifests on the runtimepath.
local function get_rtp_paths()
	-- would be very surprised if this yields duplicates :D
	local manifests = vim.api.nvim_get_runtime_file("package.json", true)
	return vim.list_extend(manifests, vim.api.nvim_get_runtime_file("package.jsonc", true))
end
|
||||
|
||||
loader_util.extend_ft_paths(package_cache.ft_paths, ft_files)
|
||||
|
||||
log.info("Loading snippet:", vim.inspect(ft_files))
|
||||
for ft, files in pairs(ft_files) do
|
||||
for _, file in ipairs(files) do
|
||||
update_cache(package_cache, file, ft, add_opts)
|
||||
|
||||
-- `false`: don't refresh while adding.
|
||||
load_snippet_file(file, ft, add_opts, { refresh_notify = false })
|
||||
--- Generate list of manifest-paths from list of directory-paths.
|
||||
--- If nil, search rtp.
|
||||
--- If a given directory does not contain a manifest (package.json or
--- package.jsonc), it is skipped with a warning.
|
||||
---
|
||||
--- @param paths string|table? List of existing directories. If nil, search runtimepath.
|
||||
---@return string[] manifest_paths
|
||||
--- Resolve each directory in `paths` to the path of its manifest, preferring
--- package.json over package.jsonc. Directories without an existing manifest
--- are skipped with a warning.
--- @param paths string|string[]? List of existing directories, or a
---   comma-separated string of them (consistent with `get_lazy_manifests`).
---   If nil, search the runtimepath.
--- @return string[] manifest_paths
local function get_manifests(paths)
	if not paths then
		return get_rtp_paths()
	end

	-- accept a comma-separated list, like get_lazy_manifests does.
	if type(paths) == "string" then
		paths = vim.split(paths, ",")
	end

	local manifest_paths = {}
	for _, dir in ipairs(paths) do
		-- Get path to package.json/package.jsonc, or continue if it does not exist.
		-- Path.expand returns nil for paths that don't exist.
		local tentative_manifest_path = Path.expand(Path.join(dir, "package.json"))
			or Path.expand(Path.join(dir, "package.jsonc"))

		if tentative_manifest_path then
			table.insert(manifest_paths, tentative_manifest_path)
		else
			log.warn("Could not find package.json(c) in path %s (expanded to %s).", dir, Path.expand(dir))
		end
	end

	return manifest_paths
end
|
||||
|
||||
--- Generate list of paths to manifests that may not yet exist, from list of
--- directories (which also may not yet exist).
--- One peculiarity: This will generate two paths for each directory, since we
--- don't know if the package.json or the package.jsonc will be created.
--- This may cause a bit of overhead (not much due to snippet-cache) if both
--- are created and contribute the same snippets, but that's unlikely and/or
--- user error :P
--- @param paths string[]
---@return string[]
local function get_lazy_manifests(paths)
	local lazy_manifests = {}
	if not paths then
		return lazy_manifests
	end

	-- accept a comma-separated list of directories as well.
	if type(paths) == "string" then
		paths = vim.split(paths, ",")
	end

	for _, dir in ipairs(paths) do
		local abs_dir = Path.expand_maybe_nonexisting(dir)

		-- register both manifest-variants for the (maybe nonexisting) dir.
		table.insert(lazy_manifests, Path.join(abs_dir, "package.json"))
		table.insert(lazy_manifests, Path.join(abs_dir, "package.jsonc"))
	end

	return lazy_manifests
end
|
||||
|
||||
local function _load(lazy, opts)
|
||||
local o = loader_util.normalize_opts(opts)
|
||||
|
||||
local manifests = get_manifests(o.paths)
|
||||
local lazy_manifests = get_lazy_manifests(o.lazy_paths)
|
||||
|
||||
log.info("Found manifests `%s` for paths `%s`.", vim.inspect(manifests), vim.inspect(o.paths))
|
||||
if o.paths and #o.paths ~= #manifests then
|
||||
log.warn("Could not resolve all manifests for paths `%s`: only found `%s`", vim.inspect(o.paths), vim.inspect(manifests))
|
||||
end
|
||||
|
||||
log.info("Determined roots `%s` for lazy_paths `%s`.", vim.inspect(lazy_manifests), vim.inspect(o.lazy_paths))
|
||||
-- two lazy manifests from each lazy directory.
|
||||
if o.lazy_paths and #o.lazy_paths ~= 2*#lazy_manifests then
|
||||
log.warn("Could not resolve all manifests for lazy_paths `%s`: only found `%s`", vim.inspect(o.lazy_paths), vim.inspect(lazy_manifests))
|
||||
end
|
||||
|
||||
for is_lazy, manifest_paths in pairs({[true] = lazy_manifests, [false] = manifests}) do
|
||||
for _, manifest_path in ipairs(manifest_paths) do
|
||||
local ok, coll_or_err = pcall(Collection.new, manifest_path, lazy, o.include, o.exclude, o.add_opts, is_lazy, o.fs_event_providers)
|
||||
|
||||
if not ok then
|
||||
log.error("Could not create collection for manifest %s: %s", manifest_path, coll_or_err)
|
||||
else
|
||||
table.insert(Data.vscode_package_collections, coll_or_err)
|
||||
end
|
||||
end
|
||||
ls.refresh_notify(ft)
|
||||
end
|
||||
end
|
||||
|
||||
function M._load_lazy_loaded_ft(ft)
|
||||
for _, file in ipairs(package_cache.lazy_load_paths[ft] or {}) do
|
||||
load_snippet_file(
|
||||
file,
|
||||
ft,
|
||||
package_cache.path_snippets[file].filetype_add_opts[ft],
|
||||
{ refresh_notify = false }
|
||||
)
|
||||
log.info("Loading lazy-load-snippets for filetype `%s`", ft)
|
||||
|
||||
for _, collection in ipairs(Data.vscode_package_collections) do
|
||||
collection:do_lazy_load(ft)
|
||||
end
|
||||
ls.refresh_notify(ft)
|
||||
-- no need to lazy_load standalone-snippets.
|
||||
end
|
||||
|
||||
function M._load_lazy_loaded(bufnr)
|
||||
local fts = loader_util.get_load_fts(bufnr)
|
||||
|
||||
for _, ft in ipairs(fts) do
|
||||
if not package_cache.lazy_loaded_ft[ft] then
|
||||
M._load_lazy_loaded_ft(ft)
|
||||
log.info("Loading lazy-load-snippets for filetype `%s`", ft)
|
||||
package_cache.lazy_loaded_ft[ft] = true
|
||||
end
|
||||
end
|
||||
-- Eagerly load all collections described by `opts`.
-- Thin wrapper around `_load` with lazy=false.
function M.load(opts)
	_load(false, opts)
end
|
||||
|
||||
function M.lazy_load(opts)
|
||||
opts = opts or {}
|
||||
|
||||
-- get two maps, one mapping filetype->associated files, and another
|
||||
-- mapping files->default-filetypes.
|
||||
local ft_files = get_snippet_files(opts)
|
||||
local add_opts = loader_util.add_opts(opts)
|
||||
|
||||
loader_util.extend_ft_paths(package_cache.ft_paths, ft_files)
|
||||
|
||||
-- immediately load filetypes that have already been loaded.
|
||||
-- They will not be loaded otherwise.
|
||||
for ft, files in pairs(ft_files) do
|
||||
-- first register add_opts for all files, then iterate over files again
|
||||
-- if they are already loaded.
|
||||
|
||||
for _, file in ipairs(files) do
|
||||
update_cache(package_cache, file, ft, add_opts)
|
||||
end
|
||||
|
||||
if package_cache.lazy_loaded_ft[ft] then
|
||||
for _, file in ipairs(files) do
|
||||
-- instantly load snippets if they were already loaded...
|
||||
load_snippet_file(
|
||||
file,
|
||||
ft,
|
||||
add_opts,
|
||||
{ refresh_notify = false }
|
||||
)
|
||||
log.info(
|
||||
"Immediately loading lazy-load-snippets for already-active filetype %s from files:\n%s",
|
||||
ft,
|
||||
vim.inspect(files)
|
||||
)
|
||||
end
|
||||
ls.refresh_notify(ft)
|
||||
|
||||
-- don't load these files again.
|
||||
-- clearing while iterating is fine: https://www.lua.org/manual/5.1/manual.html#pdf-next
|
||||
ft_files[ft] = nil
|
||||
end
|
||||
end
|
||||
log.info("Registering lazy-load-snippets:\n%s", vim.inspect(ft_files))
|
||||
|
||||
loader_util.extend_ft_paths(package_cache.lazy_load_paths, ft_files)
|
||||
_load(true, opts)
|
||||
|
||||
-- load for current buffer on startup.
|
||||
M._load_lazy_loaded(vim.api.nvim_get_current_buf())
|
||||
end
|
||||
|
||||
function M.edit_snippet_files()
|
||||
loader_util.edit_snippet_files(package_cache.ft_paths)
|
||||
end
|
||||
|
||||
local function standalone_add(path, add_opts)
|
||||
local file_snippets = get_file_snippets(path)
|
||||
|
||||
ls.add_snippets(
|
||||
-- nil: provided snippets are a table mapping filetype->snippets.
|
||||
"all",
|
||||
file_snippets,
|
||||
vim.tbl_extend("keep", {
|
||||
key = string.format("__snippets_%s", path),
|
||||
}, add_opts)
|
||||
)
|
||||
for _, ft in ipairs(loader_util.get_load_fts(vim.api.nvim_get_current_buf())) do
|
||||
M._load_lazy_loaded_ft(ft)
|
||||
end
|
||||
end
|
||||
|
||||
function M.load_standalone(opts)
|
||||
opts = opts or {}
|
||||
local path = Path.expand(opts.path)
|
||||
local add_opts = loader_util.add_opts(opts)
|
||||
|
||||
-- register file for `all`-filetype in cache.
|
||||
if not standalone_cache.ft_paths.all then
|
||||
standalone_cache.ft_paths.all = {}
|
||||
local lazy = vim.F.if_nil(opts.lazy, false)
|
||||
local add_opts = loader_util.make_add_opts(opts)
|
||||
local fs_event_providers = vim.F.if_nil(opts.fs_event_providers, {autocmd = true, libuv = false})
|
||||
|
||||
local path
|
||||
if not lazy then
|
||||
path = Path.expand(opts.path)
|
||||
if not path then
|
||||
log.error("Expanding path %s does not produce an existing path.", opts.path)
|
||||
return
|
||||
end
|
||||
else
|
||||
path = Path.expand_maybe_nonexisting(opts.path)
|
||||
end
|
||||
|
||||
-- record in cache, so edit_snippet_files can find it.
|
||||
-- Store under "all" for now, alternative: collect all filetypes the
|
||||
-- snippets contribute to.
|
||||
-- Since .code-snippets are mainly (?) project-local, that behaviour does
|
||||
-- not seem to bad.
|
||||
table.insert(standalone_cache.ft_paths.all, path)
|
||||
Data.vscode_ft_paths["all"][path] = true
|
||||
|
||||
-- only store add_opts, we don't need to remember filetypes and the like,
|
||||
-- and here the filename is enough to identify add_opts.
|
||||
standalone_cache.path_snippets[path] = add_opts
|
||||
local ok, watcher_or_err = pcall(SnippetfileWatcher.new, path, "all", fs_event_providers, lazy, function()
|
||||
local data = Data.vscode_cache:fetch(path)
|
||||
-- autosnippets are included in snippets for this loader.
|
||||
local snippets = data.snippets
|
||||
loader_util.add_file_snippets("all", path, snippets, {}, add_opts)
|
||||
end)
|
||||
|
||||
standalone_add(path, add_opts)
|
||||
if not ok then
|
||||
log.error("Could not create SnippetFileWatcher for path %s: %s", path, watcher_or_err)
|
||||
return
|
||||
end
|
||||
|
||||
table.insert(Data.vscode_standalone_watchers, watcher_or_err)
|
||||
end
|
||||
|
||||
-- filename is normalized
|
||||
function M._reload_file(filename)
|
||||
local package_cached_data = package_cache.path_snippets[filename]
|
||||
if package_cached_data then
|
||||
log.info("Re-loading snippets contributed by %s", filename)
|
||||
|
||||
-- reload file for all filetypes it occurs in.
|
||||
-- only the first call actually needs to force-reload, all other can
|
||||
-- just use its snippets.
|
||||
local force_reload = true
|
||||
for ft, _ in pairs(package_cached_data.filetypes) do
|
||||
load_snippet_file(
|
||||
filename,
|
||||
ft,
|
||||
package_cached_data.filetype_add_opts[ft],
|
||||
{ force_reload = force_reload }
|
||||
)
|
||||
-- only force-reload once, then reuse updated snippets.
|
||||
force_reload = false
|
||||
end
|
||||
|
||||
ls.clean_invalidated({ inv_limit = 100 })
|
||||
function M.clean()
|
||||
for _, collection in ipairs(Data.vscode_package_collections) do
|
||||
collection:stop()
|
||||
end
|
||||
|
||||
local standalone_cached_data = standalone_cache.path_snippets[filename]
|
||||
if standalone_cached_data then
|
||||
log.info("Re-loading snippets contributed by %s", filename)
|
||||
local add_opts = standalone_cached_data
|
||||
|
||||
standalone_add(filename, add_opts)
|
||||
ls.clean_invalidated({ inv_limit = 100 })
|
||||
Data.vscode_package_collections = {}
|
||||
for _, standalone_watcher in ipairs(Data.vscode_standalone_watchers) do
|
||||
standalone_watcher:stop()
|
||||
end
|
||||
Data.vscode_standalone_watchers = {}
|
||||
|
||||
Data.vscode_ft_paths = autotable(2)
|
||||
-- don't reset cache, there's no reason to discard the already-loaded
|
||||
-- snippets as long as they're unchanged.
|
||||
end
|
||||
|
||||
return M
|
||||
|
|
619
lua/luasnip/loaders/fs_watchers.lua
Normal file
619
lua/luasnip/loaders/fs_watchers.lua
Normal file
|
@ -0,0 +1,619 @@
|
|||
local Path = require("luasnip.util.path")
|
||||
local uv = vim.uv or vim.loop
|
||||
local util = require("luasnip.util.util")
|
||||
local log_tree = require("luasnip.util.log").new("tree-watcher")
|
||||
local log_path = require("luasnip.util.log").new("path-watcher")
|
||||
local log = require("luasnip.util.log").new("fs-watchers")
|
||||
|
||||
local M = {}
|
||||
|
||||
-- used by both watchers.
|
||||
local callback_mt = {
|
||||
__index = function() return util.nop end
|
||||
}
|
||||
|
||||
--- @alias LuaSnip.FSWatcher.FSEventProviders
|
||||
--- | '"autocmd"' Hook into BufWritePost to receive notifications on file-changes.
|
||||
--- | '"libuv"' Register uv.fs_event to receive notifications on file-changes.
|
||||
|
||||
--- @alias LuaSnip.FSWatcher.Callback fun(full_path: string)
|
||||
|
||||
--- @class LuaSnip.FSWatcher.TreeCallbacks
|
||||
--- @field new_file LuaSnip.FSWatcher.Callback?
|
||||
--- @field new_dir LuaSnip.FSWatcher.Callback?
|
||||
--- @field remove_file LuaSnip.FSWatcher.Callback?
|
||||
--- @field remove_dir LuaSnip.FSWatcher.Callback?
|
||||
--- @field remove_root LuaSnip.FSWatcher.Callback?
|
||||
--- @field change_file LuaSnip.FSWatcher.Callback?
|
||||
--- @field change_dir LuaSnip.FSWatcher.Callback?
|
||||
--- The callbacks are called with the full path to the file/directory that is
|
||||
--- affected.
|
||||
--- Callbacks that are not set will be replaced by a nop.
|
||||
|
||||
--- @class LuaSnip.FSWatcher.PathCallbacks
|
||||
--- @field add LuaSnip.FSWatcher.Callback?
|
||||
--- @field remove LuaSnip.FSWatcher.Callback?
|
||||
--- @field change LuaSnip.FSWatcher.Callback?
|
||||
--- The callbacks are called with the full path to the file that path-watcher
|
||||
--- is registered on.
|
||||
--- Callbacks that are not set will be replaced by a nop.
|
||||
|
||||
--- @class LuaSnip.FSWatcher.Options
|
||||
--- @field lazy boolean?
|
||||
--- If set, the watcher will be initialized even if the root/watched path does
|
||||
--- not yet exist, and start notifications once it is created.
|
||||
--- @field fs_event_providers table<LuaSnip.FSWatcher.FSEventProviders, boolean>?
|
||||
--- Which providers to use for receiving file-changes.
|
||||
|
||||
-- Normalize watcher-options.
-- Returns `lazy` (default: false) and `fs_event_providers`
-- (default: {autocmd = true, libuv = false}).
-- Explicit nil-checks behave like vim.F.if_nil: only a missing value falls
-- back to the default, an explicit `false` is kept.
local function get_opts(opts)
	opts = opts or {}

	local lazy = opts.lazy
	if lazy == nil then
		lazy = false
	end

	local fs_event_providers = opts.fs_event_providers
	if fs_event_providers == nil then
		fs_event_providers = { autocmd = true, libuv = false }
	end

	return lazy, fs_event_providers
end
|
||||
|
||||
-- plain list, don't use map-style table since we'll only need direct access to
-- a watcher when it is stopped, which seldomly happens (at least, compared to
-- how often it is iterated in the autocmd-callback).
M.active_watchers = {}

-- One shared BufWritePost-autocmd feeds write-events to all active watchers;
-- this implements the "autocmd"-fs_event_provider.
vim.api.nvim_create_augroup("_luasnip_fs_watcher", {})
vim.api.nvim_create_autocmd({ "BufWritePost" }, {
	callback = function(args)
		log.debug("Received BufWritePost for file %s.", args.file)
		-- resolve symlinks etc. so watchers can compare against their own
		-- normalized root/path.
		local realpath = Path.normalize(args.file)
		if not realpath then
			-- if nil, the path does not exist for some reason.
			log.info("Registered BufWritePost with <afile> %s, but realpath does not exist. Aborting fs-watcher-notification.", args.file)
			return
		end
		log.debug("Received update for file %s, using realpath %s.", args.file, realpath)

		-- remove stopped watchers.
		-- Does not really matter whether we do this before or after the
		-- callbacks, since stopped watchers already take care to not do
		-- callbacks.
		-- Doing this during the callback-invocations, however, would incur
		-- some more complexity since ipairs does not support removal of
		-- elements during the iteration.
		M.active_watchers = vim.tbl_filter(function(watcher)
			-- this won't catch unstarted watchers, since they can't be in this
			-- list in the first place.
			return not watcher.stopped
		end, M.active_watchers)

		for _, watcher in ipairs(M.active_watchers) do
			watcher:BufWritePost_callback(realpath)
		end
	end,
	group = "_luasnip_fs_watcher",
})
|
||||
|
||||
--- @class LuaSnip.FSWatcher.Tree
|
||||
--- @field root string
|
||||
--- @field fs_event userdata
|
||||
--- @field files table<string, boolean>
|
||||
--- @field dir_watchers table<string, LuaSnip.FSWatcher.Tree>
|
||||
--- @field removed boolean
|
||||
--- @field stopped boolean
|
||||
--- @field callbacks LuaSnip.FSWatcher.TreeCallbacks
|
||||
--- @field depth number How deep the root should be monitored.
|
||||
--- @field fs_event_providers table<LuaSnip.FSWatcher.FSEventProviders, boolean>
|
||||
--- @field realpath_root string? Set as soon as the watcher is started.
|
||||
local TreeWatcher = {}
|
||||
local TreeWatcher_mt = {
|
||||
__index = TreeWatcher
|
||||
}
|
||||
|
||||
-- Stop this watcher and, recursively, the watchers of all child-directories.
function TreeWatcher:stop()
	-- dir_watchers is keyed by relative path (a string, see new_dir), so it
	-- has to be traversed with pairs; ipairs would not visit any child and
	-- child-watchers would keep running.
	for _, child_watcher in pairs(self.dir_watchers) do
		child_watcher:stop()
	end
	self:stop_self()
end
|
||||
|
||||
-- Stop notifications from this watcher only; child-watchers are unaffected.
function TreeWatcher:stop_self()
	-- don't check which fs_event_providers were actually started, for both of
	-- these it should not matter if they weren't.
	self.send_notifications = false
	self.stopped = true

	-- will be removed from active_watchers after the next event, but already
	-- won't receive it.
	self.fs_event:stop()
end
|
||||
|
||||
-- Handler for events reported by libuv's fs_event on self.root.
-- Determines, from stat-ing the reported path, whether a child was created,
-- removed or changed (or whether the root itself vanished) and dispatches to
-- the matching new_*/remove_*/change_*-method.
function TreeWatcher:fs_event_callback(err, relpath, events)
	if not self.send_notifications then
		-- abort if we should not send notifications anymore.
		return
	end
	vim.schedule_wrap(function()
		log_tree.debug("raw: self.root: %s; err: %s; relpath: %s; change: %s; rename: %s", self.root, err, relpath, events.change, events.rename)
		local full_path = Path.join(self.root, relpath)
		local path_stat = uv.fs_stat(full_path)

		-- try to figure out what happened in the directory.
		if events.rename then
			if not uv.fs_stat(self.root) then
				self:remove_root()
				return
			end
			if not path_stat then
				self:remove_child(relpath, full_path)
				return
			end

			local f_type
			-- if there is a link to a directory, we are notified on changes!!
			if path_stat.type == "link" then
				-- resolve the link-target's type; comparing the raw
				-- fs_stat-table against "file"/"directory" would never match.
				f_type = uv.fs_stat(uv.fs_realpath(full_path)).type
			else
				f_type = path_stat.type
			end

			if f_type == "file" then
				self:new_file(relpath, full_path)
				return
			elseif f_type == "directory" then
				self:new_dir(relpath, full_path)
				return
			end
		elseif events.change then
			self:change_child(relpath, full_path)
		end
	end)()
end
|
||||
|
||||
-- May not recognize child correctly if there are symlinks on the path from the
-- child to the directory-root.
-- Should be fine, especially since, I think, fs_event can recognize those
-- correctly, which means that this is an issue only very seldomly.

-- Called by the shared BufWritePost-autocmd with the normalized path of the
-- written file; translates the write into new_file/change_file and
-- new_dir/change_dir notifications for this tree.
function TreeWatcher:BufWritePost_callback(realpath)
	if not self.send_notifications then
		return
	end

	-- prefix-check against the normalized root set in start().
	if realpath:sub(1, #self.realpath_root) ~= self.realpath_root then
		-- not inside this root.
		return
	end

	-- `#self.realpath_root+2`: remove root and path-separator.
	local root_relative_components = Path.components(realpath:sub(#self.realpath_root+2))
	-- first component: the immediate child of our root on the way to the
	-- written file.
	local rel = root_relative_components[1]
	if #root_relative_components == 1 then
		-- wrote file.
		-- either new, or changed.
		if self.files[rel] then
			-- use regular root for notifications!
			self:change_file(rel, Path.join(self.root, rel))
		else
			self:new_file(rel, Path.join(self.root, rel))
		end
	else
		if self.dir_watchers[rel] then
			if #root_relative_components == 2 then
				-- only notify if the changed file is immediately in the
				-- directory we're watching!
				-- I think this is the behaviour of fs_event, and logically
				-- makes sense.
				self:change_dir(rel, Path.join(self.root, rel))
			end
		else
			-- does nothing if the directory already exists.
			self:new_dir(rel, Path.join(self.root, rel))
		end
	end
end
|
||||
|
||||
-- Start monitoring self.root with the configured fs_event_providers and do
-- the initial scan of the directory's contents (which fires
-- new_file/new_dir-callbacks).
-- Does nothing for 0-depth or already-stopped watchers.
function TreeWatcher:start()
	if self.depth == 0 then
		-- don't watch children for 0-depth.
		return
	end

	if self.stopped then
		-- stopping overrides and prevents starting.
		return
	end

	self.send_notifications = true

	if self.fs_event_providers.libuv then
		-- does not work on nfs-drive, at least if it's edited from another
		-- machine.
		local success, err = self.fs_event:start(self.root, {}, function(err, relpath, events)
			self:fs_event_callback(err, relpath, events)
		end)

		if not success then
			-- log self.root: a TreeWatcher has no `path`-field (the previous
			-- `self.path` was always nil here).
			log_tree.error("Could not start libuv-monitor for path %s due to error %s", self.root, err)
		else
			log_tree.info("Monitoring root-directory %s with libuv-monitor.", self.root)
		end
	end

	-- needed by BufWritePost-callback.
	self.realpath_root = Path.normalize(self.root)

	if self.fs_event_providers.autocmd then
		if self.realpath_root then
			-- receive notifications on BufWritePost.
			table.insert(M.active_watchers, self)
			log_tree.info("Monitoring root-directory %s with autocmd-monitor.", self.root)
		else
			log_tree.error("Could not resolve realpath for root %s, not enabling autocmd-monitor", self.root)
		end
	end

	-- do initial scan after starting the watcher.
	-- Scanning first, and then starting the watcher leaves a period of time
	-- where a new file may be created (after scanning, before watching), where
	-- we wont know about it.
	-- If I understand the uv-eventloop correctly, this function, `start`, will
	-- be executed completely before a callback is called, so self.files and
	-- self.dir_watchers should be populated correctly when a callback is
	-- received, even if it was received before all directories/files were
	-- added.
	-- This difference can be observed, at least on my machine, by watching a
	-- directory A, and then creating a nested directory B, and children for it
	-- in one command, ie. `mkdir -p A/B/{1,2,3,4,5,6,7,8,9}`.
	-- If the callback is registered after the scan, the latter directories
	-- (ie. 4-9) did not show up, whereas everything did work correctly if the
	-- watcher was activated before the scan.
	-- (almost everything, one directory was included in the initial scan and
	-- the watch-event, but that seems okay for our purposes)
	local files, dirs = Path.scandir(self.root)
	for _, file in ipairs(files) do
		-- `+2`: strip root plus path-separator.
		local relpath = file:sub(#self.root+2)
		self:new_file(relpath, file)
	end
	for _, dir in ipairs(dirs) do
		local relpath = dir:sub(#self.root+2)
		self:new_dir(relpath, dir)
	end
end
|
||||
|
||||
-- these functions maintain our logical view of the directory, and call
-- callbacks when we detect a change.

-- Record a file we were not yet aware of and fire the new_file-callback.
function TreeWatcher:new_file(rel, full)
	if self.files[rel] then
		-- nothing to do, the file is already known.
		return
	end

	log_tree.debug("new file %s %s", rel, full)
	self.files[rel] = true
	self.callbacks.new_file(full)
end
|
||||
-- Record a directory we were not yet aware of, fire the new_dir-callback, and
-- start a child-watcher for its contents (which may fire further callbacks).
function TreeWatcher:new_dir(rel, full)
	if self.dir_watchers[rel] then
		-- nothing to do, the directory is already watched.
		return
	end

	log_tree.debug("new dir %s %s", rel, full)
	-- first do callback for this directory, then look into (and potentially do
	-- callbacks for) children.
	self.callbacks.new_dir(full)
	-- directory exists => don't need to set lazy.
	-- inherit fs_event_providers.
	self.dir_watchers[rel] = M.tree(full, self.depth - 1, self.callbacks, {
		lazy = false,
		fs_event_providers = self.fs_event_providers,
	})
end
|
||||
|
||||
-- Fire the change_file-callback for a changed child-file.
function TreeWatcher:change_file(rel, full)
	log_tree.debug("changed file %s %s", rel, full)
	self.callbacks.change_file(full)
end
|
||||
-- Fire the change_dir-callback for a changed child-directory.
function TreeWatcher:change_dir(rel, full)
	log_tree.debug("changed dir %s %s", rel, full)
	self.callbacks.change_dir(full)
end
|
||||
-- Dispatch a change-notification for a direct child to change_dir or
-- change_file, depending on what we know the child as; unknown children are
-- ignored.
function TreeWatcher:change_child(rel, full)
	if self.dir_watchers[rel] then
		self:change_dir(rel, full)
		return
	end
	if self.files[rel] then
		self:change_file(rel, full)
	end
end
|
||||
|
||||
-- Forget a direct child (file or watched directory) and fire the matching
-- remove-callback; unknown children are ignored.
function TreeWatcher:remove_child(rel, full)
	if self.dir_watchers[rel] then
		log_tree.debug("removing dir %s %s", rel, full)
		-- should have been stopped by the watcher for the child, or it was not
		-- even started due to depth.
		self.dir_watchers[rel]:remove_root()
		self.dir_watchers[rel] = nil

		self.callbacks.remove_dir(full)
		return
	end

	if self.files[rel] then
		log_tree.debug("removing file %s %s", rel, full)
		self.files[rel] = nil

		self.callbacks.remove_file(full)
	end
end
|
||||
|
||||
-- Logically remove this watcher's root: stop own notifications, remove (and
-- notify about) all known children, and finally fire the remove_root-callback.
function TreeWatcher:remove_root()
	if self.removed then
		-- already removed
		return
	end
	log_tree.debug("removing root %s", self.root)
	self.removed = true
	-- stop own, children should have handled themselves, if they are watched
	-- (and we don't need to do anything for unwatched children).
	self:stop_self()

	-- removing entries (set them to nil) is apparently fine when iterating via
	-- pairs.
	for relpath, _ in pairs(self.files) do
		local child_full = Path.join(self.root, relpath)
		self:remove_child(relpath, child_full)
	end
	for relpath, _ in pairs(self.dir_watchers) do
		local child_full = Path.join(self.root, relpath)
		self:remove_child(relpath, child_full)
	end

	-- notify about the root itself last, after all children were removed.
	self.callbacks.remove_root(self.root)
end
|
||||
|
||||
--- Set up new watcher for a tree of files and directories.
--- @param root string Absolute path to the root.
--- @param depth number The depth up to which to monitor. 1 means that the
---                     immediate children will be monitored, 2 includes their
---                     children, and so on.
--- @param callbacks LuaSnip.FSWatcher.TreeCallbacks The callbacks to use for this watcher.
--- @param opts LuaSnip.FSWatcher.Options Options, described in their class.
--- @return LuaSnip.FSWatcher.Tree
function M.tree(root, depth, callbacks, opts)
	local lazy, fs_event_providers = get_opts(opts)

	-- do nothing on missing callback.
	callbacks = setmetatable(callbacks or {}, callback_mt)

	local o = setmetatable({
		root = root,
		fs_event = uv.new_fs_event(),
		files = {},
		dir_watchers = {},
		-- removed: have not yet triggered the removed-callback.
		removed = false,

		-- track whether the watcher was stopped at some point, and if it was,
		-- don't allow it to start again.
		stopped = false,
		-- whether notifications should be sent.
		-- Modified by start/stop, wait for start to send any => start out as
		-- false.
		send_notifications = false,

		callbacks = callbacks,
		depth = depth,
		fs_event_providers = fs_event_providers
	}, TreeWatcher_mt)

	-- if the path does not yet exist, set watcher up s.t. it will start
	-- watching when the directory is created.
	if not uv.fs_stat(root) and lazy then
		-- root does not yet exist, need to create a watcher that notifies us
		-- of its creation.
		local parent_path = Path.parent(root)
		if not parent_path then
			error(("Could not find parent-path for %s"):format(root))
		end

		log_tree.info("Path %s does not exist yet, watching %s for creation.", root, parent_path)

		-- recursive call: the parent-watcher itself is lazy, so this works
		-- even if several levels of the hierarchy don't exist yet.
		-- declared before assignment so the new_dir-closure can reference it.
		local parent_watcher
		parent_watcher = M.tree(parent_path, 1, {
			new_dir = function(full)
				if full == root then
					o:start()
					-- directory was created, stop watching.
					parent_watcher:stop_self()
				end
			end,
			-- use same providers.
		}, { lazy = true, fs_event_providers = fs_event_providers} )
	else
		o:start()
	end

	return o
end
|
||||
|
||||
--- @class LuaSnip.FSWatcher.Path
|
||||
--- @field private path string
|
||||
--- @field private fs_event userdata
|
||||
--- @field private removed boolean
|
||||
--- @field private stopped boolean
|
||||
--- @field private send_notifications boolean
|
||||
--- @field private callbacks LuaSnip.FSWatcher.PathCallbacks
|
||||
--- @field private fs_event_providers table<LuaSnip.FSWatcher.FSEventProviders, boolean>
|
||||
--- @field private realpath string? Set as soon as the watcher is started.
|
||||
local PathWatcher = {}
|
||||
|
||||
local PathWatcher_mt = {
|
||||
__index = PathWatcher
|
||||
}
|
||||
|
||||
--- Notify the change-callback about a modification of the watched path.
--- @param full string? Path to report. The libuv-callback invokes this
--- method without an argument, so default to the watched path instead of
--- logging/warning with nil.
function PathWatcher:change(full)
	full = full or self.path
	log_path.info("detected change at path %s", full)
	if self.removed then
		-- this is certainly unexpected.
		log_path.warn("PathWatcher at %s detected change, but path does not exist logically. Not triggering callback.", full)
	else
		-- notify with the path as passed at construction, like the other callbacks.
		self.callbacks.change(self.path)
	end
end
|
||||
|
||||
function PathWatcher:add()
	-- Only fire the add-callback on the transition from "absent" to
	-- "present"; repeated calls while present are no-ops.
	if self.removed then
		self.removed = false
		log_path.info("adding path %s", self.path)
		self.callbacks.add(self.path)
	end
end
|
||||
function PathWatcher:remove()
	-- Fire the remove-callback exactly once; later calls are no-ops.
	if not self.removed then
		log_path.debug("removing path %s", self.path)
		log_path.info("path %s was removed, stopping watcher.", self.path)

		self.removed = true
		self.callbacks.remove(self.path)

		-- Would have to re-register for new file to receive new notifications.
		self:stop()
	end
end
|
||||
|
||||
--- Callback registered with the libuv fs_event-handle.
--- Runs on the event-loop, so the actual handling is deferred via
--- vim.schedule_wrap.
--- @param err any Error passed by libuv (only logged here).
--- @param relpath string? Path relative to the watched path (only logged).
--- @param events {change: boolean?, rename: boolean?}
function PathWatcher:fs_event_callback(err, relpath, events)
	if not self.send_notifications then
		return
	end

	vim.schedule_wrap(function()
		log_path.debug("raw: path: %s; err: %s; relpath: %s; change: %s; rename: %s", self.path, err, relpath, events.change, events.rename)

		if events.rename then
			-- "rename" covers both creation and deletion: decide which by
			-- checking whether the path currently exists.
			if not uv.fs_stat(self.path) then
				self:remove()
			else
				self:add()
			end
		elseif events.change then
			-- Pass the watched path explicitly; `change(full)` logs the
			-- argument and previously received nil here.
			self:change(self.path)
		end
	end)()
end
|
||||
|
||||
function PathWatcher:BufWritePost_callback(realpath)
	-- Invoked by the global BufWritePost-handler: only act while
	-- notifications are enabled, and only for writes to the watched file.
	if self.send_notifications and realpath == self.realpath then
		-- notify using passed path, not realpath.
		self:change(self.path)
	end
end
|
||||
|
||||
--- Activate this watcher: register the configured fs-event-providers and,
--- if the path already exists, send the initial add-notification.
--- Does nothing if the watcher was stopped before.
function PathWatcher:start()
	if self.stopped then
		-- stop() prevents start.
		return
	end
	-- from now on, callbacks may fire.
	self.send_notifications = true

	if self.fs_event_providers.libuv then
		-- does not work on nfs-drive, at least if it's edited from another
		-- machine.
		local success, err = self.fs_event:start(self.path, {}, function(err, relpath, events)
			self:fs_event_callback(err, relpath, events)
		end)

		if not success then
			log_path.error("Could not start libuv-monitor for file %s due to error %s", self.path, err)
		else
			log_path.info("Monitoring file %s with libuv-monitor.", self.path)
		end
	end

	-- nil if the path does not exist (Path.normalize is fs_realpath).
	local realpath = Path.normalize(self.path)

	if self.fs_event_providers.autocmd then
		if realpath then
			-- stored so BufWritePost_callback can compare written files
			-- against this watcher.
			self.realpath = realpath

			-- path exists, add file-monitor.
			table.insert(M.active_watchers, self)
			log_path.info("Monitoring file %s with autocmd-monitor.", self.path)
		else
			log_path.error("Could not resolve realpath for file %s, not enabling BufWritePost-monitor", self.path)
		end
	end

	if realpath then
		-- path exists, notify.
		self:add()
	-- no else, never added the path, never call remove.
	end
end
|
||||
|
||||
function PathWatcher:stop()
	-- Permanently disable the watcher: `stopped` makes start() refuse to run
	-- again, and notifications are suppressed immediately.
	-- don't check which fs_event_providers were actually started, for both of
	-- these it should not matter if they weren't.
	self.send_notifications = false
	self.stopped = true
	self.fs_event:stop()
end
|
||||
|
||||
--- Set up new watcher on a single path only.
|
||||
--- @param path string Absolute path to the root.
|
||||
--- @param callbacks LuaSnip.FSWatcher.PathCallbacks The callbacks to use for this watcher.
|
||||
--- @param opts LuaSnip.FSWatcher.Options? Options, described in their class.
|
||||
--- @return LuaSnip.FSWatcher.Path
|
||||
function M.path(path, callbacks, opts)
	-- lazy: whether to wait for a non-existing path to be created.
	local lazy, fs_event_providers = get_opts(opts)

	-- do nothing on missing callback.
	callbacks = setmetatable(callbacks or {}, callback_mt)

	--- @as LuaSnip.FSWatcher.Path
	local o = setmetatable({
		path = path,
		fs_event = uv.new_fs_event(),
		-- Don't send an initial remove-callback if the path does not yet
		-- exist.
		-- Always send add first, or send nothing.
		removed = true,
		-- these two are just like in TreeWatcher.
		stopped = false,
		-- wait for `start()` to send notifications.
		send_notifications = false,
		callbacks = callbacks,
		fs_event_providers = fs_event_providers
	}, PathWatcher_mt)

	-- if the path does not yet exist, set watcher up s.t. it will start
	-- watching when the directory is created.
	if not uv.fs_stat(path) and lazy then
		-- root does not yet exist, need to create a watcher that notifies us
		-- of its creation.
		local parent_path = Path.parent(path)
		if not parent_path then
			error(("Could not find parent-path for %s"):format(path))
		end

		log_path.info("Path %s does not exist yet, watching %s for creation.", path, parent_path)

		-- depth 1: only direct children of the parent-directory matter.
		local parent_watcher
		parent_watcher = M.tree(parent_path, 1, {
			-- in path_watcher, watch for new file.
			new_file = function(full)
				log_path.info("Path: %s %s", full, path)
				if full == path then
					-- the awaited file appeared: activate the real watcher..
					o:start()
					-- directory was created, stop watching.
					parent_watcher:stop_self()
				end
			end,
			-- use the same providers for the parent-watcher.
		}, {lazy = true, fs_event_providers = fs_event_providers} )
	else
		-- path exists (or no lazy-waiting requested): activate immediately.
		o:start()
	end

	return o
end
|
||||
|
||||
return M
|
|
@ -1,7 +1,8 @@
|
|||
local Cache = require("luasnip.loaders._caches")
|
||||
local util = require("luasnip.util.util")
|
||||
local loader_util = require("luasnip.loaders.util")
|
||||
local Path = require("luasnip.util.path")
|
||||
local session = require("luasnip.session")
|
||||
local loader_data = require("luasnip.loaders.data")
|
||||
|
||||
local M = {}
|
||||
|
||||
|
@ -56,13 +57,13 @@ function M.edit_snippet_files(opts)
|
|||
local items = {}
|
||||
|
||||
-- concat files from all loaders for the selected filetype ft.
|
||||
for _, cache_name in ipairs({
|
||||
"vscode_packages",
|
||||
"vscode_standalone",
|
||||
"snipmate",
|
||||
"lua",
|
||||
for cache_name, ft_file_set in pairs({
|
||||
vscode_packages = loader_data.vscode_ft_paths[ft],
|
||||
vscode_standalone = {},
|
||||
snipmate = loader_data.snipmate_ft_paths[ft],
|
||||
lua = loader_data.lua_ft_paths[ft],
|
||||
}) do
|
||||
for _, path in ipairs(Cache[cache_name].ft_paths[ft] or {}) do
|
||||
for path, _ in pairs(ft_file_set or {}) do
|
||||
local fmt_name = format(path, clean_name[cache_name])
|
||||
if fmt_name then
|
||||
table.insert(ft_paths, path)
|
||||
|
@ -124,42 +125,22 @@ function M.edit_snippet_files(opts)
|
|||
end
|
||||
|
||||
function M.cleanup()
|
||||
Cache.cleanup()
|
||||
require("luasnip.loaders.from_lua").clean()
|
||||
require("luasnip.loaders.from_snipmate").clean()
|
||||
require("luasnip.loaders.from_vscode").clean()
|
||||
end
|
||||
|
||||
--- explicitly load lazy-loaded snippets for some filetypes.
|
||||
---@param fts string[]: list of filetypes.
|
||||
function M.load_lazy_loaded(fts)
|
||||
fts = util.redirect_filetypes(fts)
|
||||
function M.load_lazy_loaded(bufnr)
|
||||
local fts = loader_util.get_load_fts(bufnr)
|
||||
|
||||
for _, ft in ipairs(fts) do
|
||||
require("luasnip.loaders.from_lua")._load_lazy_loaded_ft(ft)
|
||||
Cache.lua.lazy_loaded_ft[ft] = true
|
||||
|
||||
require("luasnip.loaders.from_snipmate")._load_lazy_loaded_ft(ft)
|
||||
Cache.snipmate.lazy_loaded_ft[ft] = true
|
||||
|
||||
require("luasnip.loaders.from_vscode")._load_lazy_loaded_ft(ft)
|
||||
Cache.vscode.lazy_loaded_ft[ft] = true
|
||||
if not session.loaded_fts[ft] then
|
||||
require("luasnip.loaders.from_lua")._load_lazy_loaded_ft(ft)
|
||||
require("luasnip.loaders.from_snipmate")._load_lazy_loaded_ft(ft)
|
||||
require("luasnip.loaders.from_vscode")._load_lazy_loaded_ft(ft)
|
||||
end
|
||||
session.loaded_fts[ft] = true
|
||||
end
|
||||
end
|
||||
|
||||
vim.api.nvim_create_autocmd("BufWritePost", {
|
||||
group = vim.api.nvim_create_augroup("luasnip_watch_reload", {}),
|
||||
callback = function(event)
|
||||
require("luasnip.loaders").reload_file(event.file)
|
||||
end,
|
||||
})
|
||||
function M.reload_file(filename)
|
||||
filename = Path.normalize(filename)
|
||||
if not filename then
|
||||
-- file does not exist.
|
||||
-- log here, maybe.
|
||||
return
|
||||
end
|
||||
require("luasnip.loaders.from_lua")._reload_file(filename)
|
||||
require("luasnip.loaders.from_vscode")._reload_file(filename)
|
||||
require("luasnip.loaders.from_snipmate")._reload_file(filename)
|
||||
end
|
||||
|
||||
return M
|
||||
|
|
86
lua/luasnip/loaders/snippet_cache.lua
Normal file
86
lua/luasnip/loaders/snippet_cache.lua
Normal file
|
@ -0,0 +1,86 @@
|
|||
local uv = vim.uv or vim.loop
|
||||
local duplicate = require("luasnip.nodes.duplicate")
|
||||
|
||||
--- @class LuaSnip.Loaders.SnippetCache.Mtime
|
||||
--- @field sec number
|
||||
--- @field nsec number
|
||||
--- Stores modified time for a file.
|
||||
|
||||
--- @class LuaSnip.Loaders.SnippetCache.TimeCacheEntry
|
||||
--- @field mtime LuaSnip.Loaders.SnippetCache.Mtime?
|
||||
--- @field data LuaSnip.Loaders.SnippetFileData
|
||||
--- mtime is nil if the file does not currently exist. Since `get_fn` may still
|
||||
--- return data, there's no need to treat this differently.
|
||||
|
||||
|
||||
--- @class LuaSnip.Loaders.SnippetCache
|
||||
--- SnippetCache stores snippets and other data loaded by files.
|
||||
--- @field private get_fn fun(file: string): LuaSnip.Loaders.SnippetFileData
|
||||
--- @field private cache table<string, LuaSnip.Loaders.SnippetCache.TimeCacheEntry>
|
||||
local SnippetCache = {}
|
||||
SnippetCache.__index = SnippetCache
|
||||
|
||||
local M = {}
|
||||
|
||||
--- @class LuaSnip.Loaders.SnippetFileData
|
||||
--- @field snippets LuaSnip.Addable[]
|
||||
--- @field autosnippets LuaSnip.Addable[]
|
||||
--- @field misc table any data.
|
||||
|
||||
--- Create new cache.
|
||||
--- @param get_fn fun(file: string): LuaSnip.Loaders.SnippetFileData
|
||||
--- @return LuaSnip.Loaders.SnippetCache
|
||||
function M.new(get_fn)
	-- A fresh cache starts empty; every lookup falls through to get_fn.
	local o = {
		get_fn = get_fn,
		cache = {},
	}
	return setmetatable(o, SnippetCache)
end
|
||||
|
||||
--- Copy addables from data to new table.
|
||||
--- @param data LuaSnip.Loaders.SnippetFileData
|
||||
--- @return LuaSnip.Loaders.SnippetFileData
|
||||
local function copy_filedata(data)
	-- Duplicate each addable instead of deep-copying the snippet-objects;
	-- only the miscellaneous data gets a regular deepcopy.
	local snippets_copy = vim.tbl_map(duplicate.duplicate_addable, data.snippets)
	local autosnippets_copy = vim.tbl_map(duplicate.duplicate_addable, data.autosnippets)

	--- @as LuaSnip.Loaders.SnippetFileData
	return {
		snippets = snippets_copy,
		autosnippets = autosnippets_copy,
		misc = vim.deepcopy(data.misc),
	}
end
|
||||
|
||||
--- Retrieve loaded data for any file, either from the cache, or directly from
|
||||
--- the file.
|
||||
--- For storage-efficiency (and to elide the otherwise necessary deepcopy), the
|
||||
--- snippets are duplicated, which should not leak.
|
||||
--- @param fname string
|
||||
--- @return LuaSnip.Loaders.SnippetFileData
|
||||
function SnippetCache:fetch(fname)
	local cached = self.cache[fname]
	local current_stat = uv.fs_stat(fname)

	--- @as LuaSnip.Loaders.SnippetCache.Mtime
	-- nil if the file does not currently exist.
	local mtime = current_stat and current_stat.mtime

	-- Cache-hit only if the entry exists, recorded an mtime (i.e. the file
	-- existed when the entry was created), and the mtime is unchanged.
	-- The `cached.mtime`-guard prevents indexing nil when the entry was
	-- created while the file did not exist and the file appeared later.
	if cached and cached.mtime and mtime and mtime.sec == cached.mtime.sec and mtime.nsec == cached.mtime.nsec then
		-- happy path: data is cached, and valid => just return cached data.
		return copy_filedata(cached.data)
	end

	-- data is stale (cache entry does not exist, file was written after
	-- cache-creation, or the file was deleted).
	-- fetch data from updated file
	local res = self.get_fn(fname)

	-- store it.
	self.cache[fname] = {
		data = res,
		mtime = mtime
	}

	-- return it.
	-- Don't copy here, no need to.
	return res
end
|
||||
|
||||
return M
|
8
lua/luasnip/loaders/types.lua
Normal file
8
lua/luasnip/loaders/types.lua
Normal file
|
@ -0,0 +1,8 @@
|
|||
--- @class LuaSnip.Loaders.LoadOpts
|
||||
--- @field paths (string[]|string)? Either a list of paths, or ","-delimited paths. If nil, searches rtp for snippet-collections.
--- @field lazy_paths (string[]|string)? Like paths, but these will be watched, and loaded when creation is detected.
|
||||
--- @field include string[]? If set, all filetypes not in include will be excluded from loading.
|
||||
--- @field exclude string[]? Exclude these filetypes, even if they are set in include.
|
||||
--- @field override_priority number? load all snippets with this priority.
|
||||
--- @field default_priority number? snippet-priority, unless the snippet sets its own priority.
|
||||
--- @field fs_event_providers table<LuaSnip.FSWatcher.FSEventProviders, boolean>? How to monitor the filesystem
|
|
@ -1,6 +1,8 @@
|
|||
local Path = require("luasnip.util.path")
|
||||
local util = require("luasnip.util.util")
|
||||
local session = require("luasnip.session")
|
||||
local snippet_collection = require("luasnip.session.snippet_collection")
|
||||
local log = require("luasnip.util.log").new("loaders")
|
||||
|
||||
local function filetypelist_to_set(list)
|
||||
vim.validate({ list = { list, "table", true } })
|
||||
|
@ -37,19 +39,25 @@ local function split_lines(filestring)
|
|||
)
|
||||
end
|
||||
|
||||
local function _is_present(v)
|
||||
local function non_nil(v)
|
||||
return v ~= nil
|
||||
end
|
||||
|
||||
local function normalize_paths(paths, rtp_dirname)
|
||||
local function resolve_root_paths(paths, rtp_dirname)
|
||||
if not paths then
|
||||
paths = vim.api.nvim_get_runtime_file(rtp_dirname, true)
|
||||
elseif type(paths) == "string" then
|
||||
paths = vim.split(paths, ",")
|
||||
end
|
||||
|
||||
paths = vim.tbl_map(Path.expand, paths)
|
||||
paths = vim.tbl_filter(_is_present, paths)
|
||||
paths = vim.tbl_filter(non_nil, paths)
|
||||
paths = util.deduplicate(paths)
|
||||
|
||||
return paths
|
||||
end
|
||||
|
||||
local function resolve_lazy_root_paths(paths)
|
||||
paths = vim.tbl_map(Path.expand_maybe_nonexisting, paths)
|
||||
paths = vim.tbl_filter(non_nil, paths)
|
||||
paths = util.deduplicate(paths)
|
||||
|
||||
return paths
|
||||
|
@ -105,6 +113,25 @@ local function get_ft_paths(root, extension)
|
|||
return ft_path
|
||||
end
|
||||
|
||||
-- fname must be in the directory-tree below root.
|
||||
-- collection_root may not end with a path-separator.
|
||||
-- If both are from "realpath", and fname belongs to the collection, this
|
||||
-- should be a given.
|
||||
local function collection_file_ft(collection_root, fname)
|
||||
local collection_components = Path.components(collection_root)
|
||||
local fname_components = Path.components(fname)
|
||||
|
||||
if #fname_components == #collection_components + 1 then
|
||||
-- if the file is a direct child of the collection-root, get the text
|
||||
-- before the last dot.
|
||||
return fname_components[#collection_components + 1]:match("(.*)%.[^%.]*")
|
||||
else
|
||||
-- if the file is nested deeper, the name of the directory immediately
|
||||
-- below the root is the filetype.
|
||||
return fname_components[#collection_components + 1]
|
||||
end
|
||||
end
|
||||
|
||||
-- extend table like {lua = {path1}, c = {path1, path2}, ...}, new_paths has the same layout.
|
||||
local function extend_ft_paths(paths, new_paths)
|
||||
for ft, path in pairs(new_paths) do
|
||||
|
@ -134,7 +161,7 @@ end
|
|||
local function get_load_paths_snipmate_like(opts, rtp_dirname, extension)
|
||||
local collections_load_paths = {}
|
||||
|
||||
for _, path in ipairs(normalize_paths(opts.paths, rtp_dirname)) do
|
||||
for _, path in ipairs(resolve_root_paths(opts.paths, rtp_dirname)) do
|
||||
local collection_ft_paths = get_ft_paths(path, extension)
|
||||
|
||||
local load_paths = vim.deepcopy(collection_ft_paths)
|
||||
|
@ -184,7 +211,7 @@ local function edit_snippet_files(ft_files)
|
|||
end)
|
||||
end
|
||||
|
||||
local function add_opts(opts)
|
||||
local function make_add_opts(opts)
|
||||
return {
|
||||
override_priority = opts.override_priority,
|
||||
default_priority = opts.default_priority,
|
||||
|
@ -193,19 +220,75 @@ end
|
|||
|
||||
local function get_load_fts(bufnr)
|
||||
local fts = session.config.load_ft_func(bufnr)
|
||||
-- also add "all", loaded by all buffers.
|
||||
table.insert(fts, "all")
|
||||
|
||||
return util.redirect_filetypes(fts)
|
||||
return util.deduplicate(util.redirect_filetypes(fts))
|
||||
end
|
||||
|
||||
local function add_file_snippets(ft, filename, snippets, autosnippets, add_opts)
|
||||
snippet_collection.add_snippets({ [ft] = snippets },
|
||||
vim.tbl_extend("keep", {
|
||||
type = "snippets",
|
||||
key = "__snippets__" .. ft .. "__" .. filename,
|
||||
}, add_opts)
|
||||
)
|
||||
snippet_collection.add_snippets({ [ft] = autosnippets },
|
||||
vim.tbl_extend("keep", {
|
||||
type = "autosnippets",
|
||||
key = "__autosnippets__" .. ft .. "__" .. filename,
|
||||
}, add_opts)
|
||||
)
|
||||
log.info(
|
||||
"Adding %s snippets and %s autosnippets from %s to ft `%s`",
|
||||
#snippets,
|
||||
#autosnippets,
|
||||
filename,
|
||||
ft
|
||||
)
|
||||
end
|
||||
|
||||
local function normalize_opts(opts)
|
||||
opts = opts or {}
|
||||
|
||||
local paths = opts.paths
|
||||
if type(paths) == "string" then
|
||||
paths = vim.split(paths, ",")
|
||||
end
|
||||
|
||||
local add_opts = make_add_opts(opts)
|
||||
local include = opts.include
|
||||
local exclude = opts.exclude
|
||||
local lazy_paths = opts.lazy_paths or {}
|
||||
if type(lazy_paths) == "string" then
|
||||
lazy_paths = vim.split(lazy_paths, ",")
|
||||
end
|
||||
|
||||
local fs_event_providers = vim.F.if_nil(opts.fs_event_providers, {autocmd = true, libuv = false})
|
||||
|
||||
return {
|
||||
paths = paths,
|
||||
lazy_paths = lazy_paths,
|
||||
include = include,
|
||||
exclude = exclude,
|
||||
add_opts = add_opts,
|
||||
fs_event_providers = fs_event_providers,
|
||||
}
|
||||
end
|
||||
|
||||
return {
|
||||
filetypelist_to_set = filetypelist_to_set,
|
||||
split_lines = split_lines,
|
||||
normalize_paths = normalize_paths,
|
||||
resolve_root_paths = resolve_root_paths,
|
||||
resolve_lazy_root_paths = resolve_lazy_root_paths,
|
||||
ft_filter = ft_filter,
|
||||
get_ft_paths = get_ft_paths,
|
||||
get_load_paths_snipmate_like = get_load_paths_snipmate_like,
|
||||
extend_ft_paths = extend_ft_paths,
|
||||
edit_snippet_files = edit_snippet_files,
|
||||
add_opts = add_opts,
|
||||
make_add_opts = make_add_opts,
|
||||
collection_file_ft = collection_file_ft,
|
||||
get_load_fts = get_load_fts,
|
||||
add_file_snippets = add_file_snippets,
|
||||
normalize_opts = normalize_opts,
|
||||
}
|
||||
|
|
|
@ -40,6 +40,8 @@ M.jump_active = false
|
|||
|
||||
M.config = nil
|
||||
|
||||
M.loaded_fts = {}
|
||||
|
||||
function M.get_snip_env()
|
||||
return M.config and M.config.snip_env
|
||||
end
|
||||
|
|
|
@ -238,7 +238,7 @@ function M.add_snippets(snippets, opts)
|
|||
or opts.type
|
||||
assert(
|
||||
snip_type == "autosnippets" or snip_type == "snippets",
|
||||
"snippetType must be either 'autosnippets' or 'snippets'"
|
||||
"snippetType must be either 'autosnippets' or 'snippets', was " .. vim.inspect(snip_type)
|
||||
)
|
||||
|
||||
local snip_ft = snip.filetype or ft
|
||||
|
@ -251,7 +251,7 @@ function M.add_snippets(snippets, opts)
|
|||
table.insert(by_ft[snip_type][snip_ft], snip)
|
||||
by_id[snip.id] = snip
|
||||
|
||||
-- set source if it was passed, and remove from snippet.
|
||||
-- set source if it is available.
|
||||
if snip._source then
|
||||
source.set(snip, snip._source)
|
||||
end
|
||||
|
|
|
@ -14,6 +14,10 @@ local sep = (function()
|
|||
return package.config:sub(1, 1)
|
||||
end)()
|
||||
|
||||
local root_pattern = (function()
|
||||
return uv.os_uname().sysname:find("Windows") and "%w%:" or "%/"
|
||||
end)()
|
||||
|
||||
function Path.join(...)
|
||||
return table.concat({ ... }, sep)
|
||||
end
|
||||
|
@ -66,6 +70,55 @@ function Path.expand(filepath)
|
|||
return uv.fs_realpath(expanded)
|
||||
end
|
||||
|
||||
-- do our best at normalizing a non-existing path.
-- Collapses duplicate separators, strips a trailing separator, removes
-- "./"-components, and makes the path absolute relative to cwd.
-- NOTE(review): the `"%." .. sep`-substitution also matches the tail of a
-- ".."-component (e.g. "../x" -> ".x") -- parent-references are not handled
-- correctly here; confirm callers never pass them.
function Path.normalize_nonexisting(filepath, cwd)
	-- default to the current working directory for relative paths.
	cwd = cwd or vim.fn.getcwd()

	local normalized = filepath
		-- replace multiple slashes by one.
		:gsub(sep .. sep .. "+", sep)
		-- remove trailing slash.
		:gsub(sep .. "$", "")
		-- remove ./ from path.
		:gsub("%." .. sep, "")

	-- if not yet absolute, prepend path to current directory.
	-- (root_pattern matches a drive-prefix on Windows, a leading sep elsewhere)
	if not normalized:match("^" .. root_pattern .. "") then
		normalized = Path.join(cwd, normalized)
	end

	return normalized
end
|
||||
|
||||
-- Expand shorthands in a path that does not (necessarily) exist, then
-- normalize it.
-- NOTE(review): assumes vim.env.HOME is set; gsub errors on a nil
-- replacement -- confirm for environments without HOME.
function Path.expand_nonexisting(filepath, cwd)
	-- gsub returns a new string (strings are immutable); the previous code
	-- discarded the result, so the replacements were never applied.
	filepath = filepath
		-- replace ~ with home-directory.
		:gsub("^~", vim.env.HOME)
		-- replace ./ or .\ with config-directory (likely ~/.config/nvim)
		:gsub("^[.][/\\]", MYCONFIG_ROOT .. sep)

	return Path.normalize_nonexisting(filepath, cwd)
end
|
||||
|
||||
-- do our best at expanding a path that may or may not exist (ie. check if it
|
||||
-- exists, if so do regular expand, and guess expanded path otherwise)
|
||||
-- Not the clearest name :/
|
||||
function Path.expand_maybe_nonexisting(filepath, cwd)
	-- Prefer the real expansion (which is nil for non-existing paths), and
	-- fall back to the best-effort expansion for non-existing ones.
	return Path.expand(filepath) or Path.expand_nonexisting(filepath, cwd)
end
|
||||
|
||||
function Path.normalize_maybe_nonexisting(filepath, cwd)
	-- Prefer the real normalization (nil for non-existing paths), and fall
	-- back to the best-effort normalization for non-existing ones.
	return Path.normalize(filepath) or Path.normalize_nonexisting(filepath, cwd)
end
|
||||
|
||||
---Return files and directories in path as a list
|
||||
---@param root string
|
||||
---@return string[] files, string[] directories
|
||||
|
@ -128,6 +181,19 @@ function Path.extension(fname)
|
|||
return fname:match("%.([^%.]+)$")
|
||||
end
|
||||
|
||||
function Path.components(path)
	-- Split on the platform-separator: plain (non-pattern) matching, with
	-- empty components (from leading/trailing/doubled separators) dropped.
	local split_opts = { plain = true, trimempty = true }
	return vim.split(path, sep, split_opts)
end
|
||||
|
||||
function Path.parent(path)
	-- Capture everything before the final separator, which must be followed
	-- by at least one non-separator character.
	-- Returns nil when there is no such separator (path has no parent);
	-- "" for a direct child of the root, matching the original sub()-logic.
	return path:match("(.*)%" .. sep .. "[^" .. sep .. "]+$")
end
|
||||
|
||||
-- returns nil if the file does not exist!
|
||||
Path.normalize = uv.fs_realpath
|
||||
|
||||
|
|
|
@ -84,19 +84,9 @@ require("luasnip.config")._setup()
|
|||
vim.api.nvim_create_augroup("_luasnip_lazy_load", {})
|
||||
vim.api.nvim_create_autocmd({ "BufWinEnter", "FileType" }, {
|
||||
callback = function(event)
|
||||
require("luasnip.loaders.from_lua")._load_lazy_loaded(event.buf)
|
||||
end,
|
||||
group = "_luasnip_lazy_load",
|
||||
})
|
||||
vim.api.nvim_create_autocmd({ "BufWinEnter", "FileType" }, {
|
||||
callback = function(event)
|
||||
require("luasnip.loaders.from_snipmate")._load_lazy_loaded(event.buf)
|
||||
end,
|
||||
group = "_luasnip_lazy_load",
|
||||
})
|
||||
vim.api.nvim_create_autocmd({ "BufWinEnter", "FileType" }, {
|
||||
callback = function(event)
|
||||
require("luasnip.loaders.from_vscode")._load_lazy_loaded(event.buf)
|
||||
require("luasnip.loaders").load_lazy_loaded(
|
||||
tonumber(event.buf)
|
||||
)
|
||||
end,
|
||||
group = "_luasnip_lazy_load",
|
||||
})
|
||||
|
|
|
@ -306,4 +306,58 @@ function M.check_global_node_refs(test_name, resolve_map, fn)
|
|||
end
|
||||
end
|
||||
|
||||
local scratchdir_path = ("%s/tests/scratch"):format(os.getenv("LUASNIP_SOURCE"))
|
||||
M.scratchdir_path = scratchdir_path
|
||||
|
||||
-- Reset the scratch-directory and install global helper-functions
-- (scratch_tree_watcher/scratch_path_watcher) in the test-instance.
-- The helpers create fs_watchers rooted in the scratch-directory and
-- rewrite callback-paths to be relative to it.
-- NOTE(review): relies on `rm`/`mkdir` shell-commands -- POSIX only.
function M.scratch_prepare()
	-- clean (maybe a test was not able to clean up after itself) and re-create
	-- scratch-directory.
	os.execute(("rm -rf \"%s\""):format(scratchdir_path))
	os.execute(("mkdir \"%s\""):format(scratchdir_path))

	-- The three %s are filled with: the scratch-path length (for sub()), and
	-- the scratch-path itself twice (watcher-roots).
	exec_lua(([[
		local function translate_callbacks(cbs)
			local cbs_new = {}

			for name, cb in pairs(cbs) do
				cbs_new[name] = function(full_path)
					-- +1 to start behind scratch-path, +1 to omit
					-- path-separator.
					cb(full_path:sub(%s + 2))
				end
			end

			return cbs_new
		end

		function scratch_tree_watcher(root_scratch_rel, depth, cbs, opts)
			return require("luasnip.loaders.fs_watchers").tree("%s/" .. root_scratch_rel, depth, translate_callbacks(cbs), opts)
		end

		function scratch_path_watcher(root_scratch_rel, cbs, opts)
			return require("luasnip.loaders.fs_watchers").path("%s/" .. root_scratch_rel, translate_callbacks(cbs), opts)
		end
	]]):format(#scratchdir_path, scratchdir_path, scratchdir_path))
end
|
||||
|
||||
function M.scratch_mkdir(scratch_rel)
	-- Create a (possibly nested) directory below the scratch-root.
	local cmd = ("mkdir -p \"%s/%s\""):format(scratchdir_path, scratch_rel)
	os.execute(cmd)
end
|
||||
function M.scratch_touch(scratch_rel)
	-- Create an empty file (or bump mtime) below the scratch-root.
	local cmd = ("touch \"%s/%s\""):format(scratchdir_path, scratch_rel)
	os.execute(cmd)
end
|
||||
|
||||
function M.scratch_clear()
	-- Remove the entire scratch-directory; the counterpart to scratch_prepare.
	local cmd = ("rm -rf \"%s\""):format(scratchdir_path)
	os.execute(cmd)
end
|
||||
|
||||
--- Edit and write a file below the scratch-directory, which triggers
--- BufWritePost for it in the test-instance.
--- @param scratch_rel string Path relative to the scratch-root.
function M.scratch_edit(scratch_rel)
	-- trigger BufWritePost.
	exec(("edit %s/%s"):format(scratchdir_path, scratch_rel))

	-- ensure the parent-directory exists before writing.
	-- can replace with "write ++p" once we drop support for old versions.
	-- (extra parens truncate gsub to its first return value; it also returns
	-- the match-count.)
	M.scratch_mkdir((scratch_rel:gsub("%/[^%/]+$", "")))
	-- plain :write; the previous `("write"):format(...)` had no format
	-- directives and silently ignored its arguments.
	exec("write")
end
|
||||
|
||||
return M
|
||||
|
|
|
@ -23,6 +23,8 @@ describe("loaders:", function()
|
|||
helpers.clear()
|
||||
ls_helpers.session_setup_luasnip({ no_snip_globals = true })
|
||||
|
||||
ls_helpers.scratch_prepare()
|
||||
|
||||
screen = Screen.new(50, 5)
|
||||
screen:attach()
|
||||
screen:set_default_attr_ids({
|
||||
|
@ -34,6 +36,8 @@ describe("loaders:", function()
|
|||
end)
|
||||
|
||||
after_each(function()
|
||||
ls_helpers.scratch_clear()
|
||||
|
||||
screen:detach()
|
||||
end)
|
||||
|
||||
|
@ -62,6 +66,10 @@ describe("loaders:", function()
|
|||
-- edit snippet-file, and check for reload.
|
||||
feed(edit_keys)
|
||||
|
||||
feed("<Esc>:w<Cr>")
|
||||
exec_lua("vim.wait(10, function() end)")
|
||||
feed("<C-O>ccall1")
|
||||
|
||||
exec_lua("ls.expand()")
|
||||
|
||||
-- undo changes to snippet-file before checking results.
|
||||
|
@ -69,7 +77,7 @@ describe("loaders:", function()
|
|||
|
||||
-- re-enter current placeholder
|
||||
exec_lua("ls.jump(-1)")
|
||||
exec_lua("ls.jump(1)")
|
||||
exec_lua("ls.jump( 1)")
|
||||
|
||||
screen:expect({
|
||||
grid = [[
|
||||
|
@ -379,27 +387,27 @@ describe("loaders:", function()
|
|||
"snipmate-reload works",
|
||||
ls_helpers.loaders["snipmate(rtp)"],
|
||||
"/tests/data/snipmate-snippets/snippets/all.snippets",
|
||||
"<Esc>2jwcereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>2jwcereplaces"
|
||||
)
|
||||
|
||||
reload_test(
|
||||
"vscode-reload works",
|
||||
ls_helpers.loaders["vscode(rtp)"],
|
||||
"/tests/data/vscode-snippets/snippets/all.json",
|
||||
"<Esc>4jwlcereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>4jwlcereplaces"
|
||||
)
|
||||
reload_test(
|
||||
"vscode-standalone-reload works",
|
||||
ls_helpers.loaders["vscode(standalone)"],
|
||||
"/tests/data/vscode-standalone.code-snippets",
|
||||
"<Esc>11jwlcereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>11jwlcereplaces"
|
||||
)
|
||||
|
||||
reload_test(
|
||||
"lua-reload works",
|
||||
ls_helpers.loaders["lua(rtp)"],
|
||||
"/tests/data/lua-snippets/luasnippets/all.lua",
|
||||
"<Esc>jfecereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>jfecereplaces"
|
||||
)
|
||||
|
||||
reload_test(
|
||||
|
@ -414,7 +422,7 @@ describe("loaders:", function()
|
|||
)
|
||||
end,
|
||||
"/tests/data/snipmate-snippets/snippets/all.snippets",
|
||||
"<Esc>2jwcereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>2jwcereplaces"
|
||||
)
|
||||
|
||||
reload_test(
|
||||
|
@ -429,7 +437,7 @@ describe("loaders:", function()
|
|||
)
|
||||
end,
|
||||
"/tests/data/vscode-snippets/snippets/all.json",
|
||||
"<Esc>4jwlcereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>4jwlcereplaces"
|
||||
)
|
||||
|
||||
reload_test(
|
||||
|
@ -444,7 +452,7 @@ describe("loaders:", function()
|
|||
)
|
||||
end,
|
||||
"/tests/data/lua-snippets/luasnippets/all.lua",
|
||||
"<Esc>jfecereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>jfecereplaces"
|
||||
)
|
||||
|
||||
reload_test(
|
||||
|
@ -459,7 +467,7 @@ describe("loaders:", function()
|
|||
)
|
||||
end,
|
||||
"/tests/symlinked_data/snipmate-snippets/snippets/all.snippets",
|
||||
"<Esc>2jwcereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>2jwcereplaces"
|
||||
)
|
||||
|
||||
reload_test(
|
||||
|
@ -473,7 +481,7 @@ describe("loaders:", function()
|
|||
)
|
||||
end,
|
||||
"/tests/symlinked_data/vscode-snippets/snippets/all.json",
|
||||
"<Esc>4jwlcereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>4jwlcereplaces"
|
||||
)
|
||||
|
||||
reload_test(
|
||||
|
@ -488,9 +496,22 @@ describe("loaders:", function()
|
|||
)
|
||||
end,
|
||||
"/tests/symlinked_data/lua-snippets/luasnippets/all.lua",
|
||||
"<Esc>jfecereplaces<Esc>:w<Cr><C-O>ccall1"
|
||||
"<Esc>jfecereplaces"
|
||||
)
|
||||
|
||||
---
|
||||
--- Many of the following test will do weird things, like write more than
|
||||
--- once, and wait quite often. The delays are added to allow the eventloop
|
||||
--- (I guess) to catch up, and process all the autocommands or callbacks
|
||||
--- dispatched by the writes.
|
||||
--- Multiple writes are sometimes necessary because after the first write,
|
||||
--- for some reason, a function called in BufWritePost does not read the
|
||||
--- new file contents.
|
||||
---
|
||||
--- I've never encountered either of these issues in normal usage, so I'm
|
||||
--- guessing that they are somehow caused by the testing-framework.
|
||||
---
|
||||
|
||||
it("Can load files with `code-snippets`-extension.", function()
|
||||
ls_helpers.loaders["vscode(rtp)"]()
|
||||
|
||||
|
@ -556,4 +577,259 @@ describe("loaders:", function()
|
|||
{2:-- INSERT --} |]],
|
||||
})
|
||||
end)
|
||||
|
||||
it("lazy registration works for lua.", function()
|
||||
exec_lua( ([[
|
||||
require("luasnip.loaders.from_lua").load({lazy_paths="%s"})
|
||||
]]):format(ls_helpers.scratchdir_path .. "/snippets") )
|
||||
|
||||
ls_helpers.scratch_edit("snippets/all.lua")
|
||||
|
||||
feed([[ireturn { ls.parser.parse_snippet("asdf", "qwer") }]])
|
||||
screen:expect{grid=[[
|
||||
return { ls.parser.parse_snippet("asdf", "qwer") }|
|
||||
{0:^~ }|
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
|
||||
feed("<Esc>:w<Cr>")
|
||||
|
||||
feed("oasdf")
|
||||
screen:expect{grid=[[
|
||||
return { ls.parser.parse_snippet("asdf", "qwer") }|
|
||||
asdf^ |
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
|
||||
exec_lua("ls.expand()")
|
||||
|
||||
screen:expect{grid=[[
|
||||
return { ls.parser.parse_snippet("asdf", "qwer") }|
|
||||
qwer^ |
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
end)
|
||||
|
||||
it("lazy registration works for snipmate.", function()
|
||||
exec_lua( ([[
|
||||
require("luasnip.loaders.from_snipmate").load({lazy_paths="%s"})
|
||||
]]):format(ls_helpers.scratchdir_path .. "/snippets") )
|
||||
|
||||
ls_helpers.scratch_edit("snippets/all.snippets")
|
||||
|
||||
feed([[isnippet asdf<Cr> qwer]])
|
||||
screen:expect{grid=[[
|
||||
snippet asdf |
|
||||
qwer^ |
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
|
||||
feed("<Esc>:w<Cr>")
|
||||
|
||||
feed("oasdf")
|
||||
screen:expect{grid=[[
|
||||
snippet asdf |
|
||||
qwer |
|
||||
asdf^ |
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
|
||||
exec_lua("ls.expand()")
|
||||
|
||||
screen:expect{grid=[[
|
||||
snippet asdf |
|
||||
qwer |
|
||||
qwer^ |
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
end)
|
||||
|
||||
it("lazy registration works for vscode (packages).", function()
|
||||
exec_lua( ([[
|
||||
require("luasnip.loaders.from_vscode").load({lazy_paths="%s"})
|
||||
]]):format(ls_helpers.scratchdir_path .. "/snippets") )
|
||||
|
||||
-- double as quick test for package.jsonc
|
||||
ls_helpers.scratch_edit("snippets/package.jsonc")
|
||||
|
||||
feed([[i{ "name": "snippets", "contributes": { "snippets": [{"language": ["all"], "path": "./all.json"}] } }]])
|
||||
feed("<Esc>:w<Cr>")
|
||||
feed("<Esc>:w<Cr>")
|
||||
feed("<Esc>:w<Cr>")
|
||||
exec_lua("vim.wait(100, function() end)")
|
||||
|
||||
screen:expect{grid=[[
|
||||
{ "name": "snippets", "contributes": { "snippets":|
|
||||
[{"language": ["all"], "path": "./all.json"}] } ^}|
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
<scratch/snippets/package.jsonc" 1L, 101B written |]]}
|
||||
|
||||
ls_helpers.scratch_edit("snippets/all.json")
|
||||
|
||||
feed([[i{"snip": {"prefix": "asdf", "body": ["qwer"]}}]])
|
||||
feed("<Esc>:w<Cr>")
|
||||
feed("<Esc>:w<Cr>")
|
||||
feed("<Esc>:w<Cr>")
|
||||
exec_lua("vim.wait(100, function() end)")
|
||||
|
||||
screen:expect{grid=[[
|
||||
{"snip": {"prefix": "asdf", "body": ["qwer"]}^} |
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
<tests/scratch/snippets/all.json" 1L, 47B written |]]}
|
||||
|
||||
feed("oasdf")
|
||||
exec_lua("ls.expand()")
|
||||
|
||||
screen:expect{grid=[[
|
||||
{"snip": {"prefix": "asdf", "body": ["qwer"]}} |
|
||||
qwer^ |
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
end)
|
||||
|
||||
it("lazy registration works for vscode (standalone .code-snippets).", function()
|
||||
exec_lua( ([[
|
||||
require("luasnip.loaders.from_vscode").load_standalone({path = "%s", lazy = true})
|
||||
]]):format(ls_helpers.scratchdir_path .. "/vs/snips.code-snippets") )
|
||||
|
||||
ls_helpers.scratch_edit("vs/snips.code-snippets")
|
||||
|
||||
feed([[i{"snip": {"prefix": "asdf", "body": ["qwer"]}}]])
|
||||
feed("<Esc>:w<Cr>")
|
||||
feed("<Esc>:w<Cr>")
|
||||
exec_lua("vim.wait(100, function() end)")
|
||||
|
||||
screen:expect{grid=[[
|
||||
{"snip": {"prefix": "asdf", "body": ["qwer"]}^} |
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
</scratch/vs/snips.code-snippets" 1L, 47B written |]]}
|
||||
|
||||
feed("oasdf")
|
||||
exec_lua("ls.expand()")
|
||||
|
||||
screen:expect{grid=[[
|
||||
{"snip": {"prefix": "asdf", "body": ["qwer"]}} |
|
||||
qwer^ |
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
end)
|
||||
|
||||
it("lua-loader refreshes snippets when dependency is written.", function()
|
||||
ls_helpers.scratch_mkdir("snippets")
|
||||
|
||||
exec_lua( ([[
|
||||
require("luasnip.loaders.from_lua").lazy_load({paths="%s"})
|
||||
]]):format(ls_helpers.scratchdir_path .. "/snippets") )
|
||||
|
||||
-- this file will provide the body of the snippet.
|
||||
ls_helpers.scratch_edit("util/a_string.lua")
|
||||
feed([[ireturn "qwer"]])
|
||||
feed("<Esc>:w<Cr>")
|
||||
exec_lua("vim.wait(100, function() end)")
|
||||
|
||||
ls_helpers.scratch_edit("snippets/all.lua")
|
||||
-- extract into variable, so the path does no show up in screen-tests.
|
||||
exec_lua(([[dependency_file = "%s"]]):format(ls_helpers.scratchdir_path .. "/util/a_string.lua"))
|
||||
feed([[ireturn { ls.parser.parse_snippet("asdf", ls_tracked_dofile(dependency_file)) }]])
|
||||
|
||||
feed("<Esc>:w<Cr>")
|
||||
exec_lua("vim.wait(100, function() end)")
|
||||
|
||||
feed("oasdf")
|
||||
exec_lua("ls.expand()")
|
||||
screen:expect{grid=[[
|
||||
return { ls.parser.parse_snippet("asdf", ls_tracke|
|
||||
d_dofile(dependency_file)) } |
|
||||
qwer^ |
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
|
||||
ls_helpers.scratch_edit("util/a_string.lua")
|
||||
feed([[<Esc>$bcezxcv]])
|
||||
feed("<Esc>:w<Cr>")
|
||||
feed("oasdf")
|
||||
|
||||
exec_lua("ls.expand()")
|
||||
screen:expect{grid=[[
|
||||
return "zxcv" |
|
||||
zxcv^ |
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
end)
|
||||
|
||||
it("snipmate-loader handles transitive extends, and updates it when changed.", function()
|
||||
-- setup filetypes A B C D, where A extends B, and C extends D, but B (initially) does not extend C.
|
||||
-- If we add this extends, snippets from D should be available in A.
|
||||
-- I think if this works, all the "simpler" cases should also work fine. Add more tests if they don't.
|
||||
|
||||
ls_helpers.scratch_mkdir("snippets")
|
||||
exec_lua( ([[
|
||||
require("luasnip.loaders.from_snipmate").lazy_load({paths="%s"})
|
||||
]]):format(ls_helpers.scratchdir_path .. "/snippets") )
|
||||
|
||||
ls_helpers.scratch_edit("snippets/A.snippets")
|
||||
feed([[iextends B<Esc>:w<Cr>]])
|
||||
ls_helpers.scratch_edit("snippets/C.snippets")
|
||||
feed([[iextends D<Esc>:w<Cr>]])
|
||||
ls_helpers.scratch_edit("snippets/D.snippets")
|
||||
feed([[isnippet DDDD<Cr> dddd<Esc>:w<Cr>]])
|
||||
|
||||
ls_helpers.scratch_edit("snippets/B.snippets")
|
||||
feed([[iextends C<Esc>:w<Cr>]])
|
||||
|
||||
exec_lua("vim.wait(100, function() end)")
|
||||
|
||||
exec("set ft=A")
|
||||
feed("oDDDD")
|
||||
exec_lua("ls.expand()")
|
||||
screen:expect{grid=[[
|
||||
extends C |
|
||||
dddd^ |
|
||||
{0:~ }|
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
|
||||
-- make sure we know that A receives snippets from 4 files,
|
||||
-- A/B/C/D.snippets.
|
||||
-- This data is used in the edit_snippet_files-dialog, and this check is
|
||||
-- to somewhat ensure it behaves consistently (can't test it directly,
|
||||
-- unfortunately, I guess since the test-instance waits for input before
|
||||
-- proceeding, but as soon as we give it, we can't check the options :( )
|
||||
-- Anyway, this works too, for now.
|
||||
assert.are.same(4, exec_lua([[return #require("luasnip.util.table").set_to_list(require("luasnip.loaders.data").snipmate_ft_paths["A"]) ]]))
|
||||
end)
|
||||
|
||||
it("Clearing before a lazy collection is loaded will prevent it from loading.", function()
|
||||
exec_lua( ([[
|
||||
require("luasnip.loaders.from_snipmate").load({lazy_paths="%s"})
|
||||
]]):format(ls_helpers.scratchdir_path .. "/snippets") )
|
||||
exec_lua("ls.cleanup()")
|
||||
|
||||
ls_helpers.scratch_edit("snippets/all.snippets")
|
||||
feed([[isnippet DDDD<Cr> dddd<Esc>:w<Cr>]])
|
||||
-- make sure snippets are not loaded because of cleanup, and not
|
||||
-- because we don't give the test-instance time to load them :D
|
||||
exec_lua("vim.wait(100, function() end)")
|
||||
|
||||
feed("oDDDD")
|
||||
exec_lua("ls.expand()")
|
||||
screen:expect{grid=[[
|
||||
snippet DDDD |
|
||||
dddd |
|
||||
DDDD^ |
|
||||
{0:~ }|
|
||||
{2:-- INSERT --} |]]}
|
||||
end)
|
||||
end)
|
||||
|
|
277
tests/unit/fswatcher_spec.lua
Normal file
277
tests/unit/fswatcher_spec.lua
Normal file
|
@ -0,0 +1,277 @@
|
|||
local helpers = require("test.functional.helpers")()
|
||||
local exec_lua = helpers.exec_lua
|
||||
local ls_helpers = require("helpers")
|
||||
local mkdir = ls_helpers.scratch_mkdir
|
||||
local touch = ls_helpers.scratch_touch
|
||||
local edit = ls_helpers.scratch_edit
|
||||
|
||||
describe("fs_events", function()
|
||||
before_each(function()
|
||||
helpers.clear()
|
||||
|
||||
ls_helpers.session_setup_luasnip()
|
||||
ls_helpers.scratch_prepare()
|
||||
|
||||
exec_lua([[ls.log.set_loglevel("debug")]])
|
||||
end)
|
||||
after_each(function()
|
||||
ls_helpers.scratch_clear()
|
||||
end)
|
||||
|
||||
it("works with libuv-event-provider.", function()
|
||||
mkdir("a")
|
||||
mkdir("a/1")
|
||||
mkdir("a/2")
|
||||
mkdir("a/3")
|
||||
touch("a/1/b")
|
||||
mkdir("a/3/b/c")
|
||||
touch("a/3/b/d")
|
||||
|
||||
exec_lua([[
|
||||
seen_files = {
|
||||
["a/1/a"] = 0,
|
||||
["a/1/b"] = 0,
|
||||
["a/3/b/d"] = 0,
|
||||
["a/4/a/a"] = 0,
|
||||
["a/4/a/b/a"] = 0,
|
||||
}
|
||||
seen_dirs = {
|
||||
["a/1"] = 0,
|
||||
["a/2"] = 0,
|
||||
["a/3"] = 0,
|
||||
["a/3/b"] = 0,
|
||||
["a/3/b/c"] = 0,
|
||||
["a/4"] = 0,
|
||||
["a/4/a"] = 0,
|
||||
["a/4/a/b"] = 0,
|
||||
}
|
||||
changed = {
|
||||
["a/1/b"] = 0,
|
||||
["a/1/a"] = 0,
|
||||
}
|
||||
|
||||
watcher = scratch_tree_watcher("a", 3, {
|
||||
new_file = function(path)
|
||||
seen_files[path] = seen_files[path] + 1
|
||||
end,
|
||||
new_dir = function(path)
|
||||
seen_dirs[path] = seen_dirs[path] + 1
|
||||
end,
|
||||
change_file = function(path)
|
||||
changed[path] = changed[path] + 1
|
||||
end
|
||||
}, {lazy=false, fs_event_providers = {libuv = true, autocmd = false} } )
|
||||
]])
|
||||
|
||||
-- new file.
|
||||
touch("a/1/a")
|
||||
|
||||
-- wait for a/1/a to be recognized...
|
||||
exec_lua("vim.wait(10, function() end)")
|
||||
|
||||
-- ... then change and wait again.
|
||||
touch("a/1/a")
|
||||
touch("a/1/b")
|
||||
|
||||
exec_lua("vim.wait(10, function() end)")
|
||||
|
||||
mkdir("a/4/a")
|
||||
touch("a/4/a/a")
|
||||
mkdir("a/4/a/b")
|
||||
|
||||
-- this should not be reported, due to depth.
|
||||
touch("a/4/a/b/a")
|
||||
|
||||
exec_lua("vim.wait(10, function() end)")
|
||||
|
||||
assert.are.same({
|
||||
{
|
||||
-- files
|
||||
["a/1/a"] = 1,
|
||||
["a/1/b"] = 1,
|
||||
["a/3/b/d"] = 1,
|
||||
["a/4/a/a"] = 1,
|
||||
["a/4/a/b/a"] = 0
|
||||
}, {
|
||||
-- directories
|
||||
["a/1"] = 1,
|
||||
["a/2"] = 1,
|
||||
["a/3"] = 1,
|
||||
["a/3/b"] = 1,
|
||||
["a/3/b/c"] = 1,
|
||||
["a/4"] = 1,
|
||||
["a/4/a"] = 1,
|
||||
["a/4/a/b"] = 1
|
||||
}, {
|
||||
-- changed files
|
||||
|
||||
-- this is reported twice, once on create, once on the actual change.
|
||||
-- Maybe a small peculiarity to watch out for, but does not seem bad.
|
||||
["a/1/a"] = 2,
|
||||
["a/1/b"] = 1
|
||||
}},
|
||||
exec_lua([[return {seen_files, seen_dirs, changed}]]) )
|
||||
end)
|
||||
|
||||
it("works with autocmd-event-provider.", function()
|
||||
mkdir("a")
|
||||
edit("a/1/b")
|
||||
edit("a/3/b/d")
|
||||
|
||||
exec_lua([[
|
||||
seen_files = {
|
||||
["a/1/a"] = 0,
|
||||
["a/1/b"] = 0,
|
||||
["a/3/b/d"] = 0,
|
||||
["a/4/a/a"] = 0,
|
||||
["a/4/a/b/a"] = 0,
|
||||
}
|
||||
seen_dirs = {
|
||||
["a/1"] = 0,
|
||||
["a/3"] = 0,
|
||||
["a/3/b"] = 0,
|
||||
["a/4"] = 0,
|
||||
["a/4/a"] = 0,
|
||||
["a/4/a/b"] = 0,
|
||||
}
|
||||
changed = {
|
||||
["a/1/b"] = 0,
|
||||
["a/1/a"] = 0,
|
||||
["a/4/a/a"] = 0,
|
||||
["a/4/a/b/a"] = 0
|
||||
}
|
||||
|
||||
watcher = scratch_tree_watcher("a", 3, {
|
||||
new_file = function(path)
|
||||
-- os.execute("echo qwerqwerqwer " .. path .. " >> asdf")
|
||||
seen_files[path] = seen_files[path] + 1
|
||||
-- os.execute("echo qwerqwerqwes " .. path .. " >> asdf")
|
||||
end,
|
||||
new_dir = function(path)
|
||||
-- os.execute("echo qwerqwerqwer" .. path .. " >> asdf")
|
||||
seen_dirs[path] = seen_dirs[path] + 1
|
||||
end,
|
||||
change_file = function(path)
|
||||
-- os.execute("echo change " .. path .. " >> asdf")
|
||||
changed[path] = changed[path] + 1
|
||||
end
|
||||
}, {lazy=false, fs_event_providers = {libuv = false, autocmd = true} } )
|
||||
]])
|
||||
|
||||
-- new file.
|
||||
edit("a/1/a")
|
||||
|
||||
-- wait for a/1/a to be recognized...
|
||||
exec_lua("vim.wait(10, function() end)")
|
||||
|
||||
-- ... then change.
|
||||
edit("a/1/a")
|
||||
|
||||
edit("a/1/b")
|
||||
|
||||
exec_lua("vim.wait(10, function() end)")
|
||||
|
||||
edit("a/4/a/a")
|
||||
|
||||
-- this should not be reported, due to depth.
|
||||
edit("a/4/a/b/a")
|
||||
|
||||
exec_lua("vim.wait(10, function() end)")
|
||||
|
||||
assert.are.same({
|
||||
{
|
||||
["a/1/a"] = 1,
|
||||
["a/1/b"] = 1,
|
||||
["a/3/b/d"] = 1,
|
||||
["a/4/a/a"] = 1,
|
||||
["a/4/a/b/a"] = 0,
|
||||
}, {
|
||||
["a/1"] = 1,
|
||||
["a/3"] = 1,
|
||||
["a/3/b"] = 1,
|
||||
["a/4"] = 1,
|
||||
["a/4/a"] = 1,
|
||||
["a/4/a/b"] = 1,
|
||||
}, {
|
||||
["a/1/b"] = 1,
|
||||
["a/1/a"] = 1,
|
||||
["a/4/a/a"] = 1,
|
||||
-- to deep.
|
||||
["a/4/a/b/a"] = 0
|
||||
} },
|
||||
exec_lua([[return {seen_files, seen_dirs, changed}]]) )
|
||||
end)
|
||||
|
||||
it("lazy registration works with libuv.", function()
|
||||
mkdir("a")
|
||||
|
||||
exec_lua([[
|
||||
seen_files = {
|
||||
["a/a/a"] = 0,
|
||||
}
|
||||
-- won't see any directories, a/a is root.
|
||||
changed = {
|
||||
["a/a/a"] = 0,
|
||||
}
|
||||
|
||||
watcher = scratch_tree_watcher("a/a", 3, {
|
||||
new_file = function(path)
|
||||
seen_files[path] = seen_files[path] + 1
|
||||
end,
|
||||
new_dir = function(path)
|
||||
seen_dirs[path] = seen_dirs[path] + 1
|
||||
end,
|
||||
change_file = function(path)
|
||||
changed[path] = changed[path] + 1
|
||||
end
|
||||
}, {lazy=true, fs_event_providers = {libuv = true, autocmd = false} } )
|
||||
]])
|
||||
|
||||
mkdir("a/a")
|
||||
touch("a/a/a")
|
||||
|
||||
assert.are.same({
|
||||
{
|
||||
["a/a/a"] = 1,
|
||||
}, {
|
||||
["a/a/a"] = 1,
|
||||
} },
|
||||
exec_lua([[return {seen_files, changed}]]) )
|
||||
end)
|
||||
|
||||
it("lazy registration works with autocmd.", function()
|
||||
mkdir("a")
|
||||
|
||||
exec_lua([[
|
||||
seen_files = {
|
||||
["a/a/a"] = 0,
|
||||
}
|
||||
-- won't see any directories, a/a is root.
|
||||
changed = {
|
||||
["a/a/a"] = 0,
|
||||
}
|
||||
|
||||
watcher = scratch_tree_watcher("a/a", 3, {
|
||||
new_file = function(path)
|
||||
seen_files[path] = seen_files[path] + 1
|
||||
end,
|
||||
new_dir = function(path)
|
||||
seen_dirs[path] = seen_dirs[path] + 1
|
||||
end,
|
||||
change_file = function(path)
|
||||
changed[path] = changed[path] + 1
|
||||
end
|
||||
}, {lazy=true, fs_event_providers = {libuv = false, autocmd = true} } )
|
||||
]])
|
||||
|
||||
edit("a/a/a")
|
||||
|
||||
assert.are.same({
|
||||
{
|
||||
["a/a/a"] = 1,
|
||||
}, {
|
||||
["a/a/a"] = 1,
|
||||
} },
|
||||
exec_lua([[return {seen_files, changed}]]) )
|
||||
end)
|
||||
end)
|
Loading…
Reference in a new issue