-- neovim-confs/lua/custom/plugins/configs/lspsettings.lua
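-- Per-server LSP settings tables. This module only defines the option tables;
-- the actual lspconfig setup calls are assumed to happen elsewhere in this
-- config (see the illustrative sketch above `return M` at the bottom).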

local M = {}
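-- basedpyright (a pyright fork): only diagnose files that are currently open
-- and keep type checking at the "basic" level.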
M.basedpyright = {
  root_dir = vim.loop.cwd,
  flags = { debounce_text_changes = 300 },
  single_file_support = true,
  settings = {
    python = {
      analysis = {
        autoSearchPaths = true,
        diagnosticMode = "openFilesOnly",
        useLibraryCodeForTypes = true,
        typeCheckingMode = "basic",
      },
    },
  },
}
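-- rust-analyzer: run clippy instead of plain `cargo check` for diagnostics,
-- group imports per module with a `self` prefix, and enable build scripts and
-- proc-macro expansion.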
M.rust_analyzer = {
  settings = {
    ["rust-analyzer"] = {
      check = {
        command = "clippy",
      },
      imports = {
        granularity = {
          group = "module",
        },
        prefix = "self",
      },
      cargo = {
        buildScripts = {
          enable = true,
        },
      },
      procMacro = {
        enable = true,
      },
    },
  },
}
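-- ltex-ls: LanguageTool-based grammar and spell checking for the filetypes
-- listed under `enabled` below.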
M.ltex = {
  use_spellfile = false,
  settings = {
    ltex = {
      -- only check on save so the server stays quiet while I'm just editing,
      -- see <https://github.com/folke/noice.nvim/issues/166>
      checkFrequency = "save",
      -- a specific language (such as en-GB or de-DE) is recommended, but I
      -- want multilingual checking
      language = "auto",
      enabled = {
        "bibtex",
        "tex",
        "latex",
        "gitcommit",
        "markdown",
        "org",
        "restructuredtext",
        "rsweave",
        "quarto",
        "rmd",
        "context",
        -- "html",
        -- "xhtml",
      },
      additionalRules = {
        enablePickyRules = true,
        -- motherTongue = "de" is nice in principle, but it often adds diagnostics
        -- wherever a German speaker might confuse words that are similar between
        -- English and German, regardless of context. I seem to use the English
        -- words only in the proper contexts, so leaving it on just adds annoying
        -- hints like 'Hinweis: "list/NN.*" (English) bedeutet "Liste",
        -- "Verzeichnis" (German). Meinten Sie vielleicht 'cunning', 'trick'?'
        -- (roughly: 'Note: "list" (English) means "Liste", "Verzeichnis" (German).
        -- Did you perhaps mean "cunning", "trick"?') every time I use the word
        -- "list". I did like that it makes the hints show up in German regardless
        -- of the language I'm working in, though...
        --motherTongue = "de",
      },
      -- the LanguageTool API token and related settings are filled in below,
      -- from the secrets module
    },
  },
}
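-- textLSP: combines several text analysers (LanguageTool, Gramformer and
-- Hugging Face models); each analyser can be toggled and scheduled
-- independently via its `check_text` table.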
M.textlsp = {
  filetypes = {
    "bibtex",
    "tex",
    "latex",
    "gitcommit",
    "markdown",
    "org",
    "restructuredtext",
    "rsweave",
    "quarto",
    "rmd",
    "context",
    "html",
    "xhtml",
  },
  settings = {
    textLSP = {
      analysers = {
        languagetool = {
          enabled = true,
          check_text = {
            on_open = true,
            on_save = true,
            on_change = false,
          },
        },
        gramformer = {
          -- the gramformer dependency needs to be installed manually
          enabled = true,
          gpu = false,
          check_text = {
            on_open = false,
            on_save = true,
            on_change = false,
          },
        },
        hf_checker = {
          enabled = false,
          gpu = false,
          quantize = 32,
          model = "pszemraj/flan-t5-large-grammar-synthesis",
          min_length = 40,
          check_text = {
            on_open = false,
            on_save = true,
            on_change = false,
          },
        },
        hf_instruction_checker = {
          enabled = true,
          gpu = false,
          quantize = 32,
          model = "grammarly/coedit-large",
          min_length = 40,
          check_text = {
            on_open = false,
            on_save = true,
            on_change = false,
          },
        },
        hf_completion = {
          enabled = true,
          gpu = false,
          quantize = 32,
          model = "bert-base-multilingual-cased",
          topk = 5,
        },
        -- openai = {
        --   enabled = false,
        --   api_key = "<MY_API_KEY>",
        --   check_text = {
        --     on_open = false,
        --     on_save = false,
        --     on_change = false,
        --   },
        --   model = "gpt-3.5-turbo",
        --   max_token = 16,
        -- },
        -- grammarbot = {
        --   enabled = false,
        --   api_key = "<MY_API_KEY>",
        --   -- longer texts are split; this parameter sets the maximum number
        --   -- of splits per analysis
        --   input_max_requests = 1,
        --   check_text = {
        --     on_open = false,
        --     on_save = false,
        --     on_change = false,
        --   },
        -- },
      },
      documents = {
        -- org = {
        --   org_todo_keywords = {
        --     "TODO",
        --     "IN_PROGRESS",
        --     "DONE",
        --   },
        -- },
        txt = {
          parse = true,
        },
      },
    },
  },
}
-- load secrets
-- the secret module should just return a string with the LanguageTool API token
local available, token = require("custom.utils").try_require "custom.secret.languagetool_token"
if available then
  -- these options live in the ltex settings namespace; languageToolHttpServerUrl
  -- is a sibling of languageToolOrg rather than a member of it
  M.ltex.settings.ltex.languageToolOrg = {
    apiKey = token,
    username = "accounts@cscherr.de",
  }
  M.ltex.settings.ltex.languageToolHttpServerUrl = "https://api.languagetoolplus.com/v2/"
end
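-- A minimal sketch of how these tables are assumed to be consumed elsewhere in
-- this config (illustrative only; the real setup code is not in this file):
--
--   local lspconfig = require("lspconfig")
--   for server, opts in pairs(require("custom.plugins.configs.lspsettings")) do
--     lspconfig[server].setup(opts)
--   end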
return M