local on_attach = require("plugins.configs.lspconfig").on_attach
local capabilities = require("plugins.configs.lspconfig").capabilities

local M = {}

-- rust-analyzer: clippy as the check command, imports grouped by module,
-- build scripts and proc macros enabled
M.rust_analyzer = {
  on_attach = on_attach,
  capabilities = capabilities,
  settings = {
    ["rust-analyzer"] = {
      check = {
        command = "clippy",
      },
      imports = {
        granularity = {
          group = "module",
        },
        prefix = "self",
      },
      cargo = {
        buildScripts = {
          enable = true,
        },
      },
      procMacro = {
        enable = true,
      },
    },
  },
}

-- ltex-ls: grammar and spell checking for prose and markup filetypes
M.ltex = {
  on_attach = on_attach,
  capabilities = capabilities,
  settings = {
    ltex = {
      -- a specific language (such as en-GB or de-DE) is recommended, but I
      -- want multilingual checking
      language = "auto",
      enabled = {
        "bibtex",
        "tex",
        "latex",
        "gitcommit",
        "markdown",
        "org",
        "restructuredtext",
        "rsweave",
        "quarto",
        "rmd",
        "context",
        "html",
        "xhtml",
      },
      -- the API token and additional LanguageTool settings are loaded further down
    },
  },
}

-- textLSP: additional text and grammar analysers for the same prose filetypes
M.textlsp = {
  on_attach = on_attach,
  capabilities = capabilities,
  filetypes = {
    "bibtex",
    "tex",
    "latex",
    "gitcommit",
    "markdown",
    "org",
    "restructuredtext",
    "rsweave",
    "quarto",
    "rmd",
    "context",
    "html",
    "xhtml",
  },
  settings = {
    textLSP = {
      analysers = {
        languagetool = {
          enabled = true,
          check_text = {
            on_open = true,
            on_save = true,
            on_change = false,
          },
        },
        gramformer = {
          -- the gramformer dependency needs to be installed manually
          enabled = true,
          gpu = false,
          check_text = {
            on_open = false,
            on_save = true,
            on_change = false,
          },
        },
        hf_checker = {
          enabled = false,
          gpu = false,
          quantize = 32,
          model = "pszemraj/flan-t5-large-grammar-synthesis",
          min_length = 40,
          check_text = {
            on_open = false,
            on_save = true,
            on_change = false,
          },
        },
        hf_instruction_checker = {
          enabled = true,
          gpu = false,
          quantize = 32,
          model = "grammarly/coedit-large",
          min_length = 40,
          check_text = {
            on_open = false,
            on_save = true,
            on_change = false,
          },
        },
        hf_completion = {
          enabled = true,
          gpu = false,
          quantize = 32,
          model = "bert-base-multilingual-cased",
          topk = 5,
        },
        -- openai = {
        --   enabled = false,
        --   api_key = "<MY_API_KEY>",
        --   check_text = {
        --     on_open = false,
        --     on_save = false,
        --     on_change = false,
        --   },
        --   model = "gpt-3.5-turbo",
        --   max_token = 16,
        -- },
        -- grammarbot = {
        --   enabled = false,
        --   api_key = "<MY_API_KEY>",
        --   -- longer texts are split; this parameter sets the maximum number of splits per analysis
        --   input_max_requests = 1,
        --   check_text = {
        --     on_open = false,
        --     on_save = false,
        --     on_change = false,
        --   },
        -- },
      },
      documents = {
        -- org = {
        --   org_todo_keywords = {
        --     "TODO",
        --     "IN_PROGRESS",
        --     "DONE",
        --   },
        -- },
        txt = {
          parse = true,
        },
      },
    },
  },
}

-- load secrets
-- the secret module should just return a string with the LanguageTool API token
local available, token = require("custom.utils").try_require "custom.secret.languagetool_token"
if available then
  -- these are ltex-ls settings, so they belong under settings.ltex rather than
  -- at the top level of the server config table
  M.ltex.settings.ltex.languageToolOrg = {
    apiKey = token,
    username = "accounts@cscherr.de",
  }
  M.ltex.settings.ltex.languageToolHttpServerUrl = "https://api.languagetoolplus.com/v2/"
end
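
-- Note: `custom.utils.try_require` is defined elsewhere in this config. As a rough
-- sketch (an assumption, not necessarily the actual implementation), it presumably
-- wraps pcall around require and returns whether the module loaded plus its value:
--
--   local function try_require(modname)
--     local ok, result = pcall(require, modname)
--     return ok, result
--   end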

return M
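
-- How this table is meant to be consumed (a sketch assuming the usual
-- nvim-lspconfig pattern; the module path on the requiring side is hypothetical):
--
--   local lspconfig = require "lspconfig"
--   for name, config in pairs(require "custom.configs.servers") do
--     lspconfig[name].setup(config)
--   end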