replace cmp-ai with minuet-ai
This commit is contained in:
parent
5fc4f2d961
commit
097587c8e0
|
@ -23,6 +23,7 @@
|
|||
"mini.nvim": { "branch": "main", "commit": "8413efde2546be4dec9ea424bc6af346a14dffbe" },
|
||||
"neo-tree.nvim": { "branch": "main", "commit": "8c75e8a2949cd6cd35525799200a8d34471ee9eb" },
|
||||
"nui.nvim": { "branch": "main", "commit": "61574ce6e60c815b0a0c4b5655b8486ba58089a1" },
|
||||
"minuet-ai.nvim": { "branch": "main", "commit": "bd5a7ae2bda3a4f57e8a94e3229f41647c77c69e" },
|
||||
"nvim-ansible": { "branch": "main", "commit": "9c3b4a771b8c8d7b4f2171466464d978cb3846f7" },
|
||||
"nvim-autopairs": { "branch": "master", "commit": "fd2badc24e675f947162a16c124d395bde80dbd6" },
|
||||
"nvim-cmp": { "branch": "main", "commit": "ae644feb7b67bf1ce4260c231d1d4300b19c6f30" },
|
||||
|
|
|
@ -36,7 +36,62 @@ return {
|
|||
'hrsh7th/cmp-path',
|
||||
'hrsh7th/cmp-nvim-lsp-signature-help',
|
||||
|
||||
{ dir = '~/code/cmp-ai' },
|
||||
{
|
||||
'milanglacier/minuet-ai.nvim',
|
||||
config = function()
|
||||
--- Map an Ollama model name to the stop tokens its FIM prompt format needs.
--
-- Matching is by model-name prefix using Lua patterns.  Unknown models
-- (and deepseek-coder, which needs none) yield nil so the caller omits
-- the `stop` option entirely.
--
-- @tparam string model Ollama model tag, e.g. 'codellama:13b-code'
-- @treturn table|nil array of stop strings, or nil when none apply
local function get_stop_tokens(model)
  if model:match '^codellama' then
    return { '\n\n', '<EOT>' }
  elseif model:match '^qwen' then
    return { '<|endoftext|>' }
  elseif model:match '^starcoder' then
    return { '<file_sep>' }
  elseif model:match '^codegemma' then
    return { '<|fim_prefix|>', '<|fim_suffix|>', '<|fim_middle|>', '<|file_separator|>' }
  elseif model:match '^codestral' then
    return { '[INST]', '[/INST]', '[PREFIX]', '[MIDDLE]', '[SUFFIX]' }
  elseif model:match '^deepseek%-coder' then
    -- BUGFIX: '-' is a magic (lazy-repetition) character in Lua patterns,
    -- so the original '^deepseek-coder' could never match a name containing
    -- a literal hyphen (e.g. 'deepseek-coder-v2:16b'); it must be escaped
    -- as '%-'.  Explicit nil: this model family needs no stop tokens.
    return nil
  end
end
|
||||
|
||||
local llama_base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:11434'
|
||||
-- local model = 'codellama:7b-code'
|
||||
-- local model = 'codellama:13b-code'
|
||||
-- local model = 'qwen2.5-coder:7b-base'
|
||||
-- local model = 'starcoder2:7b'
|
||||
-- local model = 'codestral:22b'
|
||||
-- local model = 'codegemma:7b-code'
|
||||
local model = 'deepseek-coder-v2:16b'
|
||||
|
||||
require('minuet').setup {
|
||||
enabled = true,
|
||||
provider = 'openai_fim_compatible',
|
||||
context_window = 12800,
|
||||
context_ratio = 0.75,
|
||||
throttle = 1000,
|
||||
debounce = 400,
|
||||
notify = 'verbose',
|
||||
request_timeout = 30,
|
||||
n_completions = 3,
|
||||
provider_options = {
|
||||
openai_fim_compatible = {
|
||||
model = model,
|
||||
end_point = llama_base_url .. '/v1/completions',
|
||||
api_key = 'LLAMA_API_KEY',
|
||||
name = 'Ollama',
|
||||
stream = false,
|
||||
optional = {
|
||||
stop = get_stop_tokens(model),
|
||||
max_tokens = 256,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
end,
|
||||
},
|
||||
|
||||
-- { dir = '~/code/cmp-ai' },
|
||||
},
|
||||
config = function()
|
||||
-- See `:help cmp`
|
||||
|
@ -45,39 +100,39 @@ return {
|
|||
local luasnip = require 'luasnip'
|
||||
luasnip.config.setup {}
|
||||
|
||||
local cmp_ai = require 'cmp_ai.config'
|
||||
cmp_ai:setup {
|
||||
provider_options = {
|
||||
model = 'codellama:13b-code',
|
||||
-- model = 'codellama:7b-code',
|
||||
-- model = 'qwen2.5-coder:7b-base',
|
||||
-- model = 'starcoder2:7b',
|
||||
-- model = 'codestral:22b',
|
||||
-- model = 'codegemma:7b-code',
|
||||
base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:8080/ollama',
|
||||
api_key = os.getenv 'LLAMA_API_KEY',
|
||||
options = {
|
||||
stop = { '\n\n', '<EOT>' }, -- codellama
|
||||
-- stop = { '<|endoftext|>' }, -- qwen-coder
|
||||
-- stop = { '<file_sep>' }, -- starcoder2
|
||||
-- stop = { '[INST]', '[/INST]', '[PREFIX]', '[MIDDLE]', '[SUFFIX]' }, -- codestral
|
||||
-- stop = { '<|fim_prefix|>', '<|fim_suffix|>', '<|fim_middle|>', '<|file_separator|>' }, -- codegemma
|
||||
},
|
||||
prompt = function(lines_before, lines_after)
|
||||
return lines_before
|
||||
-- return '<PRE> ' .. lines_before .. ' <SUF>' .. lines_after .. ' <MID>'
|
||||
-- return '<fim_prefix>' .. lines_before .. '<fim_suffix>' .. lines_after .. '<fim_middle>'
|
||||
-- return '<|fim_prefix|>' .. lines_before .. '<|fim_suffix|>' .. lines_after .. '<|fim_middle|>'
|
||||
end,
|
||||
suffix = function(lines_after)
|
||||
return lines_after
|
||||
end,
|
||||
},
|
||||
max_lines = 500,
|
||||
run_on_every_keystroke = false,
|
||||
notify = true,
|
||||
}
|
||||
vim.api.nvim_set_hl(0, 'CmpItemKindOllama', { fg = '#6cc644' })
|
||||
-- local cmp_ai = require 'cmp_ai.config'
|
||||
-- cmp_ai:setup {
|
||||
-- provider_options = {
|
||||
-- model = 'codellama:13b-code',
|
||||
-- -- model = 'codellama:7b-code',
|
||||
-- -- model = 'qwen2.5-coder:7b-base',
|
||||
-- -- model = 'starcoder2:7b',
|
||||
-- -- model = 'codestral:22b',
|
||||
-- -- model = 'codegemma:7b-code',
|
||||
-- base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:8080/ollama',
|
||||
-- api_key = os.getenv 'LLAMA_API_KEY',
|
||||
-- options = {
|
||||
-- stop = { '\n\n', '<EOT>' }, -- codellama
|
||||
-- -- stop = { '<|endoftext|>' }, -- qwen-coder
|
||||
-- -- stop = { '<file_sep>' }, -- starcoder2
|
||||
-- -- stop = { '[INST]', '[/INST]', '[PREFIX]', '[MIDDLE]', '[SUFFIX]' }, -- codestral
|
||||
-- -- stop = { '<|fim_prefix|>', '<|fim_suffix|>', '<|fim_middle|>', '<|file_separator|>' }, -- codegemma
|
||||
-- },
|
||||
-- prompt = function(lines_before, lines_after)
|
||||
-- return lines_before
|
||||
-- -- return '<PRE> ' .. lines_before .. ' <SUF>' .. lines_after .. ' <MID>'
|
||||
-- -- return '<fim_prefix>' .. lines_before .. '<fim_suffix>' .. lines_after .. '<fim_middle>'
|
||||
-- -- return '<|fim_prefix|>' .. lines_before .. '<|fim_suffix|>' .. lines_after .. '<|fim_middle|>'
|
||||
-- end,
|
||||
-- suffix = function(lines_after)
|
||||
-- return lines_after
|
||||
-- end,
|
||||
-- },
|
||||
-- max_lines = 500,
|
||||
-- run_on_every_keystroke = false,
|
||||
-- notify = true,
|
||||
-- }
|
||||
-- vim.api.nvim_set_hl(0, 'CmpItemKindOllama', { fg = '#6cc644' })
|
||||
|
||||
cmp.setup {
|
||||
snippet = {
|
||||
|
@ -148,13 +203,16 @@ return {
|
|||
end
|
||||
end, { 'i', 's' }),
|
||||
|
||||
['<C-x>'] = cmp.mapping.complete {
|
||||
config = {
|
||||
sources = cmp.config.sources {
|
||||
{ name = 'cmp_ai' },
|
||||
},
|
||||
},
|
||||
},
|
||||
-- explicit keybinding for minuet-ai completions
|
||||
['<C-x>'] = require('minuet').make_cmp_map(),
|
||||
|
||||
-- ['<C-x>'] = cmp.mapping.complete {
|
||||
-- config = {
|
||||
-- sources = cmp.config.sources {
|
||||
-- { name = 'cmp_ai' },
|
||||
-- },
|
||||
-- },
|
||||
-- },
|
||||
|
||||
-- For more advanced Luasnip keymaps (e.g. selecting choice nodes, expansion) see:
|
||||
-- https://github.com/L3MON4D3/LuaSnip?tab=readme-ov-file#keymaps
|
||||
|
|
Loading…
Reference in a new issue