add llm.nvim plugin

Gered 2024-11-10 14:27:06 -05:00
parent b14f61daaa
commit e1a2c551ed
2 changed files with 141 additions and 0 deletions

nvim/lazy-lock.json
@@ -16,6 +16,7 @@
"indent-blankline.nvim": { "branch": "master", "commit": "e7a4442e055ec953311e77791546238d1eaae507" },
"lazy.nvim": { "branch": "main", "commit": "1159bdccd8910a0fd0914b24d6c3d186689023d9" },
"lazydev.nvim": { "branch": "main", "commit": "491452cf1ca6f029e90ad0d0368848fac717c6d2" },
"llm.nvim": { "branch": "main", "commit": "9832a149bdcf0709433ca9c2c3a1c87460e98d13" },
"lualine.nvim": { "branch": "master", "commit": "b431d228b7bbcdaea818bdc3e25b8cdbe861f056" },
"luvit-meta": { "branch": "main", "commit": "ce76f6f6cdc9201523a5875a4471dcfe0186eb60" },
"mason-lspconfig.nvim": { "branch": "main", "commit": "25c11854aa25558ee6c03432edfa0df0217324be" },

nvim/lua/plugins/llm.lua (new file, 140 lines)

@@ -0,0 +1,140 @@
return {
  {
    'huggingface/llm.nvim',
    config = function()
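      -- Map the model name to the control/stop tokens its prompt format uses,
      -- so they can be stripped from completions (passed to tokens_to_clear below).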
      local function get_stop_tokens(model)
        if model:match '^codellama' then
          return { '<PRE>', '<SUF>', '<MID>', '<EOT>' }
        elseif model:match '^qwen' then
          return {
            '<|endoftext|>',
            '<|fim_prefix|>',
            '<|fim_middle|>',
            '<|fim_suffix|>',
            '<|fim_pad|>',
            '<|repo_name|>',
            '<|file_sep|>',
            '<|im_start|>',
            '<|im_end|>',
          }
        elseif model:match '^starcoder' then
          return { '<fim_prefix>', '<fim_suffix>', '<fim_middle>', '<file_sep>', '<|endoftext|>' }
        elseif model:match '^codestral' then
          return { '[INST]', '[/INST]', '[PREFIX]', '[MIDDLE]', '[SUFFIX]' }
        elseif model:match '^deepseek%-coder' then
          return { '<fim▁begin>', '<fim▁hole>', '<fim▁end>', '<end▁of▁sentence>' }
        elseif model:match '^granite%-code' then
          return { 'System:', 'Question:', 'Answer:' }
        end
      end
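
      -- Map the model name to the fill-in-the-middle (FIM) markers it expects;
      -- the commented-out templates show how each model assembles its prompt.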
      local function get_fim_options(model)
        if model:match '^codellama' then
          -- return '<PRE> ' .. prefix .. ' <SUF>' .. suffix .. ' <MID>'
          return {
            enabled = true,
            prefix = '<PRE> ',
            suffix = ' <SUF>',
            middle = ' <MID>',
          }
        elseif model:match '^qwen' then
          -- return '<|fim_prefix|>' .. prefix .. '<|fim_suffix|>' .. suffix .. '<|fim_middle|>'
          return {
            enabled = true,
            prefix = '<|fim_prefix|>',
            suffix = '<|fim_suffix|>',
            middle = '<|fim_middle|>',
          }
        elseif model:match '^starcoder' then
          -- return '<fim_prefix>' .. prefix .. '<fim_suffix>' .. suffix .. '<fim_middle>'
          return {
            enabled = true,
            prefix = '<fim_prefix>',
            suffix = '<fim_suffix>',
            middle = '<fim_middle>',
          }
        elseif model:match '^codestral' then
          -- return '[SUFFIX]' .. suffix .. '[PREFIX]' .. prefix
          return {
            enabled = true,
            prefix = '[PREFIX]',
            suffix = '[SUFFIX]',
            middle = '',
          }
        elseif model:match '^deepseek%-coder' then
          -- return '<fim▁begin>' .. prefix .. '<fim▁hole>' .. suffix .. '<fim▁end>'
          return {
            enabled = true,
            prefix = '<fim▁begin>',
            suffix = '<fim▁hole>',
            middle = '<fim▁end>',
          }
        elseif model:match '^granite%-code' then
          -- return '<fim_prefix> ' .. prefix .. '<fim_suffix> ' .. suffix .. '<fim_middle>'
          return {
            enabled = true,
            prefix = '<fim_prefix> ',
            suffix = '<fim_suffix> ',
            middle = '<fim_middle>',
          }
        end
      end
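
      -- Backend endpoint and optional API key, read from the environment;
      -- defaults to a local Ollama instance. For example (hypothetical values):
      --   export LLAMA_API_BASE=https://ollama.example.com
      --   export LLAMA_API_KEY=...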
      local llama_base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:11434'
      local llama_api_key = os.getenv 'LLAMA_API_KEY'

      local model = 'codellama:13b-code-q4_K_M'
      -- local model = 'qwen2.5-coder:7b-base-q4_K_M'
      -- local model = 'starcoder2:7b-q4_K_M'
      -- local model = 'codestral:22b-v0.1-q4_K_M'
      -- local model = 'deepseek-coder-v2:16b-lite-base-q4_K_M-fixed'
      -- local model = 'granite-code:8b-base-q4_K_M'
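
      -- tokens_to_clear and fim are derived from the model name above, so
      -- switching models keeps the stop tokens and FIM template in sync.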
      require('llm').setup {
        enable_suggestions_on_startup = false,
        accept_keymap = '<Tab>',
        dismiss_keymap = '<S-Tab>',
        api_token = llama_api_key,
        model = model,
        backend = 'ollama',
        url = llama_base_url .. '/api/generate',
        tokens_to_clear = get_stop_tokens(model),
        fim = get_fim_options(model),
        debounce_ms = 500,
        request_body = {
          temperature = 0.2,
          n = 1,
          -- max_tokens = 256,
          -- stop = get_stop_tokens(model),
        },
        lsp = {
          -- NOTE: a custom fork of llm-ls is required to use api keys with ollama (e.g. via open webui)
          -- to install:
          --   git clone https://code.blarg.ca/gered/llm-ls.git
          --   cd llm-ls/
          --   cargo install --path ./crates/llm-ls/ --locked
          bin_path = os.getenv 'HOME' .. '/.cargo/bin/llm-ls',
          -- host = '127.0.0.1',
          -- port = 12345,
          -- cmd_env = { LLM_LOG_LEVEL = 'INFO' },
        },
      }

      local function map(mode, lhs, rhs, desc)
        local opts = { silent = true, noremap = true, desc = desc or '' }
        vim.keymap.set(mode, lhs, rhs, opts)
      end

      map('i', '<C-x>', '<Cmd>LLMSuggestion<CR>', 'Request LLM suggestion')
      map('n', '<leader>ta', '<Cmd>LLMToggleAutoSuggest<CR>', 'Toggle: LLM [A]uto Suggestions')
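      -- <Tab> accepts the currently shown suggestion, or falls through to a
      -- literal <Tab> keypress when no suggestion is visible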
      map('i', '<Tab>', function()
        local llm = require 'llm.completion'
        if llm.shown_suggestion ~= nil then
          llm.complete()
        else
          local keys = vim.api.nvim_replace_termcodes('<Tab>', true, false, true)
          vim.api.nvim_feedkeys(keys, 'n', false)
        end
      end)
    end,
  },
}