update minuet-ai config

use a fork to fix compatibility with Ollama + Open WebUI endpoints

update the minuet-ai config to use the prompt string and response
parsing customization hooks that this fork adds, which are necessary
to target the non-legacy chat completions API endpoint
Gered 2024-10-19 15:48:38 -04:00
parent 7964e15b8b
commit eb2f19f576
2 changed files with 34 additions and 3 deletions
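
For context, the shape difference the fork has to bridge, sketched as Lua tables. This is an illustration based on the hooks in the diff below, not the fork's actual internals; the placeholder strings are made up:

-- legacy /v1/completions FIM request: the prefix and suffix travel as
-- dedicated top-level fields, so no per-model templating is needed
local legacy_body = {
  model = 'codellama:13b-code',
  prompt = '<text before cursor>',
  suffix = '<text after cursor>',
}

-- non-legacy /v1/chat/completions request: there are no prompt/suffix
-- fields, so the model's FIM template has to be baked into a chat
-- message by hand, which is what the fork's prompt_fn hook enables
local chat_body = {
  model = 'codellama:13b-code',
  messages = {
    { role = 'user', content = '<PRE> <text before> <SUF><text after> <MID>' },
  },
}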

lazy-lock.json

@@ -22,7 +22,7 @@
   "mason.nvim": { "branch": "main", "commit": "e2f7f9044ec30067bc11800a9e266664b88cda22" },
   "menu": { "branch": "main", "commit": "ea606f6ab2430db0aece8075e62c14132b815ae1" },
   "mini.nvim": { "branch": "main", "commit": "e52ac74bd4e9c0ce6a182ee551eb099236b5a89d" },
-  "minuet-ai.nvim": { "branch": "main", "commit": "bd5a7ae2bda3a4f57e8a94e3229f41647c77c69e" },
+  "minuet-ai": { "branch": "openai_fim_fn_customizations", "commit": "cec6383aeacb5087134204412bd980f368972c53" },
   "nvim-ansible": { "branch": "main", "commit": "9c3b4a771b8c8d7b4f2171466464d978cb3846f7" },
   "nvim-autopairs": { "branch": "master", "commit": "ee297f215e95a60b01fde33275cc3c820eddeebe" },
   "nvim-cmp": { "branch": "main", "commit": "ae644feb7b67bf1ce4260c231d1d4300b19c6f30" },


@@ -37,7 +37,10 @@ return {
   'hrsh7th/cmp-nvim-lsp-signature-help',
   {
-    'milanglacier/minuet-ai.nvim',
+    -- 'milanglacier/minuet-ai.nvim',
+    -- dir = '~/code/minuet-ai.nvim',
+    url = 'ssh://git@code.blarg.ca:2250/gered/minuet-ai.git',
+    branch = 'openai_fim_fn_customizations',
     config = function()
       local function get_stop_tokens(model)
         if model:match '^codellama' then
@@ -53,6 +56,20 @@ return {
         end
       end
+      local function get_fim_prompt(model, prefix, suffix)
+        if model:match '^codellama' then
+          return '<PRE> ' .. prefix .. ' <SUF>' .. suffix .. ' <MID>'
+        elseif model:match '^qwen' then
+          return '<|fim_prefix|>' .. prefix .. '<|fim_suffix|>' .. suffix .. '<|fim_middle|>'
+        elseif model:match '^starcoder' then
+          return '<fim_prefix>' .. prefix .. '<fim_suffix>' .. suffix .. '<fim_middle>'
+        elseif model:match '^codestral' then
+          return '[SUFFIX]' .. suffix .. '[PREFIX]' .. prefix
+        elseif model:match '^deepseek-coder' then
+          return '<fim▁begin>' .. prefix .. '<fim▁hole>' .. suffix .. '<fim▁end>'
+        end
+      end
       local llama_base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:11434'
       -- local model = 'codellama:7b-code'
       local model = 'codellama:13b-code'
@@ -75,7 +92,7 @@ return {
         provider_options = {
           openai_fim_compatible = {
             model = model,
-            end_point = llama_base_url .. '/v1/completions',
+            end_point = llama_base_url .. '/v1/chat/completions',
             api_key = 'LLAMA_API_KEY',
             name = 'Ollama',
             stream = false,
@@ -85,6 +102,20 @@ return {
               temperature = 0.2,
               n = 1,
             },
+            prompt_fn = function(data, options, prefix, suffix)
+              data.messages = {
+                {
+                  role = 'user',
+                  content = get_fim_prompt(model, prefix, suffix),
+                },
+              }
+              --data.prompt = prefix
+              --data.suffix = suffix
+            end,
+            get_text_fn = function(json)
+              return json.choices[1].message.content
+              --return json.choices[1].text
+            end,
           },
         },
       }
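
For reference, what the get_fim_prompt helper added above produces for two of the matched model families. The prefix/suffix inputs here are made-up examples, but the templates come straight from the diff:

-- hypothetical inputs, just to show the assembled FIM strings
print(get_fim_prompt('codellama:13b-code', 'local x =', 'return x'))
-- prints: <PRE> local x = <SUF>return x <MID>
print(get_fim_prompt('qwen2.5-coder', 'local x =', 'return x'))
-- prints: <|fim_prefix|>local x =<|fim_suffix|>return x<|fim_middle|>

Note that the codestral branch emits the suffix before the prefix; that ordering is part of that model's FIM template, not a bug.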
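
Likewise, a sketch of the response shape that get_text_fn walks, assuming the standard OpenAI-style chat completions JSON decoded into a Lua table; the commented-out json.choices[1].text line in the diff is the legacy /v1/completions equivalent:

-- trimmed-down chat completions response, after JSON decoding
local json = {
  choices = {
    {
      message = { role = 'assistant', content = 'return x + 1' },
      finish_reason = 'stop',
    },
  },
}
-- get_text_fn plucks the completion text out of the first choice
assert(json.choices[1].message.content == 'return x + 1')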