diff --git a/nvim/lazy-lock.json b/nvim/lazy-lock.json index 1016892..b0fdb84 100644 --- a/nvim/lazy-lock.json +++ b/nvim/lazy-lock.json @@ -23,6 +23,7 @@ "mini.nvim": { "branch": "main", "commit": "8413efde2546be4dec9ea424bc6af346a14dffbe" }, "neo-tree.nvim": { "branch": "main", "commit": "8c75e8a2949cd6cd35525799200a8d34471ee9eb" }, "nui.nvim": { "branch": "main", "commit": "61574ce6e60c815b0a0c4b5655b8486ba58089a1" }, + "minuet-ai.nvim": { "branch": "main", "commit": "bd5a7ae2bda3a4f57e8a94e3229f41647c77c69e" }, "nvim-ansible": { "branch": "main", "commit": "9c3b4a771b8c8d7b4f2171466464d978cb3846f7" }, "nvim-autopairs": { "branch": "master", "commit": "fd2badc24e675f947162a16c124d395bde80dbd6" }, "nvim-cmp": { "branch": "main", "commit": "ae644feb7b67bf1ce4260c231d1d4300b19c6f30" }, diff --git a/nvim/lua/plugins/nvim-cmp.lua b/nvim/lua/plugins/nvim-cmp.lua index 5436dfe..2f58af9 100644 --- a/nvim/lua/plugins/nvim-cmp.lua +++ b/nvim/lua/plugins/nvim-cmp.lua @@ -36,7 +36,62 @@ return { 'hrsh7th/cmp-path', 'hrsh7th/cmp-nvim-lsp-signature-help', - { dir = '~/code/cmp-ai' }, + { + 'milanglacier/minuet-ai.nvim', + config = function() + local function get_stop_tokens(model) + if model:match '^codellama' then + return { '\n\n', '' } + elseif model:match '^qwen' then + return { '<|endoftext|>' } + elseif model:match '^starcoder' then + return { '' } + elseif model:match '^codegemma' then + return { '<|fim_prefix|>', '<|fim_suffix|>', '<|fim_middle|>', '<|file_separator|>' } + elseif model:match '^codestral' then + return { '[INST]', '[/INST]', '[PREFIX]', '[MIDDLE]', '[SUFFIX]' } + elseif model:match '^deepseek-coder' then + return nil + end + end + + local llama_base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:11434' + -- local model = 'codellama:7b-code' + -- local model = 'codellama:13b-code' + -- local model = 'qwen2.5-coder:7b-base' + -- local model = 'starcoder2:7b' + -- local model = 'codestral:22b' + -- local model = 'codegemma:7b-code' + local model = 
'deepseek-coder-v2:16b' + + require('minuet').setup { + enabled = true, + provider = 'openai_fim_compatible', + context_window = 12800, + context_ratio = 0.75, + throttle = 1000, + debounce = 400, + notify = 'verbose', + request_timeout = 30, + n_completions = 3, + provider_options = { + openai_fim_compatible = { + model = model, + end_point = llama_base_url .. '/v1/completions', + api_key = 'LLAMA_API_KEY', + name = 'Ollama', + stream = false, + optional = { + stop = get_stop_tokens(model), + max_tokens = 256, + }, + }, + }, + } + end, + }, + + -- { dir = '~/code/cmp-ai' }, }, config = function() -- See `:help cmp` @@ -45,39 +100,39 @@ return { local luasnip = require 'luasnip' luasnip.config.setup {} - local cmp_ai = require 'cmp_ai.config' - cmp_ai:setup { - provider_options = { - model = 'codellama:13b-code', - -- model = 'codellama:7b-code', - -- model = 'qwen2.5-coder:7b-base', - -- model = 'starcoder2:7b', - -- model = 'codestral:22b', - -- model = 'codegemma:7b-code', - base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:8080/ollama', - api_key = os.getenv 'LLAMA_API_KEY', - options = { - stop = { '\n\n', '' }, -- codellama - -- stop = { '<|endoftext|>' }, -- qwen-coder - -- stop = { '' }, -- starcoder2 - -- stop = { '[INST]', '[/INST]', '[PREFIX]', '[MIDDLE]', '[SUFFIX]' }, -- codeestral - -- stop = { '<|fim_prefix|>', '<|fim_suffix|>', '<|fim_middle|>', '<|file_separator|>' }, -- codegemma - }, - prompt = function(lines_before, lines_after) - return lines_before - -- return '
 ' .. lines_before .. ' ' .. lines_after .. ' '
-            -- return '' .. lines_before .. '' .. lines_after .. ''
-            -- return '<|fim_prefix|>' .. lines_before .. '<|fim_suffix|>' .. lines_after .. '<|fim_middle|>'
-          end,
-          suffix = function(lines_after)
-            return lines_after
-          end,
-        },
-        max_lines = 500,
-        run_on_every_keystroke = false,
-        notify = true,
-      }
-      vim.api.nvim_set_hl(0, 'CmpItemKindOllama', { fg = '#6cc644' })
+      -- local cmp_ai = require 'cmp_ai.config'
+      -- cmp_ai:setup {
+      --   provider_options = {
+      --     model = 'codellama:13b-code',
+      --     -- model = 'codellama:7b-code',
+      --     -- model = 'qwen2.5-coder:7b-base',
+      --     -- model = 'starcoder2:7b',
+      --     -- model = 'codestral:22b',
+      --     -- model = 'codegemma:7b-code',
+      --     base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:8080/ollama',
+      --     api_key = os.getenv 'LLAMA_API_KEY',
+      --     options = {
+      --       stop = { '\n\n', '' }, -- codellama
+      --       -- stop = { '<|endoftext|>' }, -- qwen-coder
+      --       -- stop = { '' }, -- starcoder2
+      --       -- stop = { '[INST]', '[/INST]', '[PREFIX]', '[MIDDLE]', '[SUFFIX]' }, -- codestral
+      --       -- stop = { '<|fim_prefix|>', '<|fim_suffix|>', '<|fim_middle|>', '<|file_separator|>' }, -- codegemma
+      --     },
+      --     prompt = function(lines_before, lines_after)
+      --       return lines_before
+      --       -- return '
 ' .. lines_before .. ' ' .. lines_after .. ' '
+      --       -- return '' .. lines_before .. '' .. lines_after .. ''
+      --       -- return '<|fim_prefix|>' .. lines_before .. '<|fim_suffix|>' .. lines_after .. '<|fim_middle|>'
+      --     end,
+      --     suffix = function(lines_after)
+      --       return lines_after
+      --     end,
+      --   },
+      --   max_lines = 500,
+      --   run_on_every_keystroke = false,
+      --   notify = true,
+      -- }
+      -- vim.api.nvim_set_hl(0, 'CmpItemKindOllama', { fg = '#6cc644' })
 
       cmp.setup {
         snippet = {
@@ -148,13 +203,16 @@ return {
             end
           end, { 'i', 's' }),
 
-          [''] = cmp.mapping.complete {
-            config = {
-              sources = cmp.config.sources {
-                { name = 'cmp_ai' },
-              },
-            },
-          },
+          -- explicit keybinding for minuet-ai completions
+          [''] = require('minuet').make_cmp_map(),
+
+          -- [''] = cmp.mapping.complete {
+          --   config = {
+          --     sources = cmp.config.sources {
+          --       { name = 'cmp_ai' },
+          --     },
+          --   },
+          -- },
 
           -- For more advanced Luasnip keymaps (e.g. selecting choice nodes, expansion) see:
           --    https://github.com/L3MON4D3/LuaSnip?tab=readme-ov-file#keymaps