Add Ollama completion support (work in progress, using local plugin source).
The plugin is a fork of cmp-ai with various fixes and all other AI backends stripped out.
commit fccac391ef (parent 97fe503287)
@@ -35,6 +35,8 @@ return {
       'hrsh7th/cmp-nvim-lsp',
       'hrsh7th/cmp-path',
       'hrsh7th/cmp-nvim-lsp-signature-help',
+
+      { dir = '~/code/cmp-ai' },
     },
     config = function()
       -- See `:help cmp`
@@ -43,6 +45,18 @@ return {
       local luasnip = require 'luasnip'
       luasnip.config.setup {}
 
+      local cmp_ai = require 'cmp_ai.config'
+      cmp_ai:setup {
+        provider_options = {
+          base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:8080/ollama',
+          api_key = os.getenv 'LLAMA_API_KEY',
+        },
+        max_lines = 500,
+        run_on_every_keystroke = false,
+        notify = true,
+      }
+      vim.api.nvim_set_hl(0, 'CmpItemKindOllama', { fg = '#6cc644' })
+
       cmp.setup {
         snippet = {
           expand = function(args)
@@ -112,6 +126,14 @@ return {
             end
           end, { 'i', 's' }),
 
+          ['<C-x>'] = cmp.mapping.complete {
+            config = {
+              sources = cmp.config.sources {
+                { name = 'cmp_ai' },
+              },
+            },
+          },
+
           -- For more advanced Luasnip keymaps (e.g. selecting choice nodes, expansion) see:
           -- https://github.com/L3MON4D3/LuaSnip?tab=readme-ov-file#keymaps
         },
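Note on the new provider_options block: base_url falls back to a local endpoint unless LLAMA_API_BASE is set, and api_key is read from LLAMA_API_KEY, so the forked cmp-ai can be pointed at a different server without editing this file. A minimal sketch of overriding both from Lua before the setup call runs (the URL and key value below are placeholders, not values from this commit):

    -- Hypothetical override; must run before cmp_ai:setup so the os.getenv
    -- calls in the committed config see the new values. vim.env writes to
    -- Neovim's process environment.
    vim.env.LLAMA_API_BASE = 'http://localhost:11434' -- placeholder endpoint
    vim.env.LLAMA_API_KEY = 'placeholder-key'

In practice the same effect comes from exporting the variables in the shell before launching Neovim. The <C-x> mapping queries only the cmp_ai source, and only on demand, which matches run_on_every_keystroke = false in the setup above.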
|