From 2a2178574a222d22138039fb1956cc87be4b9aa5 Mon Sep 17 00:00:00 2001
From: gered
Date: Sun, 10 Nov 2024 18:54:50 -0500
Subject: [PATCH] switch llm.nvim config to use ollama chat completions api

---
 nvim/lua/plugins/llm.lua | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nvim/lua/plugins/llm.lua b/nvim/lua/plugins/llm.lua
index d2239d8..a2b94f2 100644
--- a/nvim/lua/plugins/llm.lua
+++ b/nvim/lua/plugins/llm.lua
@@ -96,7 +96,7 @@ return {
       api_token = llama_api_key,
       model = model,
       backend = 'ollama',
-      url = llama_base_url .. '/api/generate',
+      url = llama_base_url .. '/v1/chat/completions',
       tokens_to_clear = get_stop_tokens(model),
       fim = get_fim_options(model),
       debounce_ms = 500,