switch llm.nvim config to use ollama chat completions api

Gered 2024-11-10 18:54:50 -05:00
parent 9d789aa496
commit 2a2178574a


@@ -96,7 +96,7 @@ return {
     api_token = llama_api_key,
     model = model,
     backend = 'ollama',
-    url = llama_base_url .. '/api/generate',
+    url = llama_base_url .. '/v1/chat/completions',
     tokens_to_clear = get_stop_tokens(model),
     fim = get_fim_options(model),
     debounce_ms = 500,
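
For context: Ollama's native /api/generate endpoint takes a single raw prompt string, while /v1/chat/completions is Ollama's OpenAI-compatible endpoint and expects a messages array of role/content pairs. A minimal sketch of the two request bodies (the model name and prompt text are placeholders, not taken from this repo):

    POST {llama_base_url}/api/generate            -- old, Ollama-native
    { "model": "codellama:7b", "prompt": "...", "stream": false }

    POST {llama_base_url}/v1/chat/completions     -- new, OpenAI-compatible
    { "model": "codellama:7b", "messages": [{ "role": "user", "content": "..." }] }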