Add prompt & suffix functions to better generate Ollama code FIM prompts

Most FIM-capable models use a "template" that auto-generates the
correct prompt text for the model from the `prompt` and `suffix` values
in the request data blob, whenever the `suffix` key is present.
This commit is contained in:
Gered 2024-10-02 22:53:21 -04:00
parent fccac391ef
commit c0e1ced17b

View file

@ -50,6 +50,15 @@ return {
-- Settings passed to the completion provider backend.
provider_options = {
-- Ollama-compatible endpoint; overridable via LLAMA_API_BASE, falling back
-- to a local default when the variable is unset.
base_url = os.getenv 'LLAMA_API_BASE' or 'http://localhost:8080/ollama',
-- Optional API key taken from the environment; nil when LLAMA_API_KEY is unset.
api_key = os.getenv 'LLAMA_API_KEY',
-- Builds the `prompt` value for the request. Returns only the text before the
-- cursor: when a `suffix` value is also sent, the model's own template is
-- expected to assemble the full FIM prompt, so no manual FIM markers are added.
prompt = function(lines_before, lines_after)
return lines_before
-- Alternative hand-built FIM prompt formats, kept commented out for reference
-- (presumably for models/templates without suffix support -- verify per model):
-- return '<PRE> ' .. lines_before .. ' <SUF>' .. lines_after .. ' <MID>'
-- return '<fim_prefix>' .. lines_before .. '<fim_suffix>' .. lines_after .. '<fim_middle>'
-- return '<|fim_prefix|>' .. lines_before .. '<|fim_suffix|>' .. lines_after .. '<|fim_middle|>'
end,
-- Builds the `suffix` value (the text after the cursor) for the request.
suffix = function(lines_after)
return lines_after
end,
},
max_lines = 500,
run_on_every_keystroke = false,