From fcf4d60eeea12b3b25877b09aae2c3e6a38b5bbe Mon Sep 17 00:00:00 2001
From: jmorganca
Date: Tue, 30 Apr 2024 17:38:44 -0400
Subject: [PATCH] llm: add back check for empty token cache

---
 llm/ext_server/server.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llm/ext_server/server.cpp b/llm/ext_server/server.cpp
index 52ac653f..3448bcc5 100644
--- a/llm/ext_server/server.cpp
+++ b/llm/ext_server/server.cpp
@@ -1032,7 +1032,7 @@ struct llama_server_context
             slot.has_next_token = false;
         }
 
-        if (llama_token_is_eog(model, result.tok))
+        if (!slot.cache_tokens.empty() && llama_token_is_eog(model, result.tok))
        {
             slot.stopped_eos = true;
             slot.has_next_token = false;
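
For context: the one-line change guards the end-of-generation (EOG) stop condition on the slot having a non-empty token cache, so an EOG token arriving before any tokens have been cached does not immediately mark the slot stopped. The sketch below models that guard in isolation, under stated assumptions; Slot, is_eog, and process_token are simplified stand-ins invented for illustration, not the actual server_slot state or llama_token_is_eog from llama.cpp.

#include <cstdio>
#include <vector>

struct Slot {
    std::vector<int> cache_tokens;   // tokens processed so far
    bool stopped_eos    = false;     // stopped on an end-of-generation token
    bool has_next_token = true;      // whether generation should continue
};

// Stand-in for llama_token_is_eog(model, tok): treat -1 as the EOG token.
static bool is_eog(int tok) { return tok == -1; }

static void process_token(Slot &slot, int tok) {
    // The patched condition: EOG only stops the slot once at least one
    // token has been cached, so an early EOG cannot silently end a request.
    if (!slot.cache_tokens.empty() && is_eog(tok)) {
        slot.stopped_eos    = true;
        slot.has_next_token = false;
        return;
    }
    if (!is_eog(tok)) {
        slot.cache_tokens.push_back(tok);
    }
}

int main() {
    Slot slot;
    process_token(slot, -1);  // EOG with an empty cache: ignored, keep going
    std::printf("after early EOG: has_next_token=%d\n", slot.has_next_token);

    process_token(slot, 42);  // a normal token is cached
    process_token(slot, -1);  // EOG with a non-empty cache: stop
    std::printf("after real EOG:  stopped_eos=%d has_next_token=%d\n",
                slot.stopped_eos, slot.has_next_token);
}

Running this prints has_next_token=1 after the early EOG and stopped_eos=1, has_next_token=0 after the later one, mirroring the behavior the patch restores.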