From: Georgi Gerganov
Date: Fri, 8 Mar 2024 10:40:02 +0000 (+0200)
Subject: server : fix EOS token detection with disabled cache (#5938)
X-Git-Tag: upstream/0.0.4488~2125
X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=af37fd8b30e37ccbffdd82e6f48559e2fb7ce7dd;p=pkg%2Fggml%2Fsources%2Fllama.cpp

server : fix EOS token detection with disabled cache (#5938)
---

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index f255ad76..1434095f 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -1123,7 +1123,7 @@ struct server_context {
             });
         }
 
-        if (!slot.cache_tokens.empty() && result.tok == llama_token_eos(model)) {
+        if (result.tok == llama_token_eos(model)) {
            slot.stopped_eos = true;
            slot.has_next_token = false;
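
Note: the change above drops the `!slot.cache_tokens.empty()` guard, so the server stops on an EOS token even when the prompt/KV cache is disabled (in which case `cache_tokens` stays empty and, before this fix, the EOS branch was never reached). Below is a minimal standalone sketch of that behavior, not llama.cpp's actual code; the `Slot` struct, token ids, and `handle_token` helper are hypothetical stand-ins for the fields touched by the diff.

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

using llama_token = int32_t;

// Hypothetical stand-in for the server slot fields touched by the diff.
struct Slot {
    std::vector<llama_token> cache_tokens; // stays empty when the cache is disabled
    bool stopped_eos    = false;
    bool has_next_token = true;
};

// After the fix: the EOS check depends only on the sampled token,
// not on whether any tokens have been cached.
void handle_token(Slot & slot, llama_token tok, llama_token tok_eos) {
    if (tok == tok_eos) {
        slot.stopped_eos    = true;
        slot.has_next_token = false;
    }
}

int main() {
    Slot slot;                          // cache disabled -> cache_tokens is empty
    const llama_token tok_eos = 2;      // hypothetical EOS token id
    handle_token(slot, /*tok=*/2, tok_eos);
    std::printf("stopped_eos=%d has_next_token=%d\n", slot.stopped_eos, slot.has_next_token);
    return 0;
}
```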