From: Xuan-Son Nguyen
Date: Wed, 18 Jun 2025 07:58:43 +0000 (+0200)
Subject: llama-chat : fix multiple system message for gemma, orion (#14246)
X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=95402553a5effc61ddc9e29c7bcb56f71311dd4a;p=pkg%2Fggml%2Fsources%2Fllama.cpp

llama-chat : fix multiple system message for gemma, orion (#14246)
---

diff --git a/src/llama-chat.cpp b/src/llama-chat.cpp
index bc4fa05a..0839cad3 100644
--- a/src/llama-chat.cpp
+++ b/src/llama-chat.cpp
@@ -333,7 +333,7 @@ int32_t llm_chat_apply_template(
             std::string role(message->role);
             if (role == "system") {
                 // there is no system message for gemma, but we will merge it with user prompt, so nothing is broken
-                system_prompt = trim(message->content);
+                system_prompt += trim(message->content);
                 continue;
             }
             // in gemma, "assistant" is "model"
@@ -355,7 +355,7 @@ int32_t llm_chat_apply_template(
             std::string role(message->role);
             if (role == "system") {
                 // there is no system message support, we will merge it with user prompt
-                system_prompt = message->content;
+                system_prompt += message->content;
                 continue;
             } else if (role == "user") {
                 ss << "Human: ";
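
The whole fix is the switch from assignment to append: with `=`, each subsequent "system" message overwrote the accumulated system_prompt, so only the last one survived the merge into the user turn. Below is a minimal standalone sketch of the before/after behavior; it is not llama.cpp code, and the message list is made up for illustration.

// Standalone illustration, not part of the patch. Hypothetical message
// list; mirrors the loop shape in llm_chat_apply_template.
#include <iostream>
#include <string>
#include <vector>

int main() {
    const std::vector<std::string> system_messages = {
        "You are a helpful assistant.",
        "Always answer in French.",
    };

    std::string system_prompt;
    for (const auto & content : system_messages) {
        // before the fix: system_prompt = content;
        //   -> keeps only "Always answer in French."
        system_prompt += content; // after the fix: keeps both messages
    }

    std::cout << system_prompt << "\n";
    // prints: You are a helpful assistant.Always answer in French.
    return 0;
}

Note that the patch appends without inserting a separator, so consecutive system messages are joined back-to-back; the gemma branch at least trims surrounding whitespace from each message first.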