git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
llama-chat : fix multiple system message for gemma, orion (#14246)
author: Xuan-Son Nguyen <redacted>
Wed, 18 Jun 2025 07:58:43 +0000 (09:58 +0200)
committer: GitHub <redacted>
Wed, 18 Jun 2025 07:58:43 +0000 (09:58 +0200)
src/llama-chat.cpp

index bc4fa05a74ef470796099125835dcc32b0ad52d9..0839cad3ee6db5cea3b883542cee7bb0c9a09a1e 100644 (file)
@@ -333,7 +333,7 @@ int32_t llm_chat_apply_template(
             std::string role(message->role);
             if (role == "system") {
                 // there is no system message for gemma, but we will merge it with user prompt, so nothing is broken
-                system_prompt = trim(message->content);
+                system_prompt += trim(message->content);
                 continue;
             }
             // in gemma, "assistant" is "model"
@@ -355,7 +355,7 @@ int32_t llm_chat_apply_template(
             std::string role(message->role);
             if (role == "system") {
                 // there is no system message support, we will merge it with user prompt
-                system_prompt = message->content;
+                system_prompt += message->content;
                 continue;
             } else if (role == "user") {
                 ss << "Human: ";