git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
chat : fix hunyuan auto-detection (#15114)
author stevenkuang <redacted>
Wed, 6 Aug 2025 09:48:30 +0000 (17:48 +0800)
committer GitHub <redacted>
Wed, 6 Aug 2025 09:48:30 +0000 (11:48 +0200)
Signed-off-by: stevenkuang <redacted>
src/llama-chat.cpp

index 1baf7350a67f6dd86bae0a3cebe964b4639a0587..da814f00a8c7c0eed746fa15040d255bd754bd80 100644 (file)
@@ -193,11 +193,11 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
         return LLM_CHAT_TEMPLATE_LLAMA4;
     } else if (tmpl_contains("<|endofuserprompt|>")) {
         return LLM_CHAT_TEMPLATE_DOTS1;
-    } else if (tmpl_contains("<|startoftext|>") && tmpl_contains("<|extra_4|>")) {
+    } else if (tmpl_contains("<|extra_0|>") && tmpl_contains("<|extra_4|>")) {
         return LLM_CHAT_TEMPLATE_HUNYUAN_MOE;
     } else if (tmpl_contains("<|start|>") && tmpl_contains("<|channel|>")) {
         return LLM_CHAT_TEMPLATE_OPENAI_MOE;
-    } else if (tmpl_contains("<|hy_place▁holder▁no▁2|>") && tmpl_contains("<|hy_place▁holder▁no▁3|>")) {
+    } else if (tmpl_contains("<|hy_Assistant|>") && tmpl_contains("<|hy_place▁holder▁no▁3|>")) {
         return LLM_CHAT_TEMPLATE_HUNYUAN_DENSE;
     } else if (tmpl_contains("<|im_assistant|>assistant<|im_middle|>")) {
         return LLM_CHAT_TEMPLATE_KIMI_K2;
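
The hunk above only swaps the marker tokens that key the Hunyuan branches of llm_chat_detect_template(); the detection itself remains a plain substring match on the Jinja chat template. The following standalone C++ sketch shows that mechanism and why the swap matters. It is an illustration, not the upstream code: detect_hunyuan, the local enum, and the sample template strings in main() are hypothetical stand-ins for the real LLM_CHAT_TEMPLATE_* values and the tmpl_contains lambda defined in src/llama-chat.cpp.

    // Minimal sketch of substring-based chat-template detection (assumed to
    // mirror the tmpl_contains lambda in llm_chat_detect_template()).
    #include <iostream>
    #include <string>

    enum hunyuan_template {
        TEMPLATE_UNKNOWN,
        TEMPLATE_HUNYUAN_MOE,    // stand-in for LLM_CHAT_TEMPLATE_HUNYUAN_MOE
        TEMPLATE_HUNYUAN_DENSE,  // stand-in for LLM_CHAT_TEMPLATE_HUNYUAN_DENSE
    };

    static hunyuan_template detect_hunyuan(const std::string & tmpl) {
        // Plain substring check over the raw chat-template text.
        const auto tmpl_contains = [&tmpl](const std::string & needle) {
            return tmpl.find(needle) != std::string::npos;
        };

        // After this commit, Hunyuan MoE is keyed on <|extra_0|> + <|extra_4|>
        // instead of <|startoftext|> + <|extra_4|>, and Hunyuan dense on
        // <|hy_Assistant|> + <|hy_place▁holder▁no▁3|>.
        if (tmpl_contains("<|extra_0|>") && tmpl_contains("<|extra_4|>")) {
            return TEMPLATE_HUNYUAN_MOE;
        }
        if (tmpl_contains("<|hy_Assistant|>") && tmpl_contains("<|hy_place▁holder▁no▁3|>")) {
            return TEMPLATE_HUNYUAN_DENSE;
        }
        return TEMPLATE_UNKNOWN;
    }

    int main() {
        // Hypothetical template fragments, only to exercise the checks above.
        std::cout << detect_hunyuan("...<|extra_0|>...<|extra_4|>...")     << "\n"; // 1: detected as MoE
        std::cout << detect_hunyuan("...<|startoftext|>...<|extra_4|>...") << "\n"; // 0: old key no longer matches
        return 0;
    }

Because detection is first-match substring testing, the choice of marker pair determines which templates can collide; picking tokens unique to the Hunyuan templates avoids misclassifying other templates that also happen to contain <|startoftext|>.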