From: Juk Armstrong
Date: Tue, 5 Aug 2025 12:56:44 +0000 (+0100)
Subject: Fix `glm4moe` bug (#15088)
X-Git-Tag: upstream/0.0.6164~72
X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=c81de6e107ef51ef76aadcb8a6f008711c462517;p=pkg%2Fggml%2Fsources%2Fllama.cpp

Fix `glm4moe` bug (#15088)
---

diff --git a/src/llama-model.cpp b/src/llama-model.cpp
index 44f89003..f53fa2f8 100644
--- a/src/llama-model.cpp
+++ b/src/llama-model.cpp
@@ -13800,10 +13800,6 @@ struct llm_build_glm4_moe : public llm_graph_context {
                         LLM_FFN_SILU, LLM_FFN_PAR, il);
                 cb(cur, "ffn_out", il);
             } else {
-                // MoE layer with shared experts
-                const int64_t n_expert      = hparams.n_expert;
-                const int64_t n_expert_used = hparams.n_expert_used;
-
                 // Process routed experts using existing MoE infrastructure
                 ggml_tensor * routed_out = build_moe_ffn(cur,
                         model.layers[il].ffn_gate_inp,
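
Editor's note on why deleting two const locals is a bug fix rather than dead-code
cleanup: in llama.cpp of this vintage, llm_graph_context initializes its own
n_expert_used member along the lines of `cparams.warmup ? hparams.n_expert :
hparams.n_expert_used`, so warmup graphs are built as if every expert were routed
and worst-case buffers get allocated. If that holds here, the deleted locals
shadowed those warmup-aware members with the raw hparams values inside the
glm4moe builder. The sketch below reproduces that shadowing pattern with
hypothetical stand-in types (hparams_t, graph_ctx); it is an illustration under
those assumptions, not llama.cpp code.

    // Minimal sketch of the shadowing pattern (hypothetical names, not llama.cpp).
    #include <cstdint>
    #include <cstdio>

    struct hparams_t {
        int64_t n_expert      = 128; // total routed experts in the model
        int64_t n_expert_used = 8;   // experts selected per token at inference
    };

    struct graph_ctx {
        hparams_t hparams;
        // Warmup-aware copy: warmup graphs pretend all experts are used so
        // buffers are sized for the worst case (mirrors how llm_graph_context
        // appears to derive this value from cparams.warmup).
        int64_t n_expert_used;

        explicit graph_ctx(bool warmup)
            : n_expert_used(warmup ? hparams.n_expert : hparams.n_expert_used) {}

        void build_moe_branch_buggy() const {
            // Bug pattern: a fresh local read from hparams shadows the
            // warmup-aware member, so warmup sees 8 experts instead of 128.
            const int64_t n_expert_used = hparams.n_expert_used;
            std::printf("buggy: %lld experts\n", (long long) n_expert_used);
        }

        void build_moe_branch_fixed() const {
            // Fix pattern: rely on the member, which already encodes the rule.
            std::printf("fixed: %lld experts\n", (long long) n_expert_used);
        }
    };

    int main() {
        const graph_ctx ctx(/*warmup=*/true);
        ctx.build_moe_branch_buggy(); // prints 8   (undersized warmup graph)
        ctx.build_moe_branch_fixed(); // prints 128 (worst case, as intended)
        return 0;
    }

Built with e.g. `g++ -std=c++17 sketch.cpp`, the buggy branch reports 8 experts
during warmup while the fixed branch reports 128, which is the kind of
undersized-allocation symptom a shadowed n_expert_used would produce.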