git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
llama : minor coding style fix for smollm3 (#14605)
author Xuan-Son Nguyen <redacted>
Thu, 10 Jul 2025 07:00:20 +0000 (09:00 +0200)
committer GitHub <redacted>
Thu, 10 Jul 2025 07:00:20 +0000 (10:00 +0300)
src/llama-arch.cpp

index 1955c03eb3d1c9170392e38217db9e8abd59f75c..cb2c9dba8d3585ff3cc70a9eedff887a2a2fffab 100644 (file)
@@ -1777,26 +1777,26 @@ static const std::map<llm_arch, std::map<llm_tensor, const char *>> LLM_TENSOR_N
         },
     },
     {
-        LLM_ARCH_UNKNOWN,
+        LLM_ARCH_SMOLLM3,
         {
-            { LLM_TENSOR_TOKEN_EMBD,      "token_embd" },
+            { LLM_TENSOR_TOKEN_EMBD,     "token_embd" },
+            { LLM_TENSOR_OUTPUT_NORM,    "output_norm" },
+            { LLM_TENSOR_OUTPUT,         "output" },
+            { LLM_TENSOR_ATTN_NORM,      "blk.%d.attn_norm" },
+            { LLM_TENSOR_ATTN_Q,         "blk.%d.attn_q" },
+            { LLM_TENSOR_ATTN_K,         "blk.%d.attn_k" },
+            { LLM_TENSOR_ATTN_V,         "blk.%d.attn_v" },
+            { LLM_TENSOR_ATTN_OUT,       "blk.%d.attn_output" },
+            { LLM_TENSOR_FFN_NORM,       "blk.%d.ffn_norm" },
+            { LLM_TENSOR_FFN_GATE,       "blk.%d.ffn_gate" },
+            { LLM_TENSOR_FFN_DOWN,       "blk.%d.ffn_down" },
+            { LLM_TENSOR_FFN_UP,         "blk.%d.ffn_up" },
         },
     },
     {
-        LLM_ARCH_SMOLLM3,
+        LLM_ARCH_UNKNOWN,
         {
-            { LLM_TENSOR_TOKEN_EMBD,     "token_embd"            },
-            { LLM_TENSOR_OUTPUT_NORM,    "output_norm"           },
-            { LLM_TENSOR_OUTPUT,         "output"                },
-            { LLM_TENSOR_ATTN_NORM,      "blk.%d.attn_norm"      },
-            { LLM_TENSOR_ATTN_Q,         "blk.%d.attn_q"         },
-            { LLM_TENSOR_ATTN_K,         "blk.%d.attn_k"         },
-            { LLM_TENSOR_ATTN_V,         "blk.%d.attn_v"         },
-            { LLM_TENSOR_ATTN_OUT,       "blk.%d.attn_output"    },
-            { LLM_TENSOR_FFN_NORM,       "blk.%d.ffn_norm"       },
-            { LLM_TENSOR_FFN_GATE,       "blk.%d.ffn_gate"       },
-            { LLM_TENSOR_FFN_DOWN,       "blk.%d.ffn_down"       },
-            { LLM_TENSOR_FFN_UP,         "blk.%d.ffn_up"         },
+            { LLM_TENSOR_TOKEN_EMBD,      "token_embd" },
         },
     },
 };