git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
convert : more variants of rope_theta config entries (#18668)
author: Sigbjørn Skjæret <redacted>
Wed, 7 Jan 2026 21:34:51 +0000 (22:34 +0100)
committer: GitHub <redacted>
Wed, 7 Jan 2026 21:34:51 +0000 (22:34 +0100)
convert_hf_to_gguf.py

index 0a8bac0e2d64aa8552650f63fb973d044a75c36c..386e2a7e52eb293561c502d80376703af6c76ae9 100755 (executable)
@@ -771,8 +771,8 @@ class TextModel(ModelBase):
 
         self.rope_parameters = self.hparams.get("rope_parameters", self.hparams.get("rope_scaling")) or {}
 
-        rope_theta = self.find_hparam(["rope_theta", "global_rope_theta", "rotary_emb_base"], optional=True)
-        local_rope_theta = self.find_hparam(["local_rope_theta", "rope_local_theta", "swa_rope_theta", "rope_local_base_freq"], optional=True)
+        rope_theta = self.find_hparam(["global_rope_theta", "rope_global_theta", "rope_theta_global", "rope_theta", "rotary_emb_base"], optional=True)
+        local_rope_theta = self.find_hparam(["local_rope_theta", "rope_local_theta", "rope_theta_local", "swa_rope_theta", "rope_local_base_freq"], optional=True)
 
         # Ensure "rope_theta" and "rope_type" is mirrored in rope_parameters
         if "full_attention" not in self.rope_parameters and "sliding_attention" not in self.rope_parameters: