git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
llama : change yarn_ext_factor placeholder to -1 (#3922)
author cebtenzzre <redacted>
Fri, 3 Nov 2023 06:31:58 +0000 (02:31 -0400)
committer GitHub <redacted>
Fri, 3 Nov 2023 06:31:58 +0000 (08:31 +0200)
llama.cpp

index bb60044b4707f777b239fd8e8fa245e43c71bdee..cc0211ceb02113dab4672a20979364cab6a9ec91 100644 (file)
--- a/llama.cpp
+++ b/llama.cpp
@@ -7982,7 +7982,7 @@ struct llama_context_params llama_context_default_params() {
         /*.rope_scaling_type           =*/ LLAMA_ROPE_SCALING_UNSPECIFIED,
         /*.rope_freq_base              =*/ 0.0f,
         /*.rope_freq_scale             =*/ 0.0f,
-        /*.yarn_ext_factor             =*/ NAN,
+        /*.yarn_ext_factor             =*/ -1.0f,
         /*.yarn_attn_factor            =*/ 1.0f,
         /*.yarn_beta_fast              =*/ 32.0f,
         /*.yarn_beta_slow              =*/ 1.0f,
@@ -8125,7 +8125,7 @@ struct llama_context * llama_new_context_with_model(
         cparams.rope_freq_scale = 1.0f; // never scale if scaling type is none
     }
 
-    if (std::isnan(cparams.yarn_ext_factor)) { // NaN indicates 'not set'
+    if (cparams.yarn_ext_factor < 0.0f) { // negative indicates 'not set'
         cparams.yarn_ext_factor = rope_scaling_type == LLAMA_ROPE_SCALING_YARN ? 1.0f : 0.0f;
     }