git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
arg : fix attention non-causal arg value hint (#9985)
author     Daniel Bevenius <redacted>
           Mon, 21 Oct 2024 18:12:52 +0000 (20:12 +0200)
committer  GitHub <redacted>
           Mon, 21 Oct 2024 18:12:52 +0000 (21:12 +0300)
This commit fixes the argument value hint for the `--attention`
argument, changing the malformed `{causal,non,causal}` to
`{causal,non-causal}`.

The motivation for this change is that the only valid values for this
argument are `causal` and `non-causal`.

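A usage example for the corrected hint (the binary name and model path below are illustrative and not part of the commit; per the diff, `--attention` is available in the embedding, retrieval, and server examples):

    llama-embedding -m model.gguf -p "some text" --attention non-causal

Passing `causal` selects causal attention instead, and omitting the flag keeps the model default, as the option's help text states.
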
common/arg.cpp

index d6a8e1f6ff0bf8a7a4ce18b967d1decd174754fd..168c2b1f3e1ed99662c552967d36fa861fee15b7 100644
@@ -1097,7 +1097,7 @@ common_params_context common_params_parser_init(common_params & params, llama_ex
         }
     ).set_examples({LLAMA_EXAMPLE_EMBEDDING, LLAMA_EXAMPLE_RETRIEVAL, LLAMA_EXAMPLE_SERVER}).set_env("LLAMA_ARG_POOLING"));
     add_opt(common_arg(
-        {"--attention"}, "{causal,non,causal}",
+        {"--attention"}, "{causal,non-causal}",
         "attention type for embeddings, use model default if unspecified",
         [](common_params & params, const std::string & value) {
             /**/ if (value == "causal") { params.attention_type = LLAMA_ATTENTION_TYPE_CAUSAL; }
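For context, a minimal sketch of how this handler plausibly continues; the `non-causal` branch and the error path are assumptions inferred from the visible `causal` branch and the fixed hint, and are not part of this hunk:

        [](common_params & params, const std::string & value) {
            /**/ if (value == "causal")     { params.attention_type = LLAMA_ATTENTION_TYPE_CAUSAL; }     // branch visible in the hunk
            else if (value == "non-causal") { params.attention_type = LLAMA_ATTENTION_TYPE_NON_CAUSAL; } // assumed branch matching the corrected hint
            else { throw std::invalid_argument("invalid attention type: " + value); }                     // assumed error path for any other value
        }

With the hint now reading `{causal,non-causal}`, the help output matches the values the handler actually accepts.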