From: kallewoof
Date: Thu, 14 Aug 2025 11:03:30 +0000 (+0900)
Subject: perplexity : provide a helpful hint for has_cpl case in split_equal error. (#15304)
X-Git-Tag: upstream/0.0.6164~8
X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=810b9fc8b99dd55517a3e94c6dee29748d482559;p=pkg%2Fggml%2Fsources%2Fllama.cpp

perplexity : provide a helpful hint for has_cpl case in split_equal error. (#15304)

When attempting to run llama-perplexity on certain tasks that have coupled sequences, a cryptic error is printed that does not tell you what to do, which is to set the -kvu flag. This adds a hint about that fact.
---

diff --git a/src/llama-batch.cpp b/src/llama-batch.cpp
index 8698d89a..55d89eca 100644
--- a/src/llama-batch.cpp
+++ b/src/llama-batch.cpp
@@ -477,7 +477,7 @@ llama_ubatch llama_batch_allocr::split_simple(uint32_t n_ubatch) {
 
 llama_ubatch llama_batch_allocr::split_equal(uint32_t n_ubatch, bool sequential) {
     if (sequential && has_cpl) {
-        LLAMA_LOG_ERROR("%s: sequential split is not supported when there are coupled sequences in the input batch\n", __func__);
+        LLAMA_LOG_ERROR("%s: sequential split is not supported when there are coupled sequences in the input batch (you may need to use the -kvu flag)\n", __func__);
 
         return {};
     }
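
For context on the has_cpl condition the error refers to: a batch is "coupled" when a single token is assigned to more than one sequence id, which is how llama-perplexity shares a common prompt prefix across the answer sequences of multiple-choice style tasks. The sketch below is illustrative only and not part of the patch; it assumes the public llama.h batch API, and make_coupled_batch is a hypothetical helper name chosen here for illustration.

// Illustrative sketch (not part of the patch): build a one-token batch whose
// token belongs to two sequences at once; this is what marks the batch as
// coupled (has_cpl) and trips the error above when a sequential equal split
// is requested.
#include "llama.h"

static llama_batch make_coupled_batch(llama_token tok) {
    // room for 1 token, no embedding input, up to 2 sequence ids per token
    llama_batch batch = llama_batch_init(1, 0, 2);

    batch.n_tokens     = 1;
    batch.token[0]     = tok;
    batch.pos[0]       = 0;
    batch.n_seq_id[0]  = 2;   // token shared by sequences 0 and 1 -> coupled
    batch.seq_id[0][0] = 0;
    batch.seq_id[0][1] = 1;
    batch.logits[0]    = 0;   // no logits requested for this token

    return batch;             // caller frees it later with llama_batch_free()
}

As the commit message notes, passing -kvu (the unified KV cache option) is the way around the error for such workloads, since it avoids the sequential equal split that cannot handle coupled batches.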