From: Nexes the Elder Date: Fri, 18 Jul 2025 04:25:54 +0000 (+0200) Subject: graph : Pass the graph placeholder message in debug mode (#14748) X-Git-Tag: upstream/0.0.6073~145 X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=09651d09ffc1e941bd1be23163abf5495c416547;p=pkg%2Fggml%2Fsources%2Fllama.cpp graph : Pass the graph placeholder message in debug mode (#14748) Without this condition, the debug log clutters the screen on every batch processed during prompt processing, or on every token generated in Kobold.cpp. --- diff --git a/src/llama-graph.cpp b/src/llama-graph.cpp index f47538ef..e27f78c2 100644 --- a/src/llama-graph.cpp +++ b/src/llama-graph.cpp @@ -467,7 +467,9 @@ bool llm_graph_result::can_reuse(const llm_graph_params & params) { for (auto & input : inputs) { const bool cur = input->can_reuse(params); - LLAMA_LOG_DEBUG(" %s: can_reuse = %d\n", "placeholder", cur); + if (debug > 1) { + LLAMA_LOG_DEBUG("%s: can_reuse = %d\n", "placeholder", cur); + } res = res && cur; }