From: Gilad S.
Date: Wed, 14 May 2025 16:18:18 +0000 (+0300)
Subject: fix: crash when calling `llama_state_get_size` on a context without a KV cache (...
X-Git-Url: https://git.djapps.eu/?a=commitdiff_plain;h=017f10b5fa630a013ec4f9936e410a60d4f460d5;p=pkg%2Fggml%2Fsources%2Fllama.cpp

fix: crash when calling `llama_state_get_size` on a context without a KV cache (#13542)
---

diff --git a/src/llama-context.cpp b/src/llama-context.cpp
index 62246c10..1b76317d 100644
--- a/src/llama-context.cpp
+++ b/src/llama-context.cpp
@@ -1704,10 +1704,12 @@ size_t llama_context::state_write_data(llama_io_write_i & io) {
         }
     }
 
-    LLAMA_LOG_DEBUG("%s: - writing KV self\n", __func__);
     llama_kv_cache * kv_self = static_cast<llama_kv_cache *>(memory.get());
-
-    kv_self->state_write(io);
+
+    if (kv_self != nullptr) {
+        LLAMA_LOG_DEBUG("%s: - writing KV self\n", __func__);
+        kv_self->state_write(io);
+    }
 
     return io.n_bytes();
 }
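
For context, a minimal caller-side sketch (not part of the commit) of the scenario the new null check guards against: querying the state size of a context whose KV cache was never allocated. "model.gguf" is a placeholder path, and it is assumed here that the loaded model yields a context without a KV cache; only public llama.h entry points are used.

// Sketch: calling llama_state_get_size on a context without a KV cache.
// Before this change, state_write_data() dereferenced the KV-cache pointer
// unconditionally and crashed; with the guard it reports the size of the
// remaining state instead.
#include "llama.h"
#include <cstdio>

int main() {
    llama_backend_init();

    llama_model * model = llama_model_load_from_file("model.gguf", llama_model_default_params());
    if (model == nullptr) {
        std::fprintf(stderr, "failed to load model\n");
        return 1;
    }

    llama_context * ctx = llama_init_from_model(model, llama_context_default_params());
    if (ctx == nullptr) {
        std::fprintf(stderr, "failed to create context\n");
        llama_model_free(model);
        return 1;
    }

    // The call that previously crashed when no KV cache was present.
    const size_t state_size = llama_state_get_size(ctx);
    std::printf("state size: %zu bytes\n", state_size);

    llama_free(ctx);
    llama_model_free(model);
    llama_backend_free();
    return 0;
}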