Make sure that at least the CPU backend is available for utilities
author     Mathieu Baudier <redacted>
           Tue, 21 Jan 2025 12:31:47 +0000 (13:31 +0100)
committer  Mathieu Baudier <redacted>
           Tue, 21 Jan 2025 12:31:47 +0000 (13:31 +0100)
debian/control

index 919503e30db0a9bc5d4b56d7479b17302710acd7..a672883615c83d7a5352dec5f280b9f0fd0cab96 100644
@@ -22,7 +22,7 @@ Package: llama-cpp-cli
 Architecture: any
 Priority: optional
 Depends: ${misc:Depends}, ${shlibs:Depends},
- libllama, curl
+ libllama, ggml, curl
 Description: Inference of LLMs in pure C/C++ (CLI)
  Llama.cpp inference of LLMs in pure C/C++ (CLI).
 
@@ -30,7 +30,7 @@ Package: llama-cpp-server
 Architecture: any
 Priority: optional
 Depends: ${misc:Depends}, ${shlibs:Depends},
- libllama, curl, openssl
+ libllama, ggml, curl, openssl
 Description: Inference of LLMs in pure C/C++ (CLI)
  Llama.cpp inference of LLMs in pure C/C++ (CLI).
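Note on the change: declaring the "ggml" binary package as an explicit dependency of llama-cpp-cli and llama-cpp-server guarantees that the base ggml runtime, and with it at least the CPU backend, is installed alongside the utilities even when no accelerated backend package (e.g. a CUDA or Vulkan variant) is present. A quick way to confirm this at runtime is to enumerate the backend devices that ggml registers. The following is a minimal sketch, assuming the packaged ggml exposes the device-enumeration API from ggml-backend.h (ggml_backend_load_all, ggml_backend_dev_count, ggml_backend_dev_get, ggml_backend_dev_name); it is an illustration, not part of the packaging.

// check_backends.c - list the ggml backend devices visible to this install
#include <stdio.h>
#include "ggml-backend.h"

int main(void) {
    // Load any dynamically installed backends; a no-op for static builds.
    ggml_backend_load_all();

    // With only the base "ggml" package installed, the CPU device should
    // still appear here; accelerated packages add further devices.
    size_t n = ggml_backend_dev_count();
    for (size_t i = 0; i < n; i++) {
        ggml_backend_dev_t dev = ggml_backend_dev_get(i);
        printf("device %zu: %s\n", i, ggml_backend_dev_name(dev));
    }
    return n > 0 ? 0 : 1;
}

If the program lists no devices (exit status 1), the utilities would have no backend to run on, which is exactly the situation this dependency change is meant to rule out.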