git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
docker : include legacy llama-completion binary (#17964)
author: Sigbjørn Skjæret <redacted>
Fri, 12 Dec 2025 18:39:23 +0000 (19:39 +0100)
committer: GitHub <redacted>
Fri, 12 Dec 2025 18:39:23 +0000 (19:39 +0100)
.devops/cann.Dockerfile
.devops/cpu.Dockerfile
.devops/cuda.Dockerfile
.devops/intel.Dockerfile
.devops/musa.Dockerfile
.devops/rocm.Dockerfile
.devops/s390x.Dockerfile
.devops/tools.sh
.devops/vulkan.Dockerfile

index cd8f87b2ea60fed3253b2f53f4581f83e61894b0..9d27e00fb1c8a0205e5e0be59b82fb245c1dc44a 100644 (file)
@@ -111,7 +111,7 @@ ENTRYPOINT ["/app/tools.sh"]
 # ==============================================================================
 FROM base AS light
 
-COPY --from=build /app/full/llama-cli /app
+COPY --from=build /app/full/llama-cli /app/full/llama-completion /app
 
 ENTRYPOINT [ "/app/llama-cli" ]
 
index 6e16ecda44f036625fde5d5cf098ad1e8be04c8f..b9e84ab986a77732bac9cc30c3891eaf46008be3 100644 (file)
@@ -68,7 +68,7 @@ ENTRYPOINT ["/app/tools.sh"]
 ### Light, CLI only
 FROM base AS light
 
-COPY --from=build /app/full/llama-cli /app
+COPY --from=build /app/full/llama-cli /app/full/llama-completion /app
 
 WORKDIR /app
 
index 54f793d0a3f530cb36402156015d8ddcdd58e0e8..fed586315799864075b6ae4e0d8e9f5f4a412d68 100644 (file)
@@ -74,7 +74,7 @@ ENTRYPOINT ["/app/tools.sh"]
 ### Light, CLI only
 FROM base AS light
 
-COPY --from=build /app/full/llama-cli /app
+COPY --from=build /app/full/llama-cli /app/full/llama-completion /app
 
 WORKDIR /app
 
index d1a8fbed4cf1a32814a257bd569f74c8d7624925..adebf08229740bad801a3e2aa6c012db8a3a8b0a 100644 (file)
@@ -73,7 +73,7 @@ ENTRYPOINT ["/app/tools.sh"]
 FROM base AS light
 
 COPY --from=build /app/lib/ /app
-COPY --from=build /app/full/llama-cli /app
+COPY --from=build /app/full/llama-cli /app/full/llama-completion /app
 
 WORKDIR /app
 
index faa3500e619deba5e4f7bd31f62add76964c5e6f..34d6ad9f40faa40377b76740d65102a38bd58b34 100644 (file)
@@ -81,7 +81,7 @@ ENTRYPOINT ["/app/tools.sh"]
 ### Light, CLI only
 FROM base AS light
 
-COPY --from=build /app/full/llama-cli /app
+COPY --from=build /app/full/llama-cli /app/full/llama-completion /app
 
 WORKDIR /app
 
index d6bf28b1058829ac04f951754b3ae5a90187cb0c..53c3ed8d88068f01bd2b482e99f8342bd1bfff02 100644 (file)
@@ -94,7 +94,7 @@ ENTRYPOINT ["/app/tools.sh"]
 ### Light, CLI only
 FROM base AS light
 
-COPY --from=build /app/full/llama-cli /app
+COPY --from=build /app/full/llama-cli /app/full/llama-completion /app
 
 WORKDIR /app
 
index b7c9457680b08914fd6bf6712b6a7b4f31c29e3b..1e66f061d53f2904c43545b7c29ff864fcb16d85 100644 (file)
@@ -105,7 +105,7 @@ WORKDIR /llama.cpp/bin
 
 # Copy llama.cpp binaries and libraries
 COPY --from=collector /llama.cpp/bin/*.so /llama.cpp/bin
-COPY --from=collector /llama.cpp/bin/llama-cli /llama.cpp/bin
+COPY --from=collector /llama.cpp/bin/llama-cli /llama.cpp/bin/llama-completion /llama.cpp/bin
 
 ENTRYPOINT [ "/llama.cpp/bin/llama-cli" ]
 
index 8a3a69340059c580777eaea94f0ccc546544a541..cc5ee17dfdb3d5ec79adde7a471cffe5e4d86349 100755 (executable)
@@ -13,6 +13,8 @@ elif [[ "$arg1" == '--quantize' || "$arg1" == '-q' ]]; then
     exec ./llama-quantize "$@"
 elif [[ "$arg1" == '--run' || "$arg1" == '-r' ]]; then
     exec ./llama-cli "$@"
+elif [[ "$arg1" == '--run-legacy' || "$arg1" == '-l' ]]; then
+    exec ./llama-completion "$@"
 elif [[ "$arg1" == '--bench' || "$arg1" == '-b' ]]; then
     exec ./llama-bench "$@"
 elif [[ "$arg1" == '--perplexity' || "$arg1" == '-p' ]]; then
@@ -32,8 +34,10 @@ elif [[ "$arg1" == '--server' || "$arg1" == '-s' ]]; then
 else
     echo "Unknown command: $arg1"
     echo "Available commands: "
-    echo "  --run (-r): Run a model previously converted into ggml"
-    echo "              ex: -m /models/7B/ggml-model-q4_0.bin -p \"Building a website can be done in 10 simple steps:\" -n 512"
+    echo "  --run (-r): Run a model (chat) previously converted into ggml"
+    echo "              ex: -m /models/7B/ggml-model-q4_0.bin"
+    echo "  --run-legacy (-l): Run a model (legacy completion) previously converted into ggml"
+    echo "              ex: -m /models/7B/ggml-model-q4_0.bin -no-cnv -p \"Building a website can be done in 10 simple steps:\" -n 512"
     echo "  --bench (-b): Benchmark the performance of the inference for various parameters."
     echo "              ex: -m model.gguf"
     echo "  --perplexity (-p): Measure the perplexity of a model over a given text."
index fd7195c5bebab4df7eb84ef9e55ed81f56e8d125..b37b4f277d738fafea90795ccd417649a27a7e25 100644 (file)
@@ -68,7 +68,7 @@ ENTRYPOINT ["/app/tools.sh"]
 ### Light, CLI only
 FROM base AS light
 
-COPY --from=build /app/full/llama-cli /app
+COPY --from=build /app/full/llama-cli /app/full/llama-completion /app
 
 WORKDIR /app