llama : allow building all tests on windows when not using shared libs (#13980)
author Diego Devesa <redacted>
Mon, 9 Jun 2025 18:03:09 +0000 (11:03 -0700)
committer GitHub <redacted>
Mon, 9 Jun 2025 18:03:09 +0000 (20:03 +0200)
* llama : allow building all tests on windows when not using shared libraries

* add static windows build to ci

* tests : enable debug logs for test-chat

---------

Co-authored-by: Georgi Gerganov <redacted>
.github/workflows/build.yml
tests/CMakeLists.txt
tests/test-chat.cpp

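For local reproduction, a configuration along the lines of the new CI job should build the complete test suite statically on Windows. This is a minimal sketch only; the actual job additionally sets the LLVM toolchain file and the other defines shown in the build.yml hunk below:

    cmake -B build -G "Ninja Multi-Config" -DBUILD_SHARED_LIBS=OFF -DGGML_NATIVE=OFF
    cmake --build build --config Release
    ctest --test-dir build -C Release --output-on-failure
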
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 867a589ce16485c33d8d25596e6a44a1bde2662a..3c9804d437cdc0d291ea29ad4c1da1c1f18dc290 100644 (file)
@@ -687,8 +687,8 @@ jobs:
     strategy:
       matrix:
         include:
-          - build: 'cpu-x64'
-            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF'
+          - build: 'cpu-x64 (static)'
+            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DBUILD_SHARED_LIBS=OFF'
           - build: 'openblas-x64'
             defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS -DBLAS_INCLUDE_DIRS="$env:RUNNER_TEMP/openblas/include" -DBLAS_LIBRARIES="$env:RUNNER_TEMP/openblas/lib/openblas.lib"'
           - build: 'vulkan-x64'
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 83f7d1a4584f79ebea8498bb2ac37a508eed818f..2f7bad2cf7ec9d7579f8785aa4d27f9d0571d275 100644 (file)
@@ -104,8 +104,8 @@ if (LLAMA_LLGUIDANCE)
     llama_build_and_test(test-grammar-llguidance.cpp ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama-bpe.gguf)
 endif ()
 
-if (NOT WIN32)
-    # these tests are disabled on Windows because they use internal functions not exported with LLAMA_API
+if (NOT WIN32 OR NOT BUILD_SHARED_LIBS)
+    # these tests are disabled on Windows because they use internal functions not exported with LLAMA_API (when building with shared libraries)
     llama_build_and_test(test-sampling.cpp)
     llama_build_and_test(test-grammar-parser.cpp)
     llama_build_and_test(test-grammar-integration.cpp)
diff --git a/tests/test-chat.cpp b/tests/test-chat.cpp
index c6d998f1019124c11b0fd276366e68e200434392..6ebf1464d911a9317f92beb7fc0c2b0916480c3f 100644 (file)
@@ -7,6 +7,8 @@
 //
 #include "chat.h"
 
+#include "log.h"
+
 #include "../src/unicode.h"
 #include "../src/llama-grammar.h"
 
@@ -1428,6 +1430,8 @@ static void test_msg_diffs_compute() {
 }
 
 int main(int argc, char ** argv) {
+    common_log_set_verbosity_thold(999);
+
     // try {
 #ifndef _WIN32
         if (argc > 1) {
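
With the verbosity threshold raised to 999 at the start of main(), debug-level messages from the common logger should no longer be filtered, so running the test binary directly shows the extra output (illustrative invocation, assuming the default build layout):

    ./build/bin/test-chat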