llama-bench : add support for the RPC backend (llama/7435)
author     Radoslav Gerganov <redacted>
           Wed, 29 May 2024 11:45:44 +0000 (14:45 +0300)
committer  Georgi Gerganov <redacted>
           Sat, 15 Jun 2024 19:05:47 +0000 (22:05 +0300)
include/ggml/ggml.h
src/ggml.c

diff --git a/include/ggml/ggml.h b/include/ggml/ggml.h
index 3859895b6e72db5667b2801f4a72a27c482da138..f9deac7e8054e628fd5a46448a5d1c53ee1b1533 100644
--- a/include/ggml/ggml.h
+++ b/include/ggml/ggml.h
@@ -2428,6 +2428,7 @@ extern "C" {
     GGML_API int ggml_cpu_has_sse3       (void);
     GGML_API int ggml_cpu_has_ssse3      (void);
     GGML_API int ggml_cpu_has_sycl       (void);
+    GGML_API int ggml_cpu_has_rpc        (void);
     GGML_API int ggml_cpu_has_vsx        (void);
     GGML_API int ggml_cpu_has_matmul_int8(void);
 
diff --git a/src/ggml.c b/src/ggml.c
index d8f74f3ceaf5da026988c807dbf0aaff129d7022..e6e2397b7848b68ad5ca658d7db822c4387bdfd0 100644
--- a/src/ggml.c
+++ b/src/ggml.c
@@ -22872,6 +22872,14 @@ int ggml_cpu_has_sycl(void) {
 #endif
 }
 
+int ggml_cpu_has_rpc(void) {
+#if defined(GGML_USE_RPC)
+    return 1;
+#else
+    return 0;
+#endif
+}
+
 int ggml_cpu_has_gpublas(void) {
     return ggml_cpu_has_cuda() || ggml_cpu_has_clblast() || ggml_cpu_has_vulkan() || ggml_cpu_has_kompute() ||
            ggml_cpu_has_sycl();
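
A minimal sketch (not part of this commit) of how a caller such as llama-bench could surface the new capability flag. Only ggml_cpu_has_rpc() and ggml_cpu_has_sycl() come from the diff above; the standalone program and the "ggml.h" include path are assumptions for illustration.

#include <stdio.h>
#include "ggml.h"

int main(void) {
    // ggml_cpu_has_rpc() returns 1 only when ggml was built with GGML_USE_RPC defined,
    // mirroring the other ggml_cpu_has_* capability queries
    printf("RPC:  %d\n", ggml_cpu_has_rpc());
    printf("SYCL: %d\n", ggml_cpu_has_sycl());
    return 0;
}

llama-bench can consult ggml_cpu_has_rpc() the same way it uses the other ggml_cpu_has_* flags when reporting which backends a given build includes.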