nix : add cuda, use a symlinked toolkit for cmake (#3202)
author      Erik Scholz <redacted>
            Mon, 25 Sep 2023 11:48:30 +0000 (13:48 +0200)
committer   GitHub <redacted>
            Mon, 25 Sep 2023 11:48:30 +0000 (13:48 +0200)
flake.nix

index 7723357afe41918757c0dd2372d3cea3b2aad7de..433d3d942ce4057466273ed003f3bddb15bc497b 100644 (file)
--- a/flake.nix
+++ b/flake.nix
         );
         pkgs = import nixpkgs { inherit system; };
         nativeBuildInputs = with pkgs; [ cmake ninja pkg-config ];
+        cudatoolkit_joined = with pkgs; symlinkJoin {
+          # HACK(Green-Sky): nix currently has issues with cmake findcudatoolkit
+          # see https://github.com/NixOS/nixpkgs/issues/224291
+          # copied from jaxlib
+          name = "${cudaPackages.cudatoolkit.name}-merged";
+          paths = [
+            cudaPackages.cudatoolkit.lib
+            cudaPackages.cudatoolkit.out
+          ] ++ lib.optionals (lib.versionOlder cudaPackages.cudatoolkit.version "11") [
+            # for some reason some of the required libs are in the targets/x86_64-linux
+            # directory; not sure why but this works around it
+            "${cudaPackages.cudatoolkit}/targets/${system}"
+          ];
+        };
         llama-python =
           pkgs.python3.withPackages (ps: with ps; [ numpy sentencepiece ]);
         postPatch = ''
             "-DLLAMA_CLBLAST=ON"
           ];
         };
+        packages.cuda = pkgs.stdenv.mkDerivation {
+          inherit name src meta postPatch nativeBuildInputs postInstall;
+          buildInputs = with pkgs; buildInputs ++ [ cudatoolkit_joined ];
+          cmakeFlags = cmakeFlags ++ [
+            "-DLLAMA_CUBLAS=ON"
+          ];
+        };
         packages.rocm = pkgs.stdenv.mkDerivation {
           inherit name src meta postPatch nativeBuildInputs postInstall;
           buildInputs = with pkgs; buildInputs ++ [ hip hipblas rocblas ];