git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
flake.nix: expose full scope in legacyPackages
author: Someone Serge <redacted>
Fri, 29 Dec 2023 16:15:37 +0000 (16:15 +0000)
committer: Philip Taron <redacted>
Sun, 31 Dec 2023 21:14:58 +0000 (13:14 -0800)
.devops/nix/jetson-support.nix
flake.nix

index 08426d2abb7ec78cbbd68538a01d8f9bf66bec6d..78e2e40e03864e3df046389f7b751a1fd4575656 100644 (file)
@@ -8,12 +8,13 @@
       pkgsCuda,
       ...
     }:
-    lib.optionalAttrs (system == "aarch64-linux") {
-      packages =
+    {
+      legacyPackages =
         let
-          caps.jetson-xavier = "7.2";
-          caps.jetson-orin = "8.7";
-          caps.jetson-nano = "5.3";
+          caps.llamaPackagesXavier = "7.2";
+          caps.llamaPackagesOrin = "8.7";
+          caps.llamaPackagesTX2 = "6.2";
+          caps.llamaPackagesNano = "5.3";
 
           pkgsFor =
             cap:
               };
             };
         in
-        builtins.mapAttrs (name: cap: ((pkgsFor cap).callPackage ./scope.nix { }).llama-cpp) caps;
+        builtins.mapAttrs (name: cap: (pkgsFor cap).callPackage ./scope.nix { }) caps;
+
+      packages = lib.optionalAttrs (system == "aarch64-linux") {
+        jetson-xavier = config.legacyPackages.llamaPackagesXavier.llama-cpp;
+        jetson-orin = config.legacyPackages.llamaPackagesOrin.llama-cpp;
+        jetson-nano = config.legacyPackages.llamaPackagesNano.llama-cpp;
+      };
     };
 }
index 2209070aa83cd53de27a92ffa857f063e0f5e684..6785b52f442e5a978fff796a906c1cc87f50e7b9 100644 (file)
--- a/flake.nix
+++ b/flake.nix
             ...
           }:
           {
+            # Unlike `.#packages`, legacyPackages may contain values of
+            # arbitrary types (including nested attrsets) and may even throw
+            # exceptions. This attribute isn't recursed into by `nix flake
+            # show` either.
+            #
+            # You can add arbitrary scripts to `.devops/nix/scope.nix` and
+            # access them as `nix build .#llamaPackages.${scriptName}` using
+            # the same path you would with an overlay.
+            legacyPackages = {
+              llamaPackages = pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+              llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+              llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+            };
+
             # We don't use the overlay here so as to avoid making too many instances of nixpkgs,
             # cf. https://zimbatm.com/notes/1000-instances-of-nixpkgs
             packages =
               {
-                default = (pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
+                default = config.legacyPackages.llamaPackages.llama-cpp;
               }
               // lib.optionalAttrs pkgs.stdenv.isLinux {
                 opencl = config.packages.default.override { useOpenCL = true; };
-                cuda = (pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
-                rocm = (pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
+                cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp;
+                rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp;
 
                 mpi-cpu = config.packages.default.override { useMpi = true; };
                 mpi-cuda = config.packages.default.override { useMpi = true; };