pkgsCuda,
...
}:
- lib.optionalAttrs (system == "aarch64-linux") {
- packages =
+ {
+ legacyPackages =
let
- caps.jetson-xavier = "7.2";
- caps.jetson-orin = "8.7";
- caps.jetson-nano = "5.3";
+ caps.llamaPackagesXavier = "7.2";
+ caps.llamaPackagesOrin = "8.7";
+ caps.llamaPackagesTX2 = "6.2";
+ caps.llamaPackagesNano = "5.3";
pkgsFor =
cap:
};
};
in
- builtins.mapAttrs (name: cap: ((pkgsFor cap).callPackage ./scope.nix { }).llama-cpp) caps;
+ builtins.mapAttrs (name: cap: (pkgsFor cap).callPackage ./scope.nix { }) caps;
+
+ # Jetson device builds, exposed only on aarch64-linux (the Jetson
+ # target architecture). One flake output per CUDA capability listed
+ # in `caps` above; each is the `llama-cpp` package from the
+ # corresponding legacyPackages scope.
+ packages = lib.optionalAttrs (system == "aarch64-linux") {
+ jetson-xavier = config.legacyPackages.llamaPackagesXavier.llama-cpp;
+ jetson-orin = config.legacyPackages.llamaPackagesOrin.llama-cpp;
+ # TX2 (cap 6.2) is declared in `caps` above but was previously not
+ # exposed here; surface it for consistency with the other devices.
+ jetson-tx2 = config.legacyPackages.llamaPackagesTX2.llama-cpp;
+ jetson-nano = config.legacyPackages.llamaPackagesNano.llama-cpp;
+ };
};
}
...
}:
{
+ # Unlike `.#packages`, legacyPackages may contain values of
+ # arbitrary types (including nested attrsets) and may even throw
+ # exceptions. This attribute isn't recursed into by `nix flake
+ # show` either.
+ #
+ # You can add arbitrary scripts to `.devops/nix/scope.nix` and
+ # access them as `nix build .#llamaPackages.${scriptName}` using
+ # the same path you would with an overlay.
+ legacyPackages =
+   let
+     # Instantiate the llama.cpp package scope from a given nixpkgs
+     # instance (plain, CUDA, or ROCm), pinning the shared version.
+     mkScope = pkgSet: pkgSet.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+   in
+   {
+     llamaPackages = mkScope pkgs;
+     llamaPackagesCuda = mkScope pkgsCuda;
+     llamaPackagesRocm = mkScope pkgsRocm;
+   };
+
# We don't use the overlay here so as to avoid making too many instances of nixpkgs,
# cf. https://zimbatm.com/notes/1000-instances-of-nixpkgs
packages =
{
- default = (pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
+ default = config.legacyPackages.llamaPackages.llama-cpp;
}
// lib.optionalAttrs pkgs.stdenv.isLinux {
opencl = config.packages.default.override { useOpenCL = true; };
- cuda = (pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
- rocm = (pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
+ cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp;
+ rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp;
mpi-cpu = config.packages.default.override { useMpi = true; };
mpi-cuda = config.packages.default.override { useMpi = true; };