     flake-parts.url = "github:hercules-ci/flake-parts";
   };
 
-  # Optional binary cache
-  nixConfig = {
-    extra-substituters = [
-      # Populated by the CI in ggerganov/llama.cpp
-      "https://llama-cpp.cachix.org"
-
-      # A development cache for nixpkgs imported with `config.cudaSupport = true`.
-      # Populated by https://hercules-ci.com/github/SomeoneSerge/nixpkgs-cuda-ci.
-      # This lets one skip building e.g. the CUDA-enabled openmpi.
-      # TODO: Replace once nix-community obtains an official one.
-      "https://cuda-maintainers.cachix.org"
-    ];
-
-    # Verify these are the same keys as published on
-    # - https://app.cachix.org/cache/llama-cpp
-    # - https://app.cachix.org/cache/cuda-maintainers
-    extra-trusted-public-keys = [
-      "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc="
-      "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E="
-    ];
-  };
-
+  # There's an optional binary cache available. The details are below, but they're commented out.
+  #
+  # Why? Being prompted to accept them on every single Nix command is a terrible experience,
+  # and a default Nix install additionally warns about not being a trusted user
+  # if you *do* say yes to the prompts.
+  #
+  # That experience makes carrying `nixConfig` in a flake a persistent UX problem.
+  #
+  # To make use of the binary cache, add the relevant settings to your `nix.conf` instead;
+  # it's located at `/etc/nix/nix.conf` on non-NixOS systems. On NixOS, set the `nix.settings`
+  # option in your NixOS configuration to add `extra-substituters` and `extra-trusted-public-keys`,
+  # as shown below.
+  #
+  # ```
+  # nixConfig = {
+  #   extra-substituters = [
+  #     # Populated by the CI in ggerganov/llama.cpp
+  #     "https://llama-cpp.cachix.org"
+  #
+  #     # A development cache for nixpkgs imported with `config.cudaSupport = true`.
+  #     # Populated by https://hercules-ci.com/github/SomeoneSerge/nixpkgs-cuda-ci.
+  #     # This lets one skip building e.g. the CUDA-enabled openmpi.
+  #     # TODO: Replace once nix-community obtains an official one.
+  #     "https://cuda-maintainers.cachix.org"
+  #   ];
+  #
+  #   # Verify these are the same keys as published on
+  #   # - https://app.cachix.org/cache/llama-cpp
+  #   # - https://app.cachix.org/cache/cuda-maintainers
+  #   extra-trusted-public-keys = [
+  #     "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc="
+  #     "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E="
+  #   ];
+  # };
+  # ```
 
   # For inspection, use `nix flake show github:ggerganov/llama.cpp` or the nix repl:
   #