# server-config/services/llama-cpp.nix
{
  pkgs,
  service_configs,
  config,
  inputs,
  optimizePackage,
  ...
}:
{
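  # llama.cpp inference server, fronted by Caddy below. `service_configs` and
  # `optimizePackage` are project-specific helpers passed in via the module
  # arguments above.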
  services.llama-cpp = {
    enable = true;
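    # Fetch the quantized GGUF model into the Nix store, pinned by sha256 for
    # reproducibility; builtins.toString yields the resulting store path.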
    model = builtins.toString (
      pkgs.fetchurl {
        url = "https://huggingface.co/mradermacher/Gemma-3-R1984-12B-GGUF/resolve/main/Gemma-3-R1984-12B.IQ4_XS.gguf";
        sha256 = "f6d94f4bc6bd2101617f0c2b0b7883d20d74018da101e440cb7e7a55514fe78d";
      }
    );
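    # Listen on all interfaces; the port comes from the shared port map so the
    # Caddy reverse proxy below stays in sync with it.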
    port = service_configs.ports.llama_cpp;
    host = "0.0.0.0";
    package = optimizePackage (
      inputs.llamacpp.packages.${pkgs.system}.default.overrideAttrs (old: {
        cmakeFlags = old.cmakeFlags ++ [
          "-DGGML_AVX2=ON"
        ];
      })
    );
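    # Enable flash attention in llama-server.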
    extraFlags = [
      "--flash-attn"
    ];
  };
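  # Expose the server at llm.<domain> behind Caddy, prepending an auth snippet
  # read from an out-of-tree secrets file (presumably basic_auth credentials).
  # Once deployed, a quick smoke test against the OpenAI-compatible API:
  #   curl -u user:pass https://llm.<domain>/v1/models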
  services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
    ${builtins.readFile ../secrets/caddy_auth}
    reverse_proxy :${builtins.toString config.services.llama-cpp.port}
  '';
}