server-config/services/llama-cpp.nix

{
  pkgs,
  service_configs,
  config,
  inputs,
  optimizePackage,
  ...
}:
{
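  # llama.cpp inference server, serving a Q4_0-quantized GGUF build of
  # agentica-org's DeepCoder 14B preview model.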
  services.llama-cpp = {
    enable = true;
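    # Fetch the model into the Nix store, pinned by its sha256, and hand
    # the service its store path as a plain string.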
    model = builtins.toString (
      pkgs.fetchurl {
        url = "https://huggingface.co/bartowski/agentica-org_DeepCoder-14B-Preview-GGUF/resolve/main/agentica-org_DeepCoder-14B-Preview-Q4_0.gguf";
        sha256 = "6f60030be2287d6a1d52c91e6880352ed99e18da6d955a6204c77cfeaebbca01";
      }
    );
    port = service_configs.ports.llama_cpp;
    host = "0.0.0.0";
    package = optimizePackage (
      inputs.llamacpp.packages.${pkgs.system}.default.overrideAttrs (old: {
        cmakeFlags = old.cmakeFlags ++ [
          "-DGGML_AVX2=ON"
        ];
      })
    );
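    # Pass --flash-attn through to the server to enable flash attention.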
    extraFlags = [
      "--flash-attn"
    ];
  };
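  # Publish the server at llm.<domain> through Caddy; the secrets file is
  # expected to contain an access-control directive (e.g. basic_auth) so
  # the endpoint is not exposed unauthenticated.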
  services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
    ${builtins.readFile ../secrets/caddy_auth}
    reverse_proxy :${builtins.toString config.services.llama-cpp.port}
  '';
}