{
  pkgs,
  service_configs,
  config,
  inputs,
  optimizePackage,
  lib,
  ...
}:
{
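  # llama.cpp's built-in server; it serves the model over HTTP, including
  # an OpenAI-compatible /v1 API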
  services.llama-cpp = {
    enable = true;
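    # fetchurl pins the GGUF by hash and downloads it into the Nix store;
    # toString yields the resulting store path, which is used as the model file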
    model = builtins.toString (
      pkgs.fetchurl {
        url = "https://huggingface.co/bartowski/deepseek-ai_DeepSeek-R1-0528-Qwen3-8B-GGUF/resolve/main/deepseek-ai_DeepSeek-R1-0528-Qwen3-8B-Q4_0.gguf";
        sha256 = "a71a983c64eb72a2b4a885993cd0675474afe7e92d72b051ab8716b23157daa0";
      }
    );
    port = service_configs.ports.llama_cpp;
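    # listen on all interfaces, not just loopback; TLS and auth live in the
    # caddy vhost below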
    host = "0.0.0.0";
    # vulkan broken: https://github.com/ggml-org/llama.cpp/issues/13801
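    # build from the llama.cpp flake input rather than nixpkgs, wrapped in
    # the optimizePackage helper passed in above (presumably machine-specific
    # optimization flags)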
    package = (optimizePackage inputs.llamacpp.packages.${pkgs.system}.default);
    extraFlags = [
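      # -ngl sets how many layers to offload to the GPU (9999 = all of them);
      # left disabled until the vulkan issue above is fixed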
# "-ngl"
|
|
# "9999"
|
|
];
|
|
};
|
|
|
|
  # vulkan only works with DynamicUser off: the sandbox's transient user
  # can't open the GPU device nodes
  systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;

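  # expose the server at llm.<domain> through caddy: auth directives come
  # from the secrets file, and requests are reverse-proxied to the local
  # llama-cpp port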
  services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
    ${builtins.readFile ../secrets/caddy_auth}
    reverse_proxy :${builtins.toString config.services.llama-cpp.port}
  '';
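
  # once up, it answers like any OpenAI-style endpoint (plus whatever
  # credentials caddy_auth demands), e.g.:
  #   curl https://llm.<domain>/v1/chat/completions \
  #     -H "Content-Type: application/json" \
  #     -d '{"messages": [{"role": "user", "content": "hi"}]}'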
}