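# llama.cpp inference server serving gpt-oss-20b, reverse-proxied through
# Caddy with auth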
{
  pkgs,
  service_configs,
  config,
  inputs,
  lib,
  ...
}:
{
  services.llama-cpp = {
    enable = true;
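    # fetchurl puts the GGUF weights in the Nix store; toString yields the store path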
    model = builtins.toString (
      pkgs.fetchurl {
        url = "https://huggingface.co/ggml-org/gpt-oss-20b-GGUF/resolve/main/gpt-oss-20b-mxfp4.gguf";
        sha256 = "52f57ab7d3df3ba9173827c1c6832e73375553a846f3e32b49f1ae2daad688d4";
      }
    );
    port = service_configs.ports.llama_cpp;
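    # bind to all interfaces, not just loopback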
    host = "0.0.0.0";
    # vulkan broken: https://github.com/ggml-org/llama.cpp/issues/13801
    package = lib.optimizePackage inputs.llamacpp.packages.${pkgs.system}.default;
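    # "-ngl 9999" would offload all layers to the GPU; kept disabled while the
    # Vulkan backend is broken (see the issue linked above)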
    extraFlags = [
      # "-ngl"
      # "9999"
    ];
  };

  # DynamicUser has to be forced off for Vulkan to work
  systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;

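  # the auth directives are read from ../secrets/caddy_auth at eval time and
  # inlined into the vhost config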
  services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
    ${builtins.readFile ../secrets/caddy_auth}
    reverse_proxy :${builtins.toString config.services.llama-cpp.port}
  '';
}