{ pkgs, service_configs, config, inputs, lib, ... }:
{
  services.llama-cpp = {
    enable = true;

    # Quantized GGUF model, fetched directly from Hugging Face and pinned by hash.
    model = builtins.toString (pkgs.fetchurl {
      url = "https://huggingface.co/rodrigomt/Qwen3-30B-A3B-Thinking-Deepseek-Distill-2507-v3.1-V2-GGUF/resolve/main/Qwen3-30B-A3B-Thinking-Deepseek-Distill-2507-v3.1-V2-UD-Q4_K_XL.gguf";
      sha256 = "1a3abffc8463041e24cdc43af26c99b6cfab1d2ee78fef0d793033ec0e5b58aa";
    });

    port = service_configs.ports.llama_cpp;
    host = "0.0.0.0";

    # Vulkan is broken (https://github.com/ggml-org/llama.cpp/issues/13801),
    # so use the upstream flake's package instead of the nixpkgs default.
    package = lib.optimizePackage inputs.llamacpp.packages.${pkgs.system}.default;

    extraFlags = [
      # GPU layer offload, disabled for now:
      # "-ngl"
      # "9999"
    ];
  };

  # DynamicUser has to be forced off for Vulkan to work: the transient
  # dynamic user lacks access to the GPU device nodes.
  systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;

  services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
    ${builtins.readFile ../secrets/caddy_auth}
    reverse_proxy :${builtins.toString config.services.llama-cpp.port}
  '';
}
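
# For reference, a minimal sketch of the attrset shape this module expects
# from `service_configs` (attribute names inferred from the references
# above; the concrete values are hypothetical and live elsewhere in the
# flake):
#
#   service_configs = {
#     ports.llama_cpp = 8080;        # hypothetical port
#     https.domain = "example.com";  # hypothetical domain
#   };
#
# ../secrets/caddy_auth is assumed to contain a Caddy auth snippet
# (e.g. a basic_auth directive) that gets inlined into the vhost config.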