llm: use vulkan

Simon Gardling 2025-04-30 12:59:07 -04:00
parent 870093686a
commit fb4043712e
Signed by: titaniumtown
GPG Key ID: 9AB28AC10ECE533D


@@ -4,6 +4,7 @@
   config,
   inputs,
   optimizePackage,
+  lib,
   ...
 }:
 {
@@ -17,20 +18,17 @@
     );
     port = service_configs.ports.llama_cpp;
     host = "0.0.0.0";
-    package = (
-      optimizePackage (
-        inputs.llamacpp.packages.${pkgs.system}.default.overrideAttrs (old: {
-          cmakeFlags = old.cmakeFlags ++ [
-            "-DGGML_AVX2=ON"
-          ];
-        })
-      )
-    );
+    package = (optimizePackage inputs.llamacpp.packages.${pkgs.system}.vulkan);
     extraFlags = [
       "--flash-attn"
       "-ngl"
       "9999"
     ];
   };
+  # have to do this in order to get vulkan to work
+  systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;
   services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
     ${builtins.readFile ../secrets/caddy_auth}
     reverse_proxy :${builtins.toString config.services.llama-cpp.port}
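
The package change swaps the hand-rolled AVX2 override for the flake's prebuilt Vulkan output. As a rough sketch of what that output likely amounts to (an assumption, not read from the flake: -DGGML_VULKAN=ON is llama.cpp's actual Vulkan cmake switch, but the extra build inputs here are guesses), the removed override pattern could have been kept and repointed:

  # Hypothetical equivalent of inputs.llamacpp.packages.${pkgs.system}.vulkan,
  # mirroring the removed AVX2 override; GGML_VULKAN enables the Vulkan backend.
  package = optimizePackage (
    inputs.llamacpp.packages.${pkgs.system}.default.overrideAttrs (old: {
      cmakeFlags = old.cmakeFlags ++ [ "-DGGML_VULKAN=ON" ];
      buildInputs = (old.buildInputs or [ ]) ++ [
        pkgs.vulkan-headers
        pkgs.vulkan-loader
        pkgs.shaderc
      ];
    })
  );

With a GPU backend in place, the existing extraFlags do real work: "-ngl" "9999" tells llama.cpp to offload effectively all model layers to the GPU, and "--flash-attn" enables its flash-attention path.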
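
On the DynamicUser workaround (also why lib was added to the module arguments: it supplies lib.mkForce): DynamicUser runs the service under a transient UID with no supplementary groups, so it generally cannot open the GPU render nodes under /dev/dri that the Vulkan loader needs. Forcing it off falls back to a static service user. A gentler alternative, sketched here as an untested assumption rather than what this commit does, would keep the DynamicUser sandbox and grant device access through supplementary groups:

  # Hypothetical alternative: keep DynamicUser but join the groups that own
  # the /dev/dri render nodes, so the transient user can open the GPU.
  systemd.services.llama-cpp.serviceConfig.SupplementaryGroups = [
    "render"
    "video"
  ];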