fully remove llama-cpp

2026-01-08 05:41:10 -05:00
parent d4172a5886
commit 16d3050eb8
3 changed files with 0 additions and 50 deletions


@@ -33,8 +33,6 @@
./services/soulseek.nix
# ./services/llama-cpp.nix
./services/ups.nix
./services/bitwarden.nix


@@ -28,11 +28,6 @@
inputs.nixpkgs.follows = "nixpkgs";
};
llamacpp = {
url = "github:ggml-org/llama.cpp";
inputs.nixpkgs.follows = "nixpkgs";
};
srvos = {
url = "github:nix-community/srvos";
inputs.nixpkgs.follows = "nixpkgs";


@@ -1,43 +0,0 @@
{
pkgs,
service_configs,
config,
inputs,
lib,
...
}:
{
services.llama-cpp = {
enable = true;
model = builtins.toString (
pkgs.fetchurl {
url = "https://huggingface.co/unsloth/Apriel-1.5-15b-Thinker-GGUF/resolve/main/Apriel-1.5-15b-Thinker-Q4_0.gguf";
sha256 = "4d9439b76b6f4380ab5205617c1ef3d10b0e8897146a0a7ccb7155bca1771df7";
}
);
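# Listen on all interfaces; the port comes from the shared service_configs attribute set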
port = service_configs.ports.llama_cpp;
host = "0.0.0.0";
# vulkan broken: https://github.com/ggml-org/llama.cpp/issues/13801
package = (
lib.optimizePackage (
inputs.llamacpp.packages.${pkgs.system}.vulkan.overrideAttrs (old: {
postPatch = "";
})
)
);
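# -ngl 12: offload 12 layers to the GPU; -c 16384: 16k-token context window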
extraFlags = [
"-ngl"
"12"
"-c"
"16384"
];
};
# Have to disable DynamicUser to get Vulkan to work
systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;
services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
import ${config.age.secrets.caddy_auth.path}
reverse_proxy :${builtins.toString config.services.llama-cpp.port}
'';
}