add llama-server

This commit is contained in:
Simon Gardling 2025-03-31 03:19:55 -04:00
parent 7d2bb541c3
commit d34793c18f
Signed by: titaniumtown
GPG Key ID: 9AB28AC10ECE533D
4 changed files with 89 additions and 0 deletions

View File

@ -27,6 +27,8 @@
# ./services/matrix.nix
# ./services/owntracks.nix
./services/soulseek.nix
# ./services/llama-cpp.nix
];
systemd.targets = {

52
flake.lock generated
View File

@ -88,6 +88,24 @@
"type": "github"
}
},
"flake-parts_2": {
"inputs": {
"nixpkgs-lib": "nixpkgs-lib"
},
"locked": {
"lastModified": 1730504689,
"narHash": "sha256-hgmguH29K2fvs9szpq2r3pz2/8cJd2LPS+b4tfNFCwE=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "506278e768c2a08bec68eb62932193e341f55c90",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
@ -174,6 +192,27 @@
"type": "github"
}
},
"llamacpp": {
"inputs": {
"flake-parts": "flake-parts_2",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1743366063,
"narHash": "sha256-Jrvjz9A8oGVo4KCP2miGX2VxIm5mRNPVIEcEaVBDRsE=",
"owner": "ggml-org",
"repo": "llama.cpp",
"rev": "2c3f8b850a4a6cff0f5dda2135c03fc81d33ed8b",
"type": "github"
},
"original": {
"owner": "ggml-org",
"repo": "llama.cpp",
"type": "github"
}
},
"nix-minecraft": {
"inputs": {
"flake-compat": "flake-compat_2",
@ -228,6 +267,18 @@
"type": "github"
}
},
"nixpkgs-lib": {
"locked": {
"lastModified": 1730504152,
"narHash": "sha256-lXvH/vOfb4aGYyvFmZK/HlsNsr/0CVWlwYvo2rxJk3s=",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/cc2f28000298e1269cea6612cd06ec9979dd5d7f.tar.gz"
},
"original": {
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/cc2f28000298e1269cea6612cd06ec9979dd5d7f.tar.gz"
}
},
"nixpkgs-qbt": {
"locked": {
"lastModified": 1738103934,
@ -275,6 +326,7 @@
"disko": "disko",
"home-manager": "home-manager",
"lanzaboote": "lanzaboote",
"llamacpp": "llamacpp",
"nix-minecraft": "nix-minecraft",
"nixos-hardware": "nixos-hardware",
"nixpkgs": "nixpkgs",

View File

@ -29,6 +29,11 @@
url = "github:nix-community/disko";
inputs.nixpkgs.follows = "nixpkgs";
};
llamacpp = {
url = "github:ggml-org/llama.cpp";
inputs.nixpkgs.follows = "nixpkgs";
};
};
outputs =
@ -41,6 +46,7 @@
home-manager,
lanzaboote,
disko,
llamacpp,
...
}@inputs:
let
@ -68,6 +74,7 @@
immich = 2284;
soulseek_web = 5030;
soulseek_listen = 50300;
llama_cpp = 8991;
};
https = {

28
services/llama-cpp.nix Normal file
View File

@ -0,0 +1,28 @@
# NixOS module: llama.cpp inference server behind a Caddy reverse proxy.
{
  pkgs,
  service_configs,
  config,
  inputs,
  ...
}:
let
  # Quantized GGUF model pinned by content hash; fetched into the store
  # and passed to the service as a plain path string.
  modelFile = pkgs.fetchurl {
    url = "https://huggingface.co/bartowski/google_gemma-3-27b-it-GGUF/resolve/main/google_gemma-3-27b-it-IQ4_XS.gguf";
    sha256 = "bd2f188c66d8ccb0bffcb0c91e4dbbb72754bb1732e0bca323a2f266a35e01c8";
  };
in
{
  services.llama-cpp = {
    enable = true;
    model = builtins.toString modelFile;
    host = "0.0.0.0";
    port = service_configs.ports.llama_cpp;
    # Build llama.cpp from the flake input rather than nixpkgs.
    package = inputs.llamacpp.packages.${pkgs.system}.default;
    extraFlags = [
    ];
  };

  # Proxy https://llm.<domain> to the service port; read the port back
  # from the module config so the two stay in sync.
  services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
    reverse_proxy :${builtins.toString config.services.llama-cpp.port}
  '';
}