# Paste metadata: 2025-02-05 22:57:25 -05:00 — Nix expression, 94 lines, 2.3 KiB.

# Home-manager module: installs a patched, natively-optimized GPT4All build,
# pre-downloads GGUF model files, and generates its GPT4All.ini configuration.
{
  pkgs,
  lib,
  optimizeWithFlags,
  ...
}:
let
  # GGUF models to fetch and register with GPT4All. Each entry needs:
  #   name           — the on-disk filename GPT4All will look for
  #   context_length — tokens of context ([model-*] contextLength)
  #   gen_length     — max tokens to generate ([model-*] maxLength)
  #   source         — fixed-output fetch of the model weights
  models = [
    {
      name = "DeepSeek-R1-Distill-Qwen-14B-Q4_0.gguf";
      context_length = 32768;
      gen_length = 8192;
      source = pkgs.fetchurl {
        url = "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-14B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-14B-Q4_0.gguf?download=true";
        sha256 = "906b3382f2680f4ce845459b4a122e904002b075238080307586bcffcde49eef";
      };
    }
  ];

  # gpt4all with local patches applied, rebuilt with native optimization flags
  # via the caller-supplied `optimizeWithFlags` helper.
  gpt4all_package =
    optimizeWithFlags
      (pkgs.gpt4all.overrideAttrs (old: {
        # `old.patches or [ ]` guards against upstream defining no `patches`
        # attribute, which would make `++` fail at evaluation time.
        patches = (old.patches or [ ]) ++ [
          ./disable-settings-err.patch
          ./disable-version-check.patch
        ];
      }))
      # compile flags
      [
        "-O3"
        "-march=native"
        "-mtune=native"
      ];
in
{
  home.packages = [ gpt4all_package ];

  # Merge the generated INI with one symlink entry per model file.
  home.file =
    lib.recursiveUpdate
      {
        ".config/nomic.ai/GPT4All.ini".text =
          let
            system_prompt = "";
          in
          ''
            [General]
            chatTheme=Dark
            height=940
            suggestionMode=Off
            threadCount=8
            uniqueId=7096f2d2-448d-4272-a132-d37e77f8a781
            userDefaultModel=${
              # select the first element of `models` to be the default model
              (builtins.elemAt models 0).name
            }
            width=1472
            x=0
            y=0
            [download]
            lastVersionStarted=${gpt4all_package.version}
          ''
          # One [model-*] section per configured model.
          + (lib.concatStrings (
            map (model: ''
              [model-${model.name}]
              contextLength=${builtins.toString model.context_length}
              filename=${model.name}
              maxLength=${builtins.toString model.gen_length}
              promptBatchSize=256
              systemMessage="${
                # replace newlines with the string "\n" for gpt4all to properly parse
                builtins.replaceStrings [ "\n" ] [ "\\n" ] system_prompt
              }\n"
            '') models
          ))
          + ''
            [network]
            isActive=true
            usageStatsActive=true
          '';
      }
      (
        # Link each fetched model into GPT4All's data directory so it is
        # available without an in-app download.
        builtins.listToAttrs (
          map (f: {
            name = ".local/share/nomic.ai/GPT4All/${f.name}";
            value.source = f.source;
          }) models
        )
      );
}