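# Home Manager module for GPT4All: builds a patched, natively optimized
# gpt4all package, pre-fetches GGUF models into GPT4All's data directory,
# and generates ~/.config/nomic.ai/GPT4All.ini with per-model settings.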
{
  pkgs,
  lib,
  optimizeWithFlags,
  ...
}:
let
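  # GGUF models to pre-fetch with fetchurl; each entry also drives a
  # [model-...] section in the generated GPT4All.ini below.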
  models = [
    rec {
      name = "DeepSeek-R1-Distill-Qwen-14B-IQ4_XS";
      filename = src.name;
      context_length = 131072;
      gen_length = 32768;
      src = pkgs.fetchurl {
        url = "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-14B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-14B-IQ4_XS.gguf";
        sha256 = "031b190e7aa81770b5d069de181dcfe4b284bf5d75fa12f82f5e54a3178adcfd";
      };
    }
    # https://github.com/nomic-ai/gpt4all/issues/3540
    # rec {
    #   name = "Gemma_3_27b_it";
    #   filename = src.name;
    #   context_length = 131072;
    #   gen_length = 32768;
    #   src = pkgs.fetchurl {
    #     url = "https://huggingface.co/bartowski/google_gemma-3-27b-it-GGUF/resolve/main/google_gemma-3-27b-it-IQ4_XS.gguf";
    #     sha256 = "bd2f188c66d8ccb0bffcb0c91e4dbbb72754bb1732e0bca323a2f266a35e01c8";
    #   };
    # }
  ];
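
  # gpt4all with local patches applied, rebuilt with native CPU optimizations.
  # `optimizeWithFlags` is assumed to be a helper supplied by the caller
  # (e.g. via the flake's specialArgs or an overlay) that rebuilds a package
  # with the given compile flags appended.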
  gpt4all_package = (
    optimizeWithFlags
      (pkgs.gpt4all.overrideAttrs (old: {
        patches = old.patches ++ [
          ./disable-settings-err.patch
          ./disable-version-check.patch
        ];
      }))
      # compile flags
      [
        "-O3"
        "-march=native"
        "-mtune=native"
      ]
  );
  system_prompt = "You are a helpful and harmless assistant. You should think step-by-step. Explore multiple trains of thought and methods (to completion) in order to derive the correct answer.";
  chat_name_prompt = "Make a 3 word headline for this conversation";
in
{
  home.packages = [
    gpt4all_package
  ];
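
  # Merge two sets of files: the generated GPT4All.ini, and one entry per
  # model that links the fetched .gguf into GPT4All's model directory under
  # ~/.local/share/nomic.ai/GPT4All.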
  home.file =
    lib.recursiveUpdate
      {
        ".config/nomic.ai/GPT4All.ini".text =
          ''
            [General]
            chatTheme=Dark
            suggestionMode=Off
            threadCount=8
            userDefaultModel=${
              # select the first element of `models` to be the default model
              (builtins.elemAt models 0).name
            }
            [download]
            lastVersionStarted=${gpt4all_package.version}
          ''
          + (lib.concatMapStringsSep "\n" (model: ''
            [model-${model.name}]
            contextLength=${builtins.toString model.context_length}
            filename=${model.filename}
            maxLength=${builtins.toString model.gen_length}
            promptBatchSize=256
            chatNamePrompt=${chat_name_prompt}
            systemMessage="${
              # replace newlines with the string literal "\n" for gpt4all to properly parse
              builtins.replaceStrings [ "\n" ] [ "\\n" ] system_prompt
            }"
          '') models)
          + ''
            [network]
            isActive=false
            usageStatsActive=false
            [localdocs]
            fileExtensions=${
              lib.concatStringsSep ", " [
                "docx"
                "pdf"
                "txt"
                "md"
                "rst"
                "rs"
              ]
            }
          '';
      }
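      # One home.file entry per model: symlink the fetched .gguf into place.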
      (
        builtins.listToAttrs (
          map (f: {
            name = ".local/share/nomic.ai/GPT4All/${f.filename}";
            value.source = f.src;
          }) models
        )
      );
}