# Home Manager module that installs a natively-optimized GPT4All build and
# declaratively provisions its configuration file and local model files.
{ pkgs, lib, optimizeWithFlags, ... }:

let
  # Models to install.  Each entry describes one GGUF model: where to fetch
  # it from and the per-model settings written into GPT4All.ini.
  models = [
    rec {
      name = "DeepSeek-R1-Distill-Qwen-14B-IQ4_XS";
      filename = src.name;
      context_length = 131072; # prompt context window, in tokens
      gen_length = 32768; # maximum generation length, in tokens
      src = pkgs.fetchurl {
        url = "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-14B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-14B-IQ4_XS.gguf";
        sha256 = "031b190e7aa81770b5d069de181dcfe4b284bf5d75fa12f82f5e54a3178adcfd";
      };
    }
  ];

  # gpt4all with two local patches applied, then rebuilt with native-CPU
  # compile flags via the caller-provided `optimizeWithFlags` helper.
  gpt4all_package = optimizeWithFlags
    (pkgs.gpt4all.overrideAttrs (old: {
      # `or [ ]` keeps evaluation working even if the upstream derivation
      # stops defining a `patches` attribute.
      patches = (old.patches or [ ]) ++ [
        ./disable-settings-err.patch
        ./disable-version-check.patch
      ];
    }))
    # compile flags
    [ "-O3" "-march=native" "-mtune=native" ];

  system_prompt = "You are a helpful and harmless assistant. You should think step-by-step.";
  chat_name_prompt = "Make a 3 word headline for this conversation";
in
{
  home.packages = [ gpt4all_package ];

  # Merge the generated GPT4All.ini with one home.file entry per model,
  # linking each fetched GGUF into GPT4All's model directory.
  home.file = lib.recursiveUpdate
    {
      ".config/nomic.ai/GPT4All.ini".text = ''
        [General]
        chatTheme=Dark
        suggestionMode=Off
        threadCount=8
        userDefaultModel=${
          # select the first element of `models` to be the default model
          (builtins.elemAt models 0).name
        }

        [download]
        lastVersionStarted=${gpt4all_package.version}
      ''
      + (lib.concatMapStringsSep "\n"
        (model: ''
          [model-${model.name}]
          contextLength=${builtins.toString model.context_length}
          filename=${model.filename}
          maxLength=${builtins.toString model.gen_length}
          promptBatchSize=256
          chatNamePrompt=${chat_name_prompt}
          systemMessage="${
            # replace newlines with the string literal "\n" for gpt4all to properly parse
            builtins.replaceStrings [ "\n" ] [ "\\n" ] system_prompt
          }"
        '')
        models)
      + ''
        [network]
        isActive=false
        usageStatsActive=false

        [localdocs]
        fileExtensions=${
          lib.concatStringsSep ", " [ "docx" "pdf" "txt" "md" "rst" "rs" ]
        }
      '';
    }
    (builtins.listToAttrs (map
      (f: {
        name = ".local/share/nomic.ai/GPT4All/${f.filename}";
        value.source = f.src;
      })
      models));
}