update gpt4all models and config
commit c95bed4536 (parent 934a0dc51f)
home-manager/flake.lock (generated): 12 lines changed
@@ -174,11 +174,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1738722444,
-        "narHash": "sha256-DHVyKCiIQVDqjYoVU2j7UaLNIlOnpB9sP1cPRNRpqvY=",
+        "lastModified": 1738808867,
+        "narHash": "sha256-m5rbY/ck0NAlfSBxo++vl7EZn8fkZ02H3kGGc7q883c=",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "347fb01821c3cd8d54e563d244a599c1e27a393d",
+        "rev": "ae46f37fb727030ddc2ef65a675b751484c90032",
         "type": "github"
       },
       "original": {
@@ -242,11 +242,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1738782903,
-        "narHash": "sha256-xxclr3MHrE8hjQbHBlwONgCkYY8UHhjoA1jjB6pLvC0=",
+        "lastModified": 1738804720,
+        "narHash": "sha256-3bAR5ALk5AoFt4BlsQRdjSV6VVH6lbgtYdElb0A+qDc=",
         "owner": "0xc000022070",
         "repo": "zen-browser-flake",
-        "rev": "876ab3f1dc42bb52c250453d73130a6d07322b51",
+        "rev": "7868f1c54b7f8e09be194aaa0934791596df1ea1",
         "type": "github"
       },
       "original": {
@@ -7,12 +7,12 @@
 let
   models = [
     {
-      name = "Qwen2.5-14B-Instruct-IQ4_XS.gguf";
+      name = "DeepSeek-R1-Distill-Qwen-7B-Q6_K.gguf";
       context_length = 32768;
       gen_length = 8192;
       source = pkgs.fetchurl {
-        url = "https://huggingface.co/bartowski/Qwen2.5-14B-Instruct-GGUF/resolve/main/Qwen2.5-14B-Instruct-IQ4_XS.gguf?download=true";
-        sha256 = "+AHt49no0qQ48MoNsqGJV4FeJ3Cf2hSZqTMjNUIHaO4=";
+        url = "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-7B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-7B-Q6_K.gguf?download=true";
+        sha256 = "3ffa00e72db05668453687d5ab8e7c9fb19705cfd57292a956db17a3633c93f3";
       };
     }
   ];
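Note that models is a list, so the previous and the new model could also coexist. A minimal sketch, reusing only the old and new entries from this diff and assuming the surrounding module still iterates over the list unchanged:

  # Sketch: both the new DeepSeek-R1 distill and the previous Qwen2.5 entry,
  # each with its own fetchurl source and checksum as they appear in this diff.
  models = [
    {
      name = "DeepSeek-R1-Distill-Qwen-7B-Q6_K.gguf";
      context_length = 32768;
      gen_length = 8192;
      source = pkgs.fetchurl {
        url = "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-7B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-7B-Q6_K.gguf?download=true";
        sha256 = "3ffa00e72db05668453687d5ab8e7c9fb19705cfd57292a956db17a3633c93f3";
      };
    }
    {
      name = "Qwen2.5-14B-Instruct-IQ4_XS.gguf";
      context_length = 32768;
      gen_length = 8192;
      source = pkgs.fetchurl {
        url = "https://huggingface.co/bartowski/Qwen2.5-14B-Instruct-GGUF/resolve/main/Qwen2.5-14B-Instruct-IQ4_XS.gguf?download=true";
        sha256 = "+AHt49no0qQ48MoNsqGJV4FeJ3Cf2hSZqTMjNUIHaO4=";
      };
    }
  ];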
@@ -72,12 +72,10 @@ in
       filename=${model.name}
       maxLength=${builtins.toString model.gen_length}
       promptBatchSize=256
-      promptTemplate=<|im_start|>user\n%1<|im_end|>\n<|im_start|>assistant\n
-      systemPrompt="<|im_start|>system\n${
+      systemMessage="${
        # replace newlines with the string "\n" for gpt4all to properly parse
        builtins.replaceStrings [ "\n" ] [ "\\n" ] system_prompt
-      }<|im_end|>
-      \n"
+      }\n"
     '') models
   ))
   + ''
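For reference, builtins.replaceStrings is what keeps the multi-line system prompt on a single settings line; a minimal sketch of the transformation (the prompt string below is made up for illustration):

  # Replaces every real newline with the two characters backslash + n,
  # which gpt4all later re-interprets when reading its settings file.
  builtins.replaceStrings [ "\n" ] [ "\\n" ] "You are helpful.\nBe concise."
  # => "You are helpful.\\nBe concise."   (a single line in the written config)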