--- /srv/rebuilderd/tmp/rebuilderdR7LcUx/inputs/llama.cpp-examples_7593+dfsg-3_amd64.deb
+++ /srv/rebuilderd/tmp/rebuilderdR7LcUx/out/llama.cpp-examples_7593+dfsg-3_amd64.deb
├── file list
│ @@ -1,3 +1,3 @@
│  -rw-r--r-- 0 0 0 4 2026-01-16 17:30:50.000000 debian-binary
│  -rw-r--r-- 0 0 0 2344 2026-01-16 17:30:50.000000 control.tar.xz
│ --rw-r--r-- 0 0 0 4539968 2026-01-16 17:30:50.000000 data.tar.xz
│ +-rw-r--r-- 0 0 0 4540388 2026-01-16 17:30:50.000000 data.tar.xz
├── control.tar.xz
│ ├── control.tar
│ │ ├── ./md5sums
│ │ │ ├── ./md5sums
│ │ │ │┄ Files differ
├── data.tar.xz
│ ├── data.tar
│ │ ├── file list
│ │ │ @@ -30,26 +30,26 @@
│ │ │  -rw-r--r-- 0 root (0) root (0) 2656 2026-01-16 17:30:50.000000 ./usr/share/doc/llama.cpp-examples/changelog.Debian.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 13633 2026-01-16 17:30:50.000000 ./usr/share/doc/llama.cpp-examples/copyright
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-16 17:30:50.000000 ./usr/share/lintian/
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-16 17:30:50.000000 ./usr/share/lintian/overrides/
│ │ │  -rw-r--r-- 0 root (0) root (0) 177 2026-01-16 17:30:50.000000 ./usr/share/lintian/overrides/llama.cpp-examples
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-16 17:30:50.000000 ./usr/share/man/
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-16 17:30:50.000000 ./usr/share/man/man1/
│ │ │ --rw-r--r-- 0 root (0) root (0) 5765 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-batched.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5769 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-batched.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 6194 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-embedding.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 5717 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-eval-callback.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 6094 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-finetune.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5721 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-eval-callback.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 6096 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-finetune.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 400 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-gguf-hash.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 253 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-gguf.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 5714 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-lookahead.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 5945 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-lookup-create.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5717 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-lookahead.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5947 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-lookup-create.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 276 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-lookup-merge.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 5943 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-lookup-stats.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 5933 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-lookup.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 5824 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-parallel.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 5883 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-passkey.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 6031 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-retrieval.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 5722 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-save-load-state.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5945 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-lookup-stats.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5935 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-lookup.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5827 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-parallel.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5885 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-passkey.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 6033 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-retrieval.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5725 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-save-load-state.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 269 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-simple-chat.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 259 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-simple.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 6416 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-speculative-simple.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 6405 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-speculative.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 6417 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-speculative-simple.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 6406 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-speculative.1.gz
│ │ ├── ./usr/share/man/man1/llama-batched.1.gz
│ │ │ ├── llama-batched.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-BATCHED "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-batched \- llama-batched
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-\-\-\-\-\fR sampling params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-\-samplers\fR SAMPLERS samplers that will be used for generation in the order, separated by
│ │ │ │  .TP
│ │ │ │  \&';'
│ │ ├── ./usr/share/man/man1/llama-embedding.1.gz
│ │ │ ├── llama-embedding.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-EMBEDDING "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-embedding \- llama-embedding
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-eval-callback.1.gz
│ │ │ ├── llama-eval-callback.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-EVAL-CALLBACK "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-eval-callback \- llama-eval-callback
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-\-\-\-\-\fR sampling params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-\-samplers\fR SAMPLERS samplers that will be used for generation in the order, separated by
│ │ │ │  .TP
│ │ │ │  \&';'
│ │ ├── ./usr/share/man/man1/llama-finetune.1.gz
│ │ │ ├── llama-finetune.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-FINETUNE "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-finetune \- llama-finetune
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-lookahead.1.gz
│ │ │ ├── llama-lookahead.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-LOOKAHEAD "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-lookahead \- llama-lookahead
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-\-\-\-\-\fR sampling params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-\-samplers\fR SAMPLERS samplers that will be used for generation in the order, separated by
│ │ │ │  .TP
│ │ │ │  \&';'
│ │ ├── ./usr/share/man/man1/llama-lookup-create.1.gz
│ │ │ ├── llama-lookup-create.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-LOOKUP-CREATE "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-lookup-create \- llama-lookup-create
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-lookup-stats.1.gz
│ │ │ ├── llama-lookup-stats.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-LOOKUP-STATS "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-lookup-stats \- llama-lookup-stats
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-lookup.1.gz
│ │ │ ├── llama-lookup.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-LOOKUP "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-lookup \- llama-lookup
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-parallel.1.gz
│ │ │ ├── llama-parallel.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-PARALLEL "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-parallel \- llama-parallel
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-passkey.1.gz
│ │ │ ├── llama-passkey.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-PASSKEY "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-passkey \- llama-passkey
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-retrieval.1.gz
│ │ │ ├── llama-retrieval.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-RETRIEVAL "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-retrieval \- llama-retrieval
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-save-load-state.1.gz
│ │ │ ├── llama-save-load-state.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-SAVE-LOAD-STATE "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-save-load-state \- llama-save-load-state
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-\-\-\-\-\fR sampling params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-\-samplers\fR SAMPLERS samplers that will be used for generation in the order, separated by
│ │ │ │  .TP
│ │ │ │  \&';'
│ │ ├── ./usr/share/man/man1/llama-speculative-simple.1.gz
│ │ │ ├── llama-speculative-simple.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-SPECULATIVE-SIMPLE "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-speculative-simple \- llama-speculative-simple
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-speculative.1.gz
│ │ │ ├── llama-speculative.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-SPECULATIVE "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-speculative \- llama-speculative
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)