--- /srv/rebuilderd/tmp/rebuilderdR7LcUx/inputs/llama.cpp-tools_7593+dfsg-3_amd64.deb
+++ /srv/rebuilderd/tmp/rebuilderdR7LcUx/out/llama.cpp-tools_7593+dfsg-3_amd64.deb
├── file list
│ @@ -1,3 +1,3 @@
│  -rw-r--r-- 0 0 0 4 2026-01-16 17:30:50.000000 debian-binary
│  -rw-r--r-- 0 0 0 2388 2026-01-16 17:30:50.000000 control.tar.xz
│ --rw-r--r-- 0 0 0 3145368 2026-01-16 17:30:50.000000 data.tar.xz
│ +-rw-r--r-- 0 0 0 3145436 2026-01-16 17:30:50.000000 data.tar.xz
├── control.tar.xz
│ ├── control.tar
│ │ ├── ./md5sums
│ │ │ ├── ./md5sums
│ │ │ │┄ Files differ
├── data.tar.xz
│ ├── data.tar
│ │ ├── file list
│ │ │ @@ -42,11 +42,11 @@
│ │ │  -rw-r--r-- 0 root (0) root (0) 4122 2025-12-31 11:13:23.000000 ./usr/share/llama.cpp-tools/llama-server/themes/wild/favicon.ico
│ │ │  -rw-r--r-- 0 root (0) root (0) 34367 2025-12-31 11:13:23.000000 ./usr/share/llama.cpp-tools/llama-server/themes/wild/index.html
│ │ │  -rw-r--r-- 0 root (0) root (0) 76484 2025-12-31 11:13:23.000000 ./usr/share/llama.cpp-tools/llama-server/themes/wild/llama_cpp.png
│ │ │  -rw-r--r-- 0 root (0) root (0) 259586 2025-12-31 11:13:23.000000 ./usr/share/llama.cpp-tools/llama-server/themes/wild/llamapattern.png
│ │ │  -rw-r--r-- 0 root (0) root (0) 496463 2025-12-31 11:13:23.000000 ./usr/share/llama.cpp-tools/llama-server/themes/wild/wild.png
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-16 17:30:50.000000 ./usr/share/man/
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-16 17:30:50.000000 ./usr/share/man/man1/
│ │ │ --rw-r--r-- 0 root (0) root (0) 1171 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-bench.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 8010 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-cli.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 1174 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-bench.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 8009 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-cli.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 1496 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-quantize.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 9304 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-server.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 9306 2026-01-16 17:30:50.000000 ./usr/share/man/man1/llama-server.1.gz
│ │ ├── ./usr/share/man/man1/llama-bench.1.gz
│ │ │ ├── llama-bench.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-BENCH "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-bench \- llama-bench
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  usage: obj\-x86_64\-linux\-gnu/bin/llama\-bench [options]
│ │ │ │  .SS "options:"
│ │ │ │  .HP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR
│ │ │ │  .TP
│ │ │ │  \fB\-\-numa\fR
│ │ │ │  numa mode (default: disabled)
│ │ ├── ./usr/share/man/man1/llama-cli.1.gz
│ │ │ ├── llama-cli.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-CLI "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-cli \- llama-cli
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-server.1.gz
│ │ │ ├── llama-server.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-SERVER "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-server \- llama-server
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-zen4.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/x86_64\-linux\-gnu/ggml/backends0/libggml\-cpu\-cascadelake.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)