--- /srv/rebuilderd/tmp/rebuilderdy2Qg9H/inputs/llama.cpp-tools-extra_7593+dfsg-2_ppc64el.deb
+++ /srv/rebuilderd/tmp/rebuilderdy2Qg9H/out/llama.cpp-tools-extra_7593+dfsg-2_ppc64el.deb
├── file list
│ @@ -1,3 +1,3 @@
│  -rw-r--r-- 0 0 0 4 2026-01-11 15:26:14.000000 debian-binary
│  -rw-r--r-- 0 0 0 1820 2026-01-11 15:26:14.000000 control.tar.xz
│ --rw-r--r-- 0 0 0 2040800 2026-01-11 15:26:14.000000 data.tar.xz
│ +-rw-r--r-- 0 0 0 2040352 2026-01-11 15:26:14.000000 data.tar.xz
├── control.tar.xz
│ ├── control.tar
│ │ ├── ./md5sums
│ │ │ ├── ./md5sums
│ │ │ │┄ Files differ
├── data.tar.xz
│ ├── data.tar
│ │ ├── file list
│ │ │ @@ -26,15 +26,15 @@
│ │ │  -rw-r--r-- 0 root (0) root (0) 2505 2026-01-11 15:26:14.000000 ./usr/share/doc/llama.cpp-tools-extra/changelog.Debian.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 13633 2026-01-11 15:26:14.000000 ./usr/share/doc/llama.cpp-tools-extra/copyright
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-11 15:26:14.000000 ./usr/share/lintian/
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-11 15:26:14.000000 ./usr/share/lintian/overrides/
│ │ │  -rw-r--r-- 0 root (0) root (0) 180 2026-01-11 15:26:14.000000 ./usr/share/lintian/overrides/llama.cpp-tools-extra
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-11 15:26:14.000000 ./usr/share/man/
│ │ │  drwxr-xr-x 0 root (0) root (0) 0 2026-01-11 15:26:14.000000 ./usr/share/man/man1/
│ │ │ --rw-r--r-- 0 root (0) root (0) 5975 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-batched-bench.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5976 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-batched-bench.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 518 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-gguf-split.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 6259 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-imatrix.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 6798 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-mtmd-cli.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 6261 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-imatrix.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 6799 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-mtmd-cli.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 6215 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-perplexity.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 997 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-run.1.gz
│ │ │  -rw-r--r-- 0 root (0) root (0) 732 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-tokenize.1.gz
│ │ │ --rw-r--r-- 0 root (0) root (0) 5894 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-tts.1.gz
│ │ │ +-rw-r--r-- 0 root (0) root (0) 5895 2026-01-11 15:26:14.000000 ./usr/share/man/man1/llama-tts.1.gz
│ │ ├── ./usr/share/man/man1/llama-batched-bench.1.gz
│ │ │ ├── llama-batched-bench.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-BATCHED-BENCH "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-batched-bench \- llama-batched-bench
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/powerpc64le\-linux\-gnu/ggml/backends0/libggml\-cpu\-power9.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/powerpc64le\-linux\-gnu/ggml/backends0/libggml\-cpu\-power10.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-imatrix.1.gz
│ │ │ ├── llama-imatrix.1
│ │ │ │ @@ -1,13 +1,13 @@
│ │ │ │  .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
│ │ │ │  .TH LLAMA-IMATRIX "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-imatrix \- llama-imatrix
│ │ │ │  .SH DESCRIPTION
│ │ │ │ -load_backend: loaded CPU backend from \fI\,/usr/lib/powerpc64le\-linux\-gnu/ggml/backends0/libggml\-cpu\-power9.so\/\fP
│ │ │ │ +load_backend: loaded CPU backend from \fI\,/usr/lib/powerpc64le\-linux\-gnu/ggml/backends0/libggml\-cpu\-power10.so\/\fP
│ │ │ │  \fB\-\-\-\-\-\fR common params \fB\-\-\-\-\-\fR
│ │ │ │  .PP
│ │ │ │  \fB\-h\fR, \fB\-\-help\fR, \fB\-\-usage\fR print usage and exit
│ │ │ │  \fB\-\-version\fR show version and build info
│ │ │ │  \fB\-cl\fR, \fB\-\-cache\-list\fR show list of models in cache
│ │ │ │  \fB\-\-completion\-bash\fR print source\-able bash completion script for llama.cpp
│ │ │ │  \fB\-\-verbose\-prompt\fR print a verbose prompt before generation (default: false)
│ │ ├── ./usr/share/man/man1/llama-mtmd-cli.1.gz
│ │ │ ├── llama-mtmd-cli.1
│ │ │ │ @@ -2,15 +2,15 @@
│ │ │ │  .TH LLAMA-MTMD-CLI "1" "January 2026" "debian" "User Commands"
│ │ │ │  .SH NAME
│ │ │ │  llama-mtmd-cli \- llama-mtmd-cli
│ │ │ │  .SH SYNOPSIS
│ │ │ │  .B llama-mtmd-cli
│ │ │ │  [\fI\,options\/\fR] \fI\,-m --mmproj --image --audio