Mock Version: 5.9
Mock Version: 5.9
Mock Version: 5.9
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec'],
chrootPath='/var/lib/mock/f42-build-3400784-45124/root'
env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}
shell=False
logger=
timeout=864000
uid=993
gid=135
user='mockbuild'
unshare_net=True
printOutput=False
nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'])
Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']
Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', 'd2063b2ba59e495ebc65d3c9f1edba80', '-D', '/var/lib/mock/f42-build-3400784-45124/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False
Building target platforms: noarch
Building for target noarch
setting SOURCE_DATE_EPOCH=1737158400
Wrote: /builddir/build/SRPMS/python-mirrors-countme-0.1.4-5.fc42.src.rpm
Child return code was: 0
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec'],
chrootPath='/var/lib/mock/f42-build-3400784-45124/root'
env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}
shell=False
logger=
timeout=864000
uid=993
gid=135
user='mockbuild'
unshare_net=True
raiseExc=False
printOutput=False
nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'])
Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']
Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '2673ecccac4c4ad2bfc700db2037fefc', '-D', '/var/lib/mock/f42-build-3400784-45124/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False
Building target platforms: noarch
Building for target noarch
setting SOURCE_DATE_EPOCH=1737158400
Executing(%mkbuilddir): /bin/sh -e /var/tmp/rpm-tmp.RjOyqi
+ umask 022
+ cd /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ test -d /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ /usr/bin/rm -rf /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ /usr/bin/mkdir -p /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ /usr/bin/mkdir -p /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/SPECPARTS
+ RPM_EC=0
++ jobs -p
+ exit 0
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.iBOBgB
+ umask 022
+ cd /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ cd /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ rm -rf mirrors_countme-0.1.4
+ /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/mirrors_countme-0.1.4.tar.gz
+ STATUS=0
+ '[' 0 -ne 0 ']'
+ cd mirrors_countme-0.1.4
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w .
+ RPM_EC=0
++ jobs -p
+ exit 0
Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.idRl2Z
+ umask 022
+ cd /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ cd mirrors_countme-0.1.4
+ echo pyproject-rpm-macros
+ echo python3-devel
+ echo 'python3dist(packaging)'
+ echo 'python3dist(pip) >= 19'
+ '[' -f pyproject.toml ']'
+ echo '(python3dist(tomli) if python3-devel < 3.11)'
+ rm -rfv '*.dist-info/'
+ '[' -f /usr/bin/python3 ']'
+ mkdir -p /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.pyproject-builddir
+ echo -n
+ CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ VALAFLAGS=-g
+ RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Cforce-frame-pointers=yes --cap-lints=warn'
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 '
+ LT_SYS_LIBRARY_PATH=/usr/lib:
+ CC=gcc
+ CXX=g++
+ TMPDIR=/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.pyproject-builddir
+ RPM_TOXENV=py313
+ HOSTNAME=rpmbuild
+ /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/pyproject-wheeldir --output /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-buildrequires
Handling poetry-core from build-system.requires
Requirement not satisfied: poetry-core
Exiting dependency generation pass: build backend
+ cat /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-buildrequires
+ rm -rfv '*.dist-info/'
+ RPM_EC=0
++ jobs -p
+ exit 0
Wrote: /builddir/build/SRPMS/python-mirrors-countme-0.1.4-5.fc42.buildreqs.nosrc.rpm
Child return code was: 11
Dynamic buildrequires detected
Going to install missing buildrequires. See root.log for details.
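The %prep and %generate_buildrequires sections of the spec are not part of this log, but the traces above are recognizably macro expansions: the rpmuncompress/chmod sequence is what %autosetup produces, and the echoed pyproject-rpm-macros, python3-devel and python3dist(...) lines are what %pyproject_buildrequires emits. A plausible reconstruction of those two sections, assuming the standard Fedora pyproject macros:

    %prep
    %autosetup -n mirrors_countme-%{version}

    %generate_buildrequires
    %pyproject_buildrequires

Because pyproject.toml declares poetry-core as the build backend, this first pass stops at "Requirement not satisfied: poetry-core": the generator cannot finish computing requirements until the backend itself is installed.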
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec'],
chrootPath='/var/lib/mock/f42-build-3400784-45124/root'
env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}
shell=False
logger=
timeout=864000
uid=993
gid=135
user='mockbuild'
unshare_net=True
raiseExc=False
printOutput=False
nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'])
Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']
Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '37551fc779ca46788fad24895a166d28', '-D', '/var/lib/mock/f42-build-3400784-45124/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -br --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False
Building target platforms: noarch
Building for target noarch
setting SOURCE_DATE_EPOCH=1737158400
Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.oQLwzJ
+ umask 022
+ cd /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ cd mirrors_countme-0.1.4
+ echo pyproject-rpm-macros
+ echo python3-devel
+ echo 'python3dist(packaging)'
+ echo 'python3dist(pip) >= 19'
+ '[' -f pyproject.toml ']'
+ echo '(python3dist(tomli) if python3-devel < 3.11)'
+ rm -rfv '*.dist-info/'
+ '[' -f /usr/bin/python3 ']'
+ mkdir -p /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.pyproject-builddir
+ echo -n
+ CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ VALAFLAGS=-g
+ RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Cforce-frame-pointers=yes --cap-lints=warn'
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 '
+ LT_SYS_LIBRARY_PATH=/usr/lib:
+ CC=gcc
+ CXX=g++
+ TMPDIR=/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.pyproject-builddir
+ RPM_TOXENV=py313
+ HOSTNAME=rpmbuild
+ /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/pyproject-wheeldir --output /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-buildrequires
Handling poetry-core from build-system.requires
Requirement satisfied: poetry-core
  (installed: poetry-core 1.9.1)
+ cat /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-buildrequires
+ rm -rfv mirrors_countme-0.1.4.dist-info/
removed 'mirrors_countme-0.1.4.dist-info/WHEEL'
removed 'mirrors_countme-0.1.4.dist-info/entry_points.txt'
removed 'mirrors_countme-0.1.4.dist-info/LICENSE.md'
removed 'mirrors_countme-0.1.4.dist-info/METADATA'
removed directory 'mirrors_countme-0.1.4.dist-info/'
+ RPM_EC=0
++ jobs -p
+ exit 0
Wrote: /builddir/build/SRPMS/python-mirrors-countme-0.1.4-5.fc42.buildreqs.nosrc.rpm
Child return code was: 11
Dynamic buildrequires detected
Going to install missing buildrequires. See root.log for details.
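Both -br passes end with "Child return code was: 11". That exit code is not a failure: rpmbuild -br deliberately stops after %generate_buildrequires, writes the *.buildreqs.nosrc.rpm, and exits 11 so the caller knows to install the generated BuildRequires and retry. Roughly, as an illustrative shell sketch (not mock's actual code; install_new_buildrequires is a hypothetical helper standing in for the dependency installation recorded in root.log):

    spec=/builddir/build/SPECS/python-mirrors-countme.spec
    # rpmbuild -br always stops after %generate_buildrequires and exits 11,
    # so the loop ends when a pass installs nothing new.
    rpmbuild -br --noclean --target noarch --nodeps "$spec"
    while install_new_buildrequires; do
        rpmbuild -br --noprep --noclean --target noarch --nodeps "$spec"
    done
    rpmbuild -ba --noprep --noclean --target noarch --nodeps "$spec"

Here the loop converges on the second pass: poetry-core is already present (1.9.1), the generated requirement set adds nothing new, and mock proceeds to the full rpmbuild -ba run below.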
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec'],
chrootPath='/var/lib/mock/f42-build-3400784-45124/root'
env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}
shell=False
logger=
timeout=864000
uid=993
gid=135
user='mockbuild'
unshare_net=True
printOutput=False
nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'])
Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']
Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '77b40dd8e3db448ca44c536d2f72c08f', '-D', '/var/lib/mock/f42-build-3400784-45124/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/fuse', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False
Building target platforms: noarch
Building for target noarch
setting SOURCE_DATE_EPOCH=1737158400
Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.Wn6e3P
+ umask 022
+ cd /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ cd mirrors_countme-0.1.4
+ echo pyproject-rpm-macros
+ echo python3-devel
+ echo 'python3dist(packaging)'
+ echo 'python3dist(pip) >= 19'
+ '[' -f pyproject.toml ']'
+ echo '(python3dist(tomli) if python3-devel < 3.11)'
+ rm -rfv '*.dist-info/'
+ '[' -f /usr/bin/python3 ']'
+ mkdir -p /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.pyproject-builddir
+ echo -n
+ CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ VALAFLAGS=-g
+ RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Cforce-frame-pointers=yes --cap-lints=warn'
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 '
+ LT_SYS_LIBRARY_PATH=/usr/lib:
+ CC=gcc
+ CXX=g++
+ TMPDIR=/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.pyproject-builddir
+ RPM_TOXENV=py313
+ HOSTNAME=rpmbuild
+ /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/pyproject-wheeldir --output /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-buildrequires
Handling poetry-core from build-system.requires
Requirement satisfied: poetry-core
  (installed: poetry-core 1.9.1)
+ cat /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-buildrequires
+ rm -rfv mirrors_countme-0.1.4.dist-info/
removed 'mirrors_countme-0.1.4.dist-info/WHEEL'
removed 'mirrors_countme-0.1.4.dist-info/entry_points.txt'
removed 'mirrors_countme-0.1.4.dist-info/LICENSE.md'
removed 'mirrors_countme-0.1.4.dist-info/METADATA'
removed directory 'mirrors_countme-0.1.4.dist-info/'
+ RPM_EC=0
++ jobs -p
+ exit 0
Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.2YdLwv
+ umask 022
+ cd /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ export CFLAGS
+ CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ export CXXFLAGS
+ FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ export FFLAGS
+ FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ export FCFLAGS
+ VALAFLAGS=-g
+ export VALAFLAGS
+ RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Cforce-frame-pointers=yes --cap-lints=warn'
+ export RUSTFLAGS
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 '
+ export LDFLAGS
+ LT_SYS_LIBRARY_PATH=/usr/lib:
+ export LT_SYS_LIBRARY_PATH
+ CC=gcc
+ export CC
+ CXX=g++
+ export CXX
+ cd mirrors_countme-0.1.4
+ mkdir -p /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.pyproject-builddir
+ CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ VALAFLAGS=-g
+ RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Cforce-frame-pointers=yes --cap-lints=warn'
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 '
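The block of CFLAGS/LDFLAGS exports above is rpm's standard %build preamble (the distro build-flags expansion); the spec section that drives the rest of this trace is in all likelihood just the usual pyproject one-liner, sketched here since the spec itself is not part of the log:

    %build
    %pyproject_wheel

%pyproject_wheel re-applies the same flags in its own (non-exported) environment block and hands off to /usr/lib/rpm/redhat/pyproject_wheel.py, whose pip output follows.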
+ LT_SYS_LIBRARY_PATH=/usr/lib:
+ CC=gcc
+ CXX=g++
+ TMPDIR=/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.pyproject-builddir
+ /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_wheel.py /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/pyproject-wheeldir
Processing /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4
Preparing metadata (pyproject.toml): started
Running command Preparing metadata (pyproject.toml)
Preparing metadata (pyproject.toml): finished with status 'done'
Building wheels for collected packages: mirrors-countme
Building wheel for mirrors-countme (pyproject.toml): started
Running command Building wheel for mirrors-countme (pyproject.toml)
Building wheel for mirrors-countme (pyproject.toml): finished with status 'done'
Created wheel for mirrors-countme: filename=mirrors_countme-0.1.4-py3-none-any.whl size=43512 sha256=a5670f3e5c6f50bc34022b48f6cec95a64c456970727e686a08ef7a0b6e9bb62
Stored in directory: /builddir/.cache/pip/wheels/9f/b9/8d/9be30f4dc6c659a5824d1f0d610186190b8d9f7b08b2494377
Successfully built mirrors-countme
+ RPM_EC=0
++ jobs -p
+ exit 0
Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.B2aDAP
+ umask 022
+ cd /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ '[' /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT '!=' / ']'
+ rm -rf /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT
++ dirname /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT
+ mkdir -p /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ mkdir /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT
+ CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ export CFLAGS
+ CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ export CXXFLAGS
+ FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ export FFLAGS
+ FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ export FCFLAGS
+ VALAFLAGS=-g
+ export VALAFLAGS
+ RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Cforce-frame-pointers=yes --cap-lints=warn'
+ export RUSTFLAGS
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 '
+ export LDFLAGS
+ LT_SYS_LIBRARY_PATH=/usr/lib:
+ export LT_SYS_LIBRARY_PATH
+ CC=gcc
+ export CC
+ CXX=g++
+ export CXX
+ cd mirrors_countme-0.1.4
++ ls /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/pyproject-wheeldir/mirrors_countme-0.1.4-py3-none-any.whl
++ xargs basename --multiple
++ sed -E 's/([^-]+)-([^-]+)-.+\.whl/\1==\2/'
+ specifier=mirrors_countme==0.1.4
+ '[' -z mirrors_countme==0.1.4 ']'
+ TMPDIR=/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.pyproject-builddir
+ /usr/bin/python3 -m pip install --root /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT --prefix /usr --no-deps --disable-pip-version-check --progress-bar off --verbose --ignore-installed --no-warn-script-location --no-index --no-cache-dir --find-links /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/pyproject-wheeldir mirrors_countme==0.1.4
Using pip 24.3.1 from /usr/lib/python3.13/site-packages/pip (python 3.13)
Looking in links: /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/pyproject-wheeldir
Processing ./pyproject-wheeldir/mirrors_countme-0.1.4-py3-none-any.whl
Installing collected packages: mirrors_countme
Creating /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin
changing mode of /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-delete-totals to 755
changing mode of /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-parse-access-log to 755
changing mode of /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-totals to 755
changing mode of /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-trim-raw to 755
Successfully installed mirrors_countme-0.1.4
+ '[' -d /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin ']'
+ '[' -z sP ']'
+ shebang_flags=-kasP
+ /usr/bin/python3 -B /usr/lib/rpm/redhat/pathfix.py -pni /usr/bin/python3 -kasP /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-delete-totals /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-parse-access-log /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-totals /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-trim-raw
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-delete-totals: updating
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-parse-access-log: updating
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-totals: updating
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/countme-trim-raw: updating
+ rm -rfv /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/__pycache__
+ rm -f /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-ghost-distinfo
+ site_dirs=()
+ '[' -d /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages ']'
+ site_dirs+=("/usr/lib/python3.13/site-packages")
+ '[' /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib64/python3.13/site-packages '!=' /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages ']'
+ '[' -d /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib64/python3.13/site-packages ']'
+ for site_dir in ${site_dirs[@]}
+ for distinfo in /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT$site_dir/*.dist-info
+ echo '%ghost /usr/lib/python3.13/site-packages/mirrors_countme-0.1.4.dist-info'
+ sed -i s/pip/rpm/ /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme-0.1.4.dist-info/INSTALLER
+ PYTHONPATH=/usr/lib/rpm/redhat
+ /usr/bin/python3 -B /usr/lib/rpm/redhat/pyproject_preprocess_record.py --buildroot /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT --record /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme-0.1.4.dist-info/RECORD --output /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-record
+ rm -fv /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme-0.1.4.dist-info/RECORD
removed '/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme-0.1.4.dist-info/RECORD'
+ rm -fv /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme-0.1.4.dist-info/REQUESTED
removed '/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme-0.1.4.dist-info/REQUESTED'
++ wc -l /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-ghost-distinfo
++ cut -f1 '-d '
+ lines=1
+ '[' 1 -ne 1 ']'
+ RPM_FILES_ESCAPE=4.19
+ /usr/bin/python3 /usr/lib/rpm/redhat/pyproject_save_files.py --output-files /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-files --output-modules /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-modules --buildroot /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT --sitelib /usr/lib/python3.13/site-packages --sitearch /usr/lib64/python3.13/site-packages --python-version 3.13 --pyproject-record /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/python-mirrors-countme-0.1.4-5.fc42.noarch-pyproject-record --prefix /usr mirrors_countme
+ for script in update-rawdb update-totals sqlite2csv split-totals-db
+ install -m0755 scripts/countme-update-rawdb.sh /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/
+ for script in update-rawdb update-totals sqlite2csv split-totals-db
+ install -m0755 scripts/countme-update-totals.sh /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/
+ for script in update-rawdb update-totals sqlite2csv split-totals-db
+ install -m0755 scripts/countme-sqlite2csv.sh /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/
+ for script in update-rawdb update-totals sqlite2csv split-totals-db
+ install -m0755 scripts/countme-split-totals-db.sh /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/
+ install -m0755 scripts/countme-rezip /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/bin/
+ /usr/lib/rpm/check-buildroot
+ /usr/lib/rpm/redhat/brp-ldconfig
+ /usr/lib/rpm/brp-compress
+ /usr/lib/rpm/brp-strip /usr/bin/strip
+ /usr/lib/rpm/brp-strip-comment-note /usr/bin/strip /usr/bin/objdump
+ /usr/lib/rpm/redhat/brp-strip-lto /usr/bin/strip
+ /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip
+ /usr/lib/rpm/check-rpaths
+ /usr/lib/rpm/redhat/brp-mangle-shebangs
mangling shebang in /usr/bin/countme-rezip from /bin/sh -e to #!/usr/bin/sh -e
mangling shebang in /usr/bin/countme-update-rawdb.sh from /bin/bash to #!/usr/bin/bash
mangling shebang in /usr/bin/countme-split-totals-db.sh from /bin/sh -e to #!/usr/bin/sh -e
mangling shebang in /usr/bin/countme-update-totals.sh from /bin/bash to #!/usr/bin/bash
mangling shebang in /usr/bin/countme-sqlite2csv.sh from /bin/sh to #!/usr/bin/sh
+ /usr/lib/rpm/brp-remove-la-files
+ env /usr/lib/rpm/redhat/brp-python-bytecompile '' 1 0 -j32
Bytecompiling .py files below /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13 using python3.13
+ /usr/lib/rpm/redhat/brp-python-hardlink
+ /usr/bin/add-determinism --brp -j32 /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/scripts/__pycache__/__init__.cpython-313.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/version.cpython-313.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/scripts/__pycache__/countme_delete_totals.cpython-313.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/scripts/__pycache__/countme_parse_access_log.cpython-313.opt-1.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/constants.cpython-313.opt-1.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/readers.cpython-313.opt-1.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/parse.cpython-313.opt-1.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/regex.cpython-313.opt-1.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/output_items.cpython-313.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/scripts/__pycache__/countme_totals.cpython-313.opt-1.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/scripts/__pycache__/countme_trim_raw.cpython-313.opt-1.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/__init__.cpython-313.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/matchers.cpython-313.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/progress.cpython-313.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/util.cpython-313.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/writers.cpython-313.pyc: rewriting with normalized contents
/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages/mirrors_countme/__pycache__/totals.cpython-313.pyc: rewriting with normalized contents
Scanned 11 directories and 65 files, processed 17 inodes, 17 modified (0 replaced + 17 rewritten), 0 unsupported format, 0 errors
Executing(%check): /bin/sh -e /var/tmp/rpm-tmp.iNcg3c
+ umask 022
+ cd /builddir/build/BUILD/python-mirrors-countme-0.1.4-build
+ CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ export CFLAGS
+ CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer '
+ export CXXFLAGS
+ FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ export FFLAGS
+ FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib/gfortran/modules '
+ export FCFLAGS
+ VALAFLAGS=-g
+ export VALAFLAGS
+ RUSTFLAGS='-Copt-level=3 -Cdebuginfo=2 -Ccodegen-units=1 -Cstrip=none -Cforce-frame-pointers=yes --cap-lints=warn'
+ export RUSTFLAGS
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 '
+ export LDFLAGS
+ LT_SYS_LIBRARY_PATH=/usr/lib:
+ export LT_SYS_LIBRARY_PATH
+ CC=gcc
+ export CC
+ CXX=g++
+ export CXX
+ cd mirrors_countme-0.1.4
+ PYTHONPATH=/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/BUILDROOT/usr/lib/python3.13/site-packages
+ /usr/bin/python3 -m pytest -v -n auto
============================= test session starts ==============================
platform linux -- Python 3.13.2, pytest-8.3.4, pluggy-1.5.0 -- /usr/bin/python3
cachedir: .pytest_cache
hypothesis profile 'default' -> database=DirectoryBasedExampleDatabase(PosixPath('/builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/.hypothesis/examples'))
rootdir: /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4
configfile: pyproject.toml
plugins: cov-5.0.0, xdist-3.6.1, hypothesis-6.98.8
created: 32/32 workers
32 workers [228 items]

scheduling tests via LoadScheduling

tests/scripts/test_countme_delete_totals.py::test_parse_args_with_noop
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_matchmode[--matchmode=mirrors-mirrors-MirrorMatcher]
tests/scripts/test_countme_delete_totals.py::test_tm2ui
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_help
tests/scripts/test_countme_totals.py::test_parse_args_with_version_argument
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_negating_flags[--no-dup-check-dupcheck]
tests/scripts/test_countme_delete_totals.py::test_num_entries_without_data
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_progress[None-False]
tests/scripts/test_countme_trim_raw.py::test_get_minmaxtime[min]
tests/scripts/test_countme_delete_totals.py::test_last_week_with_data
tests/scripts/test_countme_totals.py::test_cli
tests/scripts/test_countme_delete_totals.py::test_num_entries_for_without_data
tests/scripts/test_countme_delete_totals.py::test_trim_data
tests/scripts/test_countme_trim_raw.py::test__num_entries[unique-ip-only]
tests/scripts/test_countme_delete_totals.py::test_parse_args_with_version_argument
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_oldest_week[None-False]
tests/scripts/test_countme_delete_totals.py::test_num2ui[123- 123]
tests/scripts/test_countme_trim_raw.py::test__del_entries[unique-ip-only]
tests/scripts/test_countme_trim_raw.py::test_next_week
[gw14] [  0%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_matchmode[--matchmode=mirrors-mirrors-MirrorMatcher]
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_version
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_missing_mandatory[format]
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_matchmode[None-countme-CountmeMatcher]
tests/scripts/test_countme_delete_totals.py::test_cli[dryrun]
tests/scripts/test_countme_delete_totals.py::test_num2ui[1234567890-1,234,567,890]
tests/scripts/test_countme_totals.py::test_parse_args_with_required_arguments
tests/scripts/test_countme_trim_raw.py::test_positive_int[1-1]
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_rw[--read-write-True]
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_dbfile_missing
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_formats[csv]
[gw19] [  0%] PASSED tests/scripts/test_countme_totals.py::test_parse_args_with_version_argument
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_keep[None-13]
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_negating_flags[--no-header-header]
tests/scripts/test_countme_trim_raw.py::test_positive_int[-1-ValueError]
[gw5] [  1%] PASSED tests/scripts/test_countme_delete_totals.py::test_tm2ui
[gw23] [  1%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_help
[gw30] [  2%] PASSED tests/scripts/test_countme_trim_raw.py::test__num_entries[unique-ip-only]
[gw27] [  2%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_keep[None-13]
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_mutually_exclusive_group
[gw25] [  3%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_oldest_week[None-False]
[gw1] [  3%] PASSED tests/scripts/test_countme_delete_totals.py::test_parse_args_with_noop
[gw31] [  3%] PASSED tests/scripts/test_countme_trim_raw.py::test__del_entries[unique-ip-only]
[gw16] [  4%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_negating_flags[--no-dup-check-dupcheck]
[gw21] [  4%] PASSED tests/scripts/test_countme_trim_raw.py::test_positive_int[-1-ValueError]
[gw20] [  5%] PASSED tests/scripts/test_countme_totals.py::test_cli
[gw4] [  5%] PASSED tests/scripts/test_countme_delete_totals.py::test_num_entries_without_data
[gw12] [  6%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_missing_mandatory[format]
[gw0] [  6%] PASSED tests/scripts/test_countme_delete_totals.py::test_parse_args_with_version_argument
[gw9] [  7%] PASSED tests/scripts/test_countme_delete_totals.py::test_cli[dryrun]
[gw28] [  7%] PASSED tests/scripts/test_countme_trim_raw.py::test_get_minmaxtime[min]
[gw11] [  7%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_progress[None-False]
[gw24] [  8%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_dbfile_missing
[gw10] [  8%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_version
[gw2] [  9%] PASSED tests/scripts/test_countme_delete_totals.py::test_last_week_with_data
[gw26] [  9%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_rw[--read-write-True]
[gw7] [ 10%] PASSED tests/scripts/test_countme_delete_totals.py::test_num2ui[123- 123]
[gw14] [ 10%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_mutually_exclusive_group
[gw3] [ 10%] PASSED tests/scripts/test_countme_delete_totals.py::test_num_entries_for_without_data
[gw22] [ 11%] PASSED tests/scripts/test_countme_trim_raw.py::test_positive_int[1-1]
[gw6] [ 11%] PASSED tests/scripts/test_countme_delete_totals.py::test_num2ui[1234567890-1,234,567,890]
[gw17] [ 12%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_formats[csv]
[gw18] [ 12%] PASSED tests/scripts/test_countme_totals.py::test_parse_args_with_required_arguments
[gw15] [ 13%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_negating_flags[--no-header-header]
[gw13] [ 13%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_matchmode[None-countme-CountmeMatcher]
tests/scripts/test_countme_totals.py::test_parse_args_with_missing_required_argument
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_version
[gw23] [ 14%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_version
[gw19] [ 14%] PASSED tests/scripts/test_countme_totals.py::test_parse_args_with_missing_required_argument
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_keep[5-5]
tests/scripts/test_countme_trim_raw.py::test__del_entries[all-entries]
tests/scripts/test_countme_delete_totals.py::test_weeknum2tm
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_oldest_week[--oldest-week-True]
tests/scripts/test_countme_delete_totals.py::test_parse_args_with_default_noop
[gw27] [ 14%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_keep[5-5]
tests/scripts/test_countme_trim_raw.py::test_tm2ui
tests/scripts/test_countme_delete_totals.py::test_del_entries_for
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_formats[sqlite]
tests/scripts/test_countme_totals.py::test_cli_keyboard_interrupt
[gw5] [ 15%] PASSED tests/scripts/test_countme_delete_totals.py::test_weeknum2tm
[gw25] [ 15%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_oldest_week[--oldest-week-True]
tests/scripts/test_countme_trim_raw.py::test_positive_int[boop-ValueError]
[gw1] [ 16%] PASSED tests/scripts/test_countme_delete_totals.py::test_parse_args_with_default_noop
[gw16] [ 16%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_formats[sqlite]
[gw30] [ 17%] PASSED tests/scripts/test_countme_trim_raw.py::test__del_entries[all-entries]
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_logs
[gw21] [ 17%] PASSED tests/scripts/test_countme_trim_raw.py::test_positive_int[boop-ValueError]
[gw20] [ 17%] PASSED tests/scripts/test_countme_totals.py::test_cli_keyboard_interrupt
[gw4] [ 18%] PASSED tests/scripts/test_countme_delete_totals.py::test_del_entries_for
[gw12] [ 18%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_logs
tests/scripts/test_countme_delete_totals.py::test_parse_args_with_sqlite
tests/scripts/test_countme_delete_totals.py::test_cli_keyboard_interrupt
tests/scripts/test_countme_trim_raw.py::test_get_minmaxtime[max]
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_rw[None-False]
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_progress[--progress-True]
tests/scripts/test_countme_delete_totals.py::test_num_entries_for_with_data
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_missing_mandatory[logfile]
tests/scripts/test_countme_trim_raw.py::test_positive_int[0-ValueError]
tests/scripts/test_countme_parse_access_log.py::test_cli
[gw0] [ 19%] PASSED tests/scripts/test_countme_delete_totals.py::test_parse_args_with_sqlite
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_negating_flags[--no-index-index]
[gw9] [ 19%] PASSED tests/scripts/test_countme_delete_totals.py::test_cli_keyboard_interrupt
tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_rw[--noop-False]
[gw28] [ 20%] PASSED tests/scripts/test_countme_trim_raw.py::test_get_minmaxtime[max]
tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_matchmode[--matchmode=countme-countme-CountmeMatcher]
[gw2] [ 20%] PASSED tests/scripts/test_countme_delete_totals.py::test_num_entries_for_with_data
tests/scripts/test_countme_trim_raw.py::test_trim_data[rw-interrupt]
[gw8] [ 21%] PASSED tests/scripts/test_countme_delete_totals.py::test_trim_data
[gw11] [ 21%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_progress[--progress-True]
tests/scripts/test_countme_delete_totals.py::test_num_entries_with_data
[gw24] [ 21%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_rw[None-False]
tests/scripts/test_countme_delete_totals.py::test_num2ui[123456789-123,456,789]
tests/scripts/test_countme_trim_raw.py::test_trim_data[rw]
tests/scripts/test_countme_trim_raw.py::test_trim_data[rw-unique-ip-only]
tests/scripts/test_countme_delete_totals.py::test_get_trim_data
[gw17] [ 22%] PASSED tests/scripts/test_countme_parse_access_log.py::test_cli
tests/scripts/test_countme_totals.py::test_parse_args_with_optional_arguments
[gw10] [ 22%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_missing_mandatory[logfile]
[gw15] [ 23%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_negating_flags[--no-index-index]
[gw22] [ 23%] PASSED tests/scripts/test_countme_trim_raw.py::test_positive_int[0-ValueError]
[gw13] [ 24%] PASSED tests/scripts/test_countme_parse_access_log.py::TestParseArgs::test_matchmode[--matchmode=countme-countme-CountmeMatcher]
[gw23] [ 24%] PASSED tests/scripts/test_countme_trim_raw.py::test_trim_data[rw-interrupt]
[gw18] [ 25%] PASSED tests/scripts/test_countme_totals.py::test_parse_args_with_optional_arguments
[gw14] [ 25%] PASSED tests/scripts/test_countme_trim_raw.py::test_trim_data[rw]
[gw26] [ 25%] PASSED tests/scripts/test_countme_trim_raw.py::TestParseArgs::test_rw[--noop-False]
[gw3] [ 26%] PASSED tests/scripts/test_countme_delete_totals.py::test_num_entries_with_data
[gw19] [ 26%] PASSED tests/scripts/test_countme_trim_raw.py::test_trim_data[rw-unique-ip-only]
[gw7] [ 27%] PASSED tests/scripts/test_countme_delete_totals.py::test_get_trim_data
[gw6] [ 27%] PASSED tests/scripts/test_countme_delete_totals.py::test_num2ui[123456789-123,456,789]
tests/scripts/test_countme_trim_raw.py::test_trim_data[ro]
tests/scripts/test_countme_trim_raw.py::test_main[without-oldest-week-all-entries-with-entries]
tests/scripts/test_countme_trim_raw.py::test_cli[True]
tests/scripts/test_countme_trim_raw.py::test_main[without-oldest-week-unique-ip-only-with-entries]
tests/scripts/test_countme_trim_raw.py::test_main[with-oldest-week-unique-ip-only-without-entries]
tests/scripts/test_countme_trim_raw.py::test_main[without-oldest-week-all-entries-without-entries]
tests/scripts/test_countme_trim_raw.py::test_main[without-oldest-week-unique-ip-only-without-entries]
tests/scripts/test_countme_trim_raw.py::test_cli[False]
tests/scripts/test_countme_trim_raw.py::test_main[with-oldest-week-unique-ip-only-with-entries]
[gw12] [ 28%] PASSED tests/scripts/test_countme_trim_raw.py::test_cli[True]
tests/scripts/test_countme_trim_raw.py::test_main[with-oldest-week-all-entries-without-entries]
[gw27] [ 28%] PASSED tests/scripts/test_countme_trim_raw.py::test_trim_data[ro]
tests/test_integrate.py::test_count_totals
[gw4] [ 28%] PASSED tests/scripts/test_countme_trim_raw.py::test_cli[False]
[gw20] [ 29%] PASSED tests/scripts/test_countme_trim_raw.py::test_main[with-oldest-week-unique-ip-only-without-entries]
[gw25] [ 29%] PASSED tests/scripts/test_countme_trim_raw.py::test_main[without-oldest-week-unique-ip-only-with-entries]
[gw1] [ 30%] PASSED tests/scripts/test_countme_trim_raw.py::test_main[without-oldest-week-unique-ip-only-without-entries]
[gw5] [ 30%] PASSED tests/scripts/test_countme_trim_raw.py::test_main[without-oldest-week-all-entries-without-entries]
[gw30] [ 31%] PASSED tests/scripts/test_countme_trim_raw.py::test_main[without-oldest-week-all-entries-with-entries]
[gw21] [ 31%] PASSED tests/scripts/test_countme_trim_raw.py::test_main[with-oldest-week-unique-ip-only-with-entries]
[gw16] [ 32%] PASSED tests/scripts/test_countme_trim_raw.py::test_main[with-oldest-week-all-entries-without-entries]
[gw0] [ 32%] SKIPPED tests/test_integrate.py::test_count_totals
tests/test_integrate.py::test_read_file
tests/test_integrate.py::test_log
tests/test_parse.py::test_parse[False]
tests/test_matchers.py::TestLogMatcher::test_iter
tests/scripts/test_countme_delete_totals.py::test_cli[readwrite]
tests/test_matchers.py::TestLogMatcher::test_make_item
tests/test_progress.py::test_log_date[121.43.225.226 - - [01/May/2023:00:00:02 +0000] "GET /metalink?repo=epel-8&arch=x86_64&infra=stock&content=centos&countme=4 HTTP/1.1" 200 7547 "-" "libdnf (CentOS Linux 8; generic; Linux.x86_64)"-01/May/2023]
tests/test_parse.py::test_parse_from_iterator[without-index]
tests/test_matchers.py::TestMirrorMatcher::test_make_item
tests/test_parse.py::test_parse_from_iterator[without-header]
tests/test_parse.py::test_parse_from_iterator[without-countme-workaround]
[gw11] [ 32%] PASSED tests/test_matchers.py::TestLogMatcher::test_iter
tests/test_matchers.py::TestCountmeMatcher::test_make_item
tests/test_output_items.py::TestLogItem::test_timestamp
tests/test_parse.py::test_parse_from_iterator[happy-path]
tests/test_parse.py::test_parse[True]
tests/test_output_items.py::TestLogItem::test_datetime
[gw8] [ 33%] PASSED tests/scripts/test_countme_delete_totals.py::test_cli[readwrite]
[gw13] [ 33%] PASSED tests/test_parse.py::test_parse[False]
tests/test_progress.py::test_log_date[121.43.225.226 - - "GET /metalink?repo=epel-8&arch=x86_64&infra=stock&content=centos&countme=4 HTTP/1.1" 200 7547 "-" "libdnf (CentOS Linux 8; generic; Linux.x86_64)"-??/??/????]
[gw24] [ 34%] PASSED tests/test_matchers.py::TestLogMatcher::test_make_item
tests/test_parse.py::test_parse_from_iterator[without-dupcheck]
[gw17] [ 34%] PASSED tests/test_parse.py::test_parse_from_iterator[without-index]
tests/test_parse.py::test__convert_none_members
[gw23] [ 35%] PASSED tests/test_progress.py::test_log_date[121.43.225.226 - - [01/May/2023:00:00:02 +0000] "GET /metalink?repo=epel-8&arch=x86_64&infra=stock&content=centos&countme=4 HTTP/1.1" 200 7547 "-" "libdnf (CentOS Linux 8; generic; Linux.x86_64)"-01/May/2023]
[gw6] [ 35%] PASSED tests/test_parse.py::test_parse_from_iterator[without-countme-workaround]
[gw2] [ 35%] PASSED tests/test_matchers.py::TestCountmeMatcher::test_make_item
[gw10] [ 36%] PASSED tests/test_matchers.py::TestMirrorMatcher::test_make_item
[gw22] [ 36%] PASSED tests/test_parse.py::test_parse_from_iterator[without-header]
[gw9] [ 37%] SKIPPED tests/test_integrate.py::test_read_file
[gw3] [ 37%] PASSED tests/test_parse.py::test_parse_from_iterator[happy-path]
[gw15] [ 38%] PASSED tests/test_parse.py::test_parse[True]
[gw19] [ 38%] PASSED tests/test_progress.py::test_log_date[121.43.225.226 - - "GET /metalink?repo=epel-8&arch=x86_64&infra=stock&content=centos&countme=4 HTTP/1.1" 200 7547 "-" "libdnf (CentOS Linux 8; generic; Linux.x86_64)"-??/??/????]
[gw18] [ 39%] PASSED tests/test_parse.py::test_parse_from_iterator[without-dupcheck]
[gw29] [ 39%] PASSED tests/scripts/test_countme_trim_raw.py::test_next_week
tests/test_progress.py::test_log_date[1683208046.7402434-TypeError]
tests/test_progress.py::test_log_total_size[gzip-file-missing]
tests/test_progress.py::test_log_reader[plain]
tests/test_progress.py::test_log_total_size[xz-file-missing]
tests/test_progress.py::test_log_total_size[xz-file-exists]
[gw27] [ 39%] PASSED tests/test_progress.py::test_log_date[1683208046.7402434-TypeError]
tests/test_progress.py::test_log_reader[gzip]
tests/test_progress.py::test_log_reader[xz]
tests/test_progress.py::test_log_total_size[plain-file-missing]
[gw5] [ 40%] PASSED tests/test_progress.py::test_log_reader[plain]
tests/test_progress.py::test_log_total_size[gzip-file-exists]
tests/test_progress.py::test_log_total_size[plain-file-exists]
[gw1] [ 40%] PASSED tests/test_progress.py::test_log_reader[gzip]
[gw25] [ 41%] PASSED tests/test_progress.py::test_log_reader[xz]
tests/test_progress.py::TestDIYProgress::test__init__[without-file]
[gw0] [ 41%] PASSED tests/test_progress.py::TestDIYProgress::test__init__[without-file]
[gw16] [ 42%] PASSED tests/test_progress.py::test_log_total_size[plain-file-missing]
tests/test_progress.py::TestDIYProgress::test_update[no-tick]
tests/test_progress.py::TestDIYProgress::test_hrsize[g]
[gw21] [ 42%] PASSED tests/test_progress.py::test_log_total_size[gzip-file-exists]
tests/test_progress.py::TestDIYProgress::test_hrsize[m]
[gw20] [ 42%] PASSED tests/test_progress.py::test_log_total_size[gzip-file-missing]
tests/test_progress.py::TestDIYProgress::test_update[tick]
tests/test_progress.py::TestDIYProgress::test_update[disabled]
[gw24] [ 43%] PASSED tests/test_progress.py::TestDIYProgress::test_update[no-tick]
[gw8] [ 43%] PASSED tests/test_progress.py::TestDIYProgress::test_update[tick]
tests/test_progress.py::TestDIYProgress::test_set_description[disabled]
[gw17] [ 44%] PASSED tests/test_progress.py::TestDIYProgress::test_update[disabled]
[gw13] [ 44%] PASSED tests/test_progress.py::TestDIYProgress::test_hrsize[g]
[gw30] [ 45%] PASSED tests/test_progress.py::test_log_total_size[plain-file-exists]
[gw12] [ 45%] PASSED tests/test_progress.py::test_log_total_size[xz-file-missing]
[gw23] [ 46%] PASSED tests/test_progress.py::TestDIYProgress::test_hrsize[m]
tests/test_progress.py::TestDIYProgress::test_display[unscaled]
[gw11] [ 46%] PASSED tests/test_progress.py::TestDIYProgress::test_set_description[disabled]
tests/test_progress.py::TestDIYProgress::test_close[disabled]
tests/test_progress.py::TestDIYProgress::test_iter
tests/test_progress.py::TestDIYProgress::test_set_description[no-refresh]
tests/test_progress.py::TestDIYProgress::test_display[scaled]
[gw4] [ 46%] PASSED tests/test_progress.py::test_log_total_size[xz-file-exists]
tests/test_progress.py::TestDIYProgress::test_hrsize[k]
tests/test_progress.py::TestDIYProgress::test__init__[with-file]
tests/test_progress.py::TestDIYProgress::test_hrsize[ ]
tests/test_progress.py::TestReadProgress::test___init__
tests/test_progress.py::TestDIYProgress::test_hrsize[t]
[gw2] [ 47%] PASSED tests/test_progress.py::TestDIYProgress::test_set_description[no-refresh]
[gw15] [ 47%] PASSED tests/test_progress.py::TestDIYProgress::test_hrsize[ ]
[gw27] [ 48%] PASSED tests/test_progress.py::TestReadProgress::test___init__
[gw9] [ 48%] PASSED tests/test_progress.py::TestDIYProgress::test__init__[with-file]
[gw22] [ 49%] PASSED tests/test_progress.py::TestDIYProgress::test_hrsize[k]
[gw6] [ 49%] PASSED tests/test_progress.py::TestDIYProgress::test_close[disabled] [gw31] [ 50%] PASSED tests/scripts/test_countme_trim_raw.py::test_tm2ui [gw18] [ 50%] PASSED tests/test_progress.py::TestDIYProgress::test_hrsize[t] tests/test_readers.py::TestSQLiteReader::test__init__[wrong-filefields] tests/test_readers.py::TestSQLiteReader::test__init__[happy-path] [gw5] [ 50%] PASSED tests/test_readers.py::TestSQLiteReader::test__init__[wrong-filefields] tests/test_readers.py::TestSQLiteReader::test__init__[no-filefields] tests/test_regex.py::test_countme_log_re[test_case0] [gw25] [ 51%] PASSED tests/test_readers.py::TestSQLiteReader::test__init__[happy-path] [gw0] [ 51%] PASSED tests/test_regex.py::test_countme_log_re[test_case0] [gw1] [ 52%] PASSED tests/test_readers.py::TestSQLiteReader::test__init__[no-filefields] tests/test_readers.py::TestSQLiteReader::test__iter_rows tests/scripts/test_countme_trim_raw.py::test__num_entries[all-entries] tests/test_readers.py::TestSQLiteReader::test__get_fields tests/test_regex.py::test_countme_user_agent_re[test_case1] tests/test_regex.py::test_countme_user_agent_re[test_case2] tests/test_progress.py::TestReadProgress::test__iter_log_lines tests/test_regex.py::test_log_re[test_case0] tests/test_progress.py::TestReadProgress::test___iter__ tests/test_regex.py::test_countme_user_agent_re_invalid[16.160.95.167 - - [31/May/2021:00:00:02 +0000] "GET /badpath?repo=epel-8&arch=x86_64&infra=stock&content=almalinux&countme=2 HTTP/1.1" 200 26137 "-" "libdnf (AlmaLinux 8.3; generic; Linux.x86_64)"] [gw29] [ 52%] PASSED tests/scripts/test_countme_trim_raw.py::test__num_entries[all-entries] [gw16] [ 53%] PASSED tests/test_readers.py::TestSQLiteReader::test__iter_rows tests/test_regex.py::test_log_date_re[test_case0] tests/test_readers.py::TestSQLiteReader::test___iter__ tests/test_progress.py::TestReadProgress::test__progress_obj [gw8] [ 53%] PASSED tests/test_regex.py::test_countme_user_agent_re[test_case1] tests/test_regex.py::test_countme_user_agent_re[test_case0] [gw21] [ 53%] PASSED tests/test_readers.py::TestSQLiteReader::test__get_fields [gw13] [ 54%] PASSED tests/test_regex.py::test_countme_user_agent_re[test_case2] [gw23] [ 54%] PASSED tests/test_regex.py::test_log_re[test_case0] [gw17] [ 55%] PASSED tests/test_regex.py::test_log_date_re[test_case0] [gw24] [ 55%] PASSED tests/test_regex.py::test_countme_user_agent_re_invalid[16.160.95.167 - - [31/May/2021:00:00:02 +0000] "GET /badpath?repo=epel-8&arch=x86_64&infra=stock&content=almalinux&countme=2 HTTP/1.1" 200 26137 "-" "libdnf (AlmaLinux 8.3; generic; Linux.x86_64)"] [gw10] [ 56%] PASSED tests/test_progress.py::TestDIYProgress::test_iter [gw4] [ 56%] PASSED tests/test_progress.py::TestReadProgress::test__progress_obj [gw12] [ 57%] PASSED tests/test_progress.py::TestReadProgress::test___iter__ tests/test_totals.py::TestRawDB::test_minmaxtime[maxtime] tests/test_totals.py::test_weekdate[7-7-ValueError] tests/test_totals.py::test_daterange [gw11] [ 57%] PASSED tests/test_regex.py::test_countme_user_agent_re[test_case0] tests/test_totals.py::TestRawDB::test___init__ tests/test_totals.py::TestCSVCountItem::test_from_totalsitem tests/test_regex.py::test_mirrors_log_re[test_case0] tests/test_totals.py::TestRawDB::test_complete_weeks [gw6] [ 57%] PASSED tests/test_regex.py::test_mirrors_log_re[test_case0] [gw30] [ 58%] PASSED tests/test_readers.py::TestSQLiteReader::test___iter__ [gw20] [ 58%] PASSED tests/test_progress.py::TestReadProgress::test__iter_log_lines [gw15] [ 59%] PASSED 
tests/test_totals.py::TestRawDB::test___init__ [gw9] [ 59%] PASSED tests/test_totals.py::test_daterange tests/test_totals.py::TestRawDB::test_week_iter [gw2] [ 60%] PASSED tests/test_totals.py::test_weekdate[7-7-ValueError] tests/test_totals.py::TestRawDBU::test_minmaxtime[mintime] [gw18] [ 60%] PASSED tests/test_totals.py::TestRawDB::test_minmaxtime[maxtime] tests/test_totals.py::TestRawDBU::test___init__ tests/scripts/test_countme_trim_raw.py::test_main[with-oldest-week-all-entries-with-entries] tests/test_totals.py::TestRawDB::test_week_count tests/test_totals.py::TestRawDBU::test_minmaxtime[maxtime] tests/test_totals.py::test_totals[with-countme-raw-with-csv_dump-with-progress] tests/test_totals.py::TestRawDBU::test_complete_weeks [gw0] [ 60%] PASSED tests/test_totals.py::TestRawDBU::test_minmaxtime[mintime] [gw5] [ 61%] PASSED tests/test_totals.py::TestRawDB::test_week_iter [gw1] [ 61%] PASSED tests/test_totals.py::TestRawDBU::test___init__ [gw25] [ 62%] PASSED tests/test_totals.py::TestRawDB::test_week_count [gw31] [ 62%] PASSED tests/scripts/test_countme_trim_raw.py::test_main[with-oldest-week-all-entries-with-entries] tests/test_totals.py::TestRawDBU::test_week_iter tests/test_totals.py::TestSplitWeekDays::test_fetchone [gw29] [ 63%] PASSED tests/test_totals.py::TestRawDBU::test_minmaxtime[maxtime] tests/test_totals.py::test_totals[with-countme-raw-without-csv_dump-with-progress] tests/test_totals.py::test_totals[with-countme-raw-with-csv_dump-without-progress] [gw21] [ 63%] PASSED tests/test_totals.py::TestRawDBU::test_week_iter tests/test_totals.py::TestSplitWeekDays::test___init__ tests/test_totals.py::test_totals[without-countme-raw-with-csv_dump-with-progress] [gw8] [ 64%] PASSED tests/test_totals.py::test_totals[with-countme-raw-with-csv_dump-with-progress] tests/test_totals.py::test_weekdate[0-1-expected0] [gw13] [ 64%] PASSED tests/test_totals.py::TestSplitWeekDays::test_fetchone tests/test_totals.py::TestSplitWeekDays::test___iter__ tests/test_totals.py::test_totals[with-countme-raw-without-csv_dump-without-progress] tests/test_util.py::test_weeknum[1683208046.7402434-ValueError] tests/test_totals.py::TestSplitWeekDays::test_fetchall [gw3] [ 64%] FAILED tests/test_progress.py::TestDIYProgress::test_display[unscaled] [gw17] [ 65%] PASSED tests/test_totals.py::TestSplitWeekDays::test_fetchall [gw10] [ 65%] PASSED tests/test_totals.py::test_weekdate[0-1-expected0] [gw30] [ 66%] PASSED tests/test_totals.py::TestSplitWeekDays::test___iter__ [gw6] [ 66%] PASSED tests/test_util.py::test_weeknum[1683208046.7402434-ValueError] [gw24] [ 67%] PASSED tests/test_totals.py::TestSplitWeekDays::test___init__ [gw4] [ 67%] PASSED tests/test_totals.py::test_totals[without-countme-raw-with-csv_dump-with-progress] tests/test_totals.py::TestRawDBU::test_week_count tests/test_util.py::test_weeknum[1683208046.7402434-2782] tests/test_totals.py::test_totals[without-countme-raw-without-csv_dump-with-progress] tests/test_totals.py::test_totals[without-countme-raw-with-csv_dump-without-progress] tests/test_util.py::test_offset_to_timezone[-0400-expected0] tests/test_util.py::test_offset_to_timezone[1683208046.7402434-ValueError] tests/test_util.py::test_offset_to_timezone[+0500-expected1] [gw18] [ 67%] PASSED tests/test_util.py::test_offset_to_timezone[-0400-expected0] tests/test_util.py::test_parse_logtime[29/Mar/2020:16:04:28 +0200-expected2] [gw25] [ 68%] PASSED tests/test_util.py::test_offset_to_timezone[1683208046.7402434-ValueError] tests/test_util.py::test_parse_logtime[29/Mar/2020:16:04:28 
+0000-expected0] [gw9] [ 68%] PASSED tests/test_util.py::test_weeknum[1683208046.7402434-2782] [gw23] [ 69%] PASSED tests/test_totals.py::test_totals[with-countme-raw-without-csv_dump-with-progress] tests/test_util.py::test_parse_logtime[29/Mar/2020:16:04:28 -0800-expected3] tests/test_util.py::test__fetchone_or_none[with-result] tests/test_util.py::test_parse_logtime[29/Mar/2020:16:04:28 -0000-expected1] tests/test_util.py::test__fetchone_or_none[without-result] [gw12] [ 69%] PASSED tests/test_totals.py::test_totals[with-countme-raw-with-csv_dump-without-progress] tests/test_util.py::test_parse_querydict[foo=bar&foo=baz-expected1] [gw21] [ 70%] PASSED tests/test_util.py::test__fetchone_or_none[with-result] [gw13] [ 70%] PASSED tests/test_util.py::test__fetchone_or_none[without-result] [gw31] [ 71%] PASSED tests/test_util.py::test_parse_logtime[29/Mar/2020:16:04:28 +0200-expected2] [gw0] [ 71%] PASSED tests/test_util.py::test_parse_logtime[29/Mar/2020:16:04:28 +0000-expected0] tests/test_regex.py::test_mirrors_log_re_invalid[16.160.95.167 - - [31/May/2021:00:00:02 +0000] "GET /badpath?repo=epel-8&arch=x86_64&infra=stock&content=almalinux&countme=2 HTTP/1.1" 200 26137 "-" "libdnf (AlmaLinux 8.3; generic; Linux.x86_64)"] [gw5] [ 71%] PASSED tests/test_util.py::test_offset_to_timezone[+0500-expected1] [gw20] [ 72%] PASSED tests/test_totals.py::TestRawDBU::test_week_count [gw29] [ 72%] PASSED tests/test_util.py::test_parse_logtime[29/Mar/2020:16:04:28 -0800-expected3] [gw2] [ 73%] PASSED tests/test_totals.py::test_totals[without-countme-raw-with-csv_dump-without-progress] tests/test_writers.py::TestItemWriter::test___init__[happy-path] [gw15] [ 73%] PASSED tests/test_totals.py::test_totals[without-countme-raw-without-csv_dump-with-progress] tests/test_writers.py::TestItemWriter::test___init__[unknown-timefield] tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[mintime] tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[maxtime_countme] tests/test_version.py::test_version_info tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[maxtime_unique] [gw1] [ 74%] PASSED tests/test_util.py::test_parse_logtime[29/Mar/2020:16:04:28 -0000-expected1] [gw8] [ 74%] PASSED tests/test_util.py::test_parse_querydict[foo=bar&foo=baz-expected1] [gw3] [ 75%] PASSED tests/test_regex.py::test_mirrors_log_re_invalid[16.160.95.167 - - [31/May/2021:00:00:02 +0000] "GET /badpath?repo=epel-8&arch=x86_64&infra=stock&content=almalinux&countme=2 HTTP/1.1" 200 26137 "-" "libdnf (AlmaLinux 8.3; generic; Linux.x86_64)"] tests/test_writers.py::TestItemWriter::test_passing_methods[commit] [gw10] [ 75%] PASSED tests/test_version.py::test_version_info [gw6] [ 75%] PASSED tests/test_writers.py::TestItemWriter::test___init__[happy-path] [gw30] [ 76%] PASSED tests/test_writers.py::TestItemWriter::test___init__[unknown-timefield] [gw24] [ 76%] PASSED tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[mintime] tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[mintime_countme] [gw4] [ 77%] PASSED tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[maxtime_countme] tests/test_writers.py::TestAWKWriter::test__get_writer tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[mintime_unique] tests/test_writers.py::TestCSVWriter::test__get_writer tests/test_writers.py::TestItemWriter::test_write_items tests/test_writers.py::TestItemWriter::test_passing_methods[write_index] [gw21] [ 77%] PASSED 
tests/test_writers.py::TestAWKWriter::test__get_writer [gw17] [ 78%] PASSED tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[maxtime_unique] [gw11] [ 78%] PASSED tests/test_totals.py::test_totals[with-countme-raw-without-csv_dump-without-progress] tests/test_writers.py::TestAWKWriter::test_write_item tests/test_writers.py::TestCSVWriter::test_write_header [gw12] [ 78%] PASSED tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[mintime_unique] tests/test_writers.py::TestItemWriter::test_not_implemented_methods[_get_writer-args0] [gw31] [ 79%] PASSED tests/test_writers.py::TestCSVWriter::test_write_header [gw25] [ 79%] PASSED tests/test_writers.py::TestCSVWriter::test__get_writer tests/test_writers.py::TestItemWriter::test_passing_methods[write_header] [gw9] [ 80%] PASSED tests/test_writers.py::TestItemWriter::test_write_items [gw13] [ 80%] PASSED tests/test_writers.py::TestAWKWriter::test_write_item [gw23] [ 81%] PASSED tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[mintime_countme] [gw20] [ 81%] PASSED tests/test_writers.py::TestItemWriter::test_not_implemented_methods[_get_writer-args0] [gw0] [ 82%] PASSED tests/test_writers.py::TestItemWriter::test_passing_methods[write_index] [gw18] [ 82%] PASSED tests/test_writers.py::TestItemWriter::test_passing_methods[commit] [gw2] [ 82%] PASSED tests/test_writers.py::TestItemWriter::test_passing_methods[write_header] tests/test_writers.py::TestAWKWriter::test__write_row tests/test_writers.py::TestJSONWriter::test__get_writer tests/test_writers.py::TestCSVWriter::test_write_item tests/test_writers.py::TestSQLiteWriter::test_write_header tests/test_writers.py::TestItemWriter::test_not_implemented_methods[write_item-args1] tests/test_writers.py::TestSQLiteWriter::test_write_item tests/test_writers.py::TestAWKWriter::test_write_header tests/test_writers.py::TestSQLiteWriter::test__sqltype tests/test_writers.py::TestSQLiteWriter::test__get_writer[fileobj] tests/test_writers.py::test_make_writer[CSV] [gw7] [ 83%] FAILED tests/test_output_items.py::TestLogItem::test_timestamp [gw3] [ 83%] PASSED tests/test_writers.py::TestAWKWriter::test_write_header tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[maxtime] [gw6] [ 84%] PASSED tests/test_writers.py::TestSQLiteWriter::test_write_header [gw15] [ 84%] PASSED tests/test_writers.py::TestItemWriter::test_not_implemented_methods[write_item-args1] tests/test_writers.py::TestSQLiteWriter::test_write_items [gw8] [ 85%] PASSED tests/test_writers.py::TestAWKWriter::test__write_row [gw5] [ 85%] PASSED tests/test_writers.py::TestJSONWriter::test__get_writer [gw29] [ 85%] PASSED tests/test_writers.py::TestCSVWriter::test_write_item tests/test_writers.py::TestJSONWriter::test_write_item [gw21] [ 86%] PASSED tests/test_writers.py::test_make_writer[CSV] tests/test_writers.py::TestSQLiteWriter::test_mintime_maxtime[max] [gw24] [ 86%] PASSED tests/test_writers.py::TestSQLiteWriter::test_write_item [gw30] [ 87%] PASSED tests/test_writers.py::TestSQLiteWriter::test__get_writer[fileobj] [gw1] [ 87%] PASSED tests/test_writers.py::TestJSONWriter::test_write_item [gw12] [ 88%] PASSED tests/test_writers.py::TestSQLiteWriter::test_mintime_maxtime[max] [gw11] [ 88%] PASSED tests/test_util.py::TestMinMaxPropMixin::test_minmaxtime_properties[maxtime] [gw4] [ 89%] PASSED tests/test_writers.py::TestSQLiteWriter::test_write_items [gw10] [ 89%] PASSED tests/test_writers.py::TestSQLiteWriter::test__sqltype 
tests/test_writers.py::TestSQLiteWriter::test__get_writer[filename]
tests/test_writers.py::test_make_writer[SQLite]
tests/test_writers.py::TestSQLiteWriter::test_write_index
tests/test_writers.py::TestSQLiteWriter::test_mintime_maxtime[min]
tests/test_writers.py::test_make_writer[illegal]
[gw17] [ 89%] PASSED tests/test_writers.py::TestSQLiteWriter::test__get_writer[filename]
tests/test_writers.py::test_make_writer[JSON]
tests/test_writers.py::test_make_writer[AWK]
tests/test_writers.py::TestSQLiteWriter::test_has_item
tests/test_writers.py::TestSQLiteWriter::test_commit
[gw9] [ 90%] PASSED tests/test_writers.py::TestSQLiteWriter::test_mintime_maxtime[min]
[gw25] [ 90%] PASSED tests/test_writers.py::TestSQLiteWriter::test_write_index
[gw31] [ 91%] PASSED tests/test_writers.py::test_make_writer[SQLite]
[gw13] [ 91%] PASSED tests/test_writers.py::test_make_writer[AWK]
[gw20] [ 92%] PASSED tests/test_writers.py::test_make_writer[illegal]
[gw18] [ 92%] PASSED tests/test_writers.py::TestSQLiteWriter::test_commit
[gw23] [ 92%] PASSED tests/test_writers.py::TestSQLiteWriter::test_has_item
[gw0] [ 93%] PASSED tests/test_writers.py::test_make_writer[JSON]
tests/test_progress.py::TestDIYProgress::test_close[enabled]
[gw7] [ 93%] PASSED tests/test_progress.py::TestDIYProgress::test_close[enabled]
[gw27] [ 94%] FAILED tests/test_totals.py::TestRawDB::test_complete_weeks
tests/test_totals.py::test_totals[without-countme-raw-without-csv_dump-without-progress]
[gw27] [ 94%] PASSED tests/test_totals.py::test_totals[without-countme-raw-without-csv_dump-without-progress]
[gw26] [ 95%] FAILED tests/test_output_items.py::TestLogItem::test_datetime
[gw16] [ 95%] FAILED tests/test_totals.py::TestRawDBU::test_complete_weeks
tests/test_util.py::test_parse_querydict[foo=bar&baz=gnu-expected0]
[gw16] [ 96%] PASSED tests/test_util.py::test_parse_querydict[foo=bar&baz=gnu-expected0]
tests/test_progress.py::TestDIYProgress::test_hrsize[e]
[gw26] [ 96%] PASSED tests/test_progress.py::TestDIYProgress::test_hrsize[e]
[gw22] [ 96%] FAILED tests/test_totals.py::TestCSVCountItem::test_from_totalsitem
tests/test_util.py::test_weeknum[345600-0]
[gw22] [ 97%] PASSED tests/test_util.py::test_weeknum[345600-0]
[gw19] [ 97%] FAILED tests/test_progress.py::TestDIYProgress::test_display[scaled]
tests/test_totals.py::TestRawDB::test_minmaxtime[mintime]
[gw19] [ 98%] PASSED tests/test_totals.py::TestRawDB::test_minmaxtime[mintime]
[gw14] [ 98%] PASSED tests/test_parse.py::test__convert_none_members
tests/test_progress.py::TestDIYProgress::test_hrsize[p]
[gw14] [ 99%] PASSED tests/test_progress.py::TestDIYProgress::test_hrsize[p]
[gw28] [ 99%] FAILED tests/test_integrate.py::test_log
tests/test_progress.py::TestDIYProgress::test_set_description[happy-path]
[gw28] [100%] PASSED tests/test_progress.py::TestDIYProgress::test_set_description[happy-path]

=================================== FAILURES ===================================
____________________ TestDIYProgress.test_display[unscaled] ____________________
[gw3] linux -- Python 3.13.2 /usr/bin/python3

self = <tests.test_progress.TestDIYProgress object at 0x...>
unit_scale = False

    @pytest.mark.parametrize("unit_scale", (True, False), ids=("scaled", "unscaled"))
>   @given(count=integers(min_value=0, max_value=int(TEST_TOTAL * 1.01 + 1)))

tests/test_progress.py:241:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib/python3.13/site-packages/hypothesis/core.py:1245: in _raise_to_user
    raise the_error_hypothesis_found
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<tests.test_progress.TestDIYProgress object at 0x...>, False, 0)
kwargs = {}, arg_drawtime = 0.0, start = 226805.0775075, result = None
finish = 226805.3134866, in_drawtime = 0.0
runtime = datetime.timedelta(microseconds=235979)
current_deadline = timedelta(milliseconds=200)

    @proxies(self.test)
    def test(*args, **kwargs):
        arg_drawtime = math.fsum(data.draw_times.values())
        start = time.perf_counter()
        try:
            result = self.test(*args, **kwargs)
        finally:
            finish = time.perf_counter()
            in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime
            runtime = datetime.timedelta(seconds=finish - start - in_drawtime)
            self._timing_features = {
                "execute_test": finish - start - in_drawtime,
                **data.draw_times,
            }

        if (current_deadline := self.settings.deadline) is not None:
            if not is_final:
                current_deadline = (current_deadline // 4) * 5
            if runtime >= current_deadline:
>               raise DeadlineExceeded(runtime, self.settings.deadline)
E               hypothesis.errors.DeadlineExceeded: Test took 235.98ms, which exceeds the deadline of 200.00ms
E               Falsifying explicit example: test_display(
E                   self=<tests.test_progress.TestDIYProgress object at 0x...>,
E                   unit_scale=False,
E                   count=0,
E               )

/usr/lib/python3.13/site-packages/hypothesis/core.py:845: DeadlineExceeded
__________________________ TestLogItem.test_timestamp __________________________
[gw7] linux -- Python 3.13.2 /usr/bin/python3

self = <tests.test_output_items.TestLogItem object at 0x...>, timestamp = 1

    @given(timestamp=integers(min_value=0, max_value=MAX_TIMESTAMP))
>   def test_timestamp(self, timestamp):

tests/test_output_items.py:35:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<tests.test_output_items.TestLogItem object at 0x...>, 1)
kwargs = {}, arg_drawtime = 0.011646200000541285, start = 226804.4985669
result = None, finish = 226804.8166043, in_drawtime = 0.0
runtime = datetime.timedelta(microseconds=318037)
current_deadline = datetime.timedelta(microseconds=250000)

    @proxies(self.test)
    def test(*args, **kwargs):
        arg_drawtime = math.fsum(data.draw_times.values())
        start = time.perf_counter()
        try:
            result = self.test(*args, **kwargs)
        finally:
            finish = time.perf_counter()
            in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime
            runtime = datetime.timedelta(seconds=finish - start - in_drawtime)
            self._timing_features = {
                "execute_test": finish - start - in_drawtime,
                **data.draw_times,
            }

        if (current_deadline := self.settings.deadline) is not None:
            if not is_final:
                current_deadline = (current_deadline // 4) * 5
            if runtime >= current_deadline:
>               raise DeadlineExceeded(runtime, self.settings.deadline)
E               hypothesis.errors.DeadlineExceeded: Test took 318.04ms, which exceeds the deadline of 200.00ms

/usr/lib/python3.13/site-packages/hypothesis/core.py:845: DeadlineExceeded

During handling of the above exception, another exception occurred:

self = <tests.test_output_items.TestLogItem object at 0x...>

    @given(timestamp=integers(min_value=0, max_value=MAX_TIMESTAMP))
>   def test_timestamp(self, timestamp):

tests/test_output_items.py:35:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <...>
status = Status.INTERESTING
interesting_origin = InterestingOrigin(exc_type=<class 'hypothesis.errors.DeadlineExceeded'>, filename='/usr/lib/python3.13/site-packages/hypothesis/core.py', lineno=845, context=(), group_elems=())

    def conclude_test(self, status, interesting_origin):
        """Says that ``status`` occurred at node ``node``. This updates the
        node if necessary and checks for consistency."""
        if status == Status.OVERRUN:
            return
        i = self.__index_in_current_node
        node = self.__current_node

        if i < len(node.values) or isinstance(node.transition, Branch):
            inconsistent_generation()

        new_transition = Conclusion(status, interesting_origin)

        if node.transition is not None and node.transition != new_transition:
            # As an, I'm afraid, horrible bodge, we deliberately ignore flakiness
            # where tests go from interesting to valid, because it's much easier
            # to produce good error messages for these further up the stack.
            if isinstance(node.transition, Conclusion) and (
                node.transition.status != Status.INTERESTING
                or new_transition.status != Status.VALID
            ):
>               raise Flaky(
                    f"Inconsistent test results! Test case was {node.transition!r} "
                    f"on first run but {new_transition!r} on second"
                )
E               hypothesis.errors.Flaky: Inconsistent test results! Test case was Conclusion(status=Status.VALID, interesting_origin=None) on first run but Conclusion(status=Status.INTERESTING, interesting_origin=InterestingOrigin(exc_type=<class 'hypothesis.errors.DeadlineExceeded'>, filename='/usr/lib/python3.13/site-packages/hypothesis/core.py', lineno=845, context=(), group_elems=())) on second

/usr/lib/python3.13/site-packages/hypothesis/internal/conjecture/datatree.py:1007: Flaky
________________________ TestRawDB.test_complete_weeks _________________________
[gw27] linux -- Python 3.13.2 /usr/bin/python3

self = <tests.test_totals.TestRawDB object at 0x...>, data = data(...)
rawdb = <...>

    @settings(suppress_health_check=(HealthCheck.function_scoped_fixture,))
>   @given(data=data())

tests/test_totals.py:142:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<tests.test_totals.TestRawDB object at 0x...>, data(...), <...>)
kwargs = {}, arg_drawtime = 0.005958299996564165, start = 226821.7157508
result = None, finish = 226822.2874996, in_drawtime = 0.1923399000079371
runtime = datetime.timedelta(microseconds=379409)
current_deadline = datetime.timedelta(microseconds=250000)

    @proxies(self.test)
    def test(*args, **kwargs):
        arg_drawtime = math.fsum(data.draw_times.values())
        start = time.perf_counter()
        try:
            result = self.test(*args, **kwargs)
        finally:
            finish = time.perf_counter()
            in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime
            runtime = datetime.timedelta(seconds=finish - start - in_drawtime)
            self._timing_features = {
                "execute_test": finish - start - in_drawtime,
                **data.draw_times,
            }

        if (current_deadline := self.settings.deadline) is not None:
            if not is_final:
                current_deadline = (current_deadline // 4) * 5
            if runtime >= current_deadline:
>               raise DeadlineExceeded(runtime, self.settings.deadline)
E               hypothesis.errors.DeadlineExceeded: Test took 379.41ms, which exceeds the deadline of 200.00ms

/usr/lib/python3.13/site-packages/hypothesis/core.py:845: DeadlineExceeded

During handling of the above exception, another exception occurred:

self = <tests.test_totals.TestRawDB object at 0x...>
rawdb = <...>

    @settings(suppress_health_check=(HealthCheck.function_scoped_fixture,))
>   @given(data=data())

tests/test_totals.py:142:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <...>
status = Status.INTERESTING
interesting_origin = InterestingOrigin(exc_type=<class 'hypothesis.errors.DeadlineExceeded'>, filename='/usr/lib/python3.13/site-packages/hypothesis/core.py', lineno=845, context=(), group_elems=())

    def conclude_test(self, status, interesting_origin):
        """Says that ``status`` occurred at node ``node``. This updates the
        node if necessary and checks for consistency."""
        if status == Status.OVERRUN:
            return
        i = self.__index_in_current_node
        node = self.__current_node

        if i < len(node.values) or isinstance(node.transition, Branch):
            inconsistent_generation()

        new_transition = Conclusion(status, interesting_origin)

        if node.transition is not None and node.transition != new_transition:
            # As an, I'm afraid, horrible bodge, we deliberately ignore flakiness
            # where tests go from interesting to valid, because it's much easier
            # to produce good error messages for these further up the stack.
            if isinstance(node.transition, Conclusion) and (
                node.transition.status != Status.INTERESTING
                or new_transition.status != Status.VALID
            ):
>               raise Flaky(
                    f"Inconsistent test results! Test case was {node.transition!r} "
                    f"on first run but {new_transition!r} on second"
                )
E               hypothesis.errors.Flaky: Inconsistent test results! Test case was Conclusion(status=Status.VALID, interesting_origin=None) on first run but Conclusion(status=Status.INTERESTING, interesting_origin=InterestingOrigin(exc_type=<class 'hypothesis.errors.DeadlineExceeded'>, filename='/usr/lib/python3.13/site-packages/hypothesis/core.py', lineno=845, context=(), group_elems=())) on second

/usr/lib/python3.13/site-packages/hypothesis/internal/conjecture/datatree.py:1007: Flaky
__________________________ TestLogItem.test_datetime ___________________________
[gw26] linux -- Python 3.13.2 /usr/bin/python3

self = <tests.test_output_items.TestLogItem object at 0x...>
timestamp = 287093976

    @given(timestamp=integers(min_value=0, max_value=MAX_TIMESTAMP))
>   def test_datetime(self, timestamp):

tests/test_output_items.py:28:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<tests.test_output_items.TestLogItem object at 0x...>, 287093976)
kwargs = {}, arg_drawtime = 0.019086299987975508, start = 226806.8873696
result = None, finish = 226807.1914009, in_drawtime = 0.0
runtime = datetime.timedelta(microseconds=304031)
current_deadline = datetime.timedelta(microseconds=250000)

    @proxies(self.test)
    def test(*args, **kwargs):
        arg_drawtime = math.fsum(data.draw_times.values())
        start = time.perf_counter()
        try:
            result = self.test(*args, **kwargs)
        finally:
            finish = time.perf_counter()
            in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime
            runtime = datetime.timedelta(seconds=finish - start - in_drawtime)
            self._timing_features = {
                "execute_test": finish - start - in_drawtime,
                **data.draw_times,
            }

        if (current_deadline := self.settings.deadline) is not None:
            if not is_final:
                current_deadline = (current_deadline // 4) * 5
            if runtime >= current_deadline:
>               raise DeadlineExceeded(runtime, self.settings.deadline)
E               hypothesis.errors.DeadlineExceeded: Test took 304.03ms, which exceeds the deadline of 200.00ms

/usr/lib/python3.13/site-packages/hypothesis/core.py:845: DeadlineExceeded

The above exception was the direct cause of the following exception:

self = <tests.test_output_items.TestLogItem object at 0x...>

    @given(timestamp=integers(min_value=0, max_value=MAX_TIMESTAMP))
>   def test_datetime(self, timestamp):

tests/test_output_items.py:28:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <...>, data = ConjectureData(VALID, 8 bytes, frozen)

    def execute_once(
        self,
        data,
        *,
        print_example=False,
        is_final=False,
        expected_failure=None,
        example_kwargs=None,
    ):
        """Run the test function once, using ``data`` as input.

        If the test raises an exception, it will propagate through to the
        caller of this method. Depending on its type, this could represent
        an ordinary test failure, or a fatal error, or a control exception.

        If this method returns normally, the test might have passed, or
        it might have placed ``data`` in an unsuccessful state and then
        swallowed the corresponding control exception.
        """
        self.ever_executed = True
        data.is_find = self.is_find

        self._string_repr = ""
        text_repr = None
        if self.settings.deadline is None and not TESTCASE_CALLBACKS:
            test = self.test
        else:

            @proxies(self.test)
            def test(*args, **kwargs):
                arg_drawtime = math.fsum(data.draw_times.values())
                start = time.perf_counter()
                try:
                    result = self.test(*args, **kwargs)
                finally:
                    finish = time.perf_counter()
                    in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime
                    runtime = datetime.timedelta(seconds=finish - start - in_drawtime)
                    self._timing_features = {
                        "execute_test": finish - start - in_drawtime,
                        **data.draw_times,
                    }

                if (current_deadline := self.settings.deadline) is not None:
                    if not is_final:
                        current_deadline = (current_deadline // 4) * 5
                    if runtime >= current_deadline:
                        raise DeadlineExceeded(runtime, self.settings.deadline)
                return result

        def run(data):
            # Set up dynamic context needed by a single test run.
            if self.stuff.selfy is not None:
                data.hypothesis_runner = self.stuff.selfy
            # Generate all arguments to the test function.
            args = self.stuff.args
            kwargs = dict(self.stuff.kwargs)
            if example_kwargs is None:
                kw, argslices = context.prep_args_kwargs_from_strategies(
                    self.stuff.given_kwargs
                )
            else:
                kw = example_kwargs
                argslices = {}
            kwargs.update(kw)
            if expected_failure is not None:
                nonlocal text_repr
                text_repr = repr_call(test, args, kwargs)
                if text_repr in self.xfail_example_reprs:
                    warnings.warn(
                        f"We generated {text_repr}, which seems identical "
                        "to one of your `@example(...).xfail()` cases. "
                        "Revise the strategy to avoid this overlap?",
                        HypothesisWarning,
                        # Checked in test_generating_xfailed_examples_warns!
                        stacklevel=6,
                    )

            if print_example or current_verbosity() >= Verbosity.verbose:
                printer = RepresentationPrinter(context=context)
                if print_example:
                    printer.text("Falsifying example:")
                else:
                    printer.text("Trying example:")

                if self.print_given_args:
                    printer.text(" ")
                    printer.repr_call(
                        test.__name__,
                        args,
                        kwargs,
                        force_split=True,
                        arg_slices=argslices,
                        leading_comment=(
                            "# " + context.data.slice_comments[(0, 0)]
                            if (0, 0) in context.data.slice_comments
                            else None
                        ),
                    )
                report(printer.getvalue())

            if TESTCASE_CALLBACKS:
                printer = RepresentationPrinter(context=context)
                printer.repr_call(
                    test.__name__,
                    args,
                    kwargs,
                    force_split=True,
                    arg_slices=argslices,
                    leading_comment=(
                        "# " + context.data.slice_comments[(0, 0)]
                        if (0, 0) in context.data.slice_comments
                        else None
                    ),
                )
                self._string_repr = printer.getvalue()
                self._jsonable_arguments = {
                    **dict(enumerate(map(to_jsonable, args))),
                    **{k: to_jsonable(v) for k, v in kwargs.items()},
                }

            try:
                return test(*args, **kwargs)
            except TypeError as e:
                # If we sampled from a sequence of strategies, AND failed with a
                # TypeError, *AND that exception mentions SearchStrategy*, add a note:
                if "SearchStrategy" in str(e) and hasattr(
                    data, "_sampled_from_all_strategies_elements_message"
                ):
                    msg, format_arg = data._sampled_from_all_strategies_elements_message
                    add_note(e, msg.format(format_arg))
                raise

        # self.test_runner can include the execute_example method, or setup/teardown
        # _example, so it's important to get the PRNG and build context in place first.
        with local_settings(self.settings):
            with deterministic_PRNG():
                with BuildContext(data, is_final=is_final) as context:
                    # Run the test function once, via the executor hook.
                    # In most cases this will delegate straight to `run(data)`.
                    result = self.test_runner(data, run)

        # If a failure was expected, it should have been raised already, so
        # instead raise an appropriate diagnostic error.
        if expected_failure is not None:
            exception, traceback = expected_failure
            if isinstance(exception, DeadlineExceeded) and (
                runtime_secs := self._timing_features.get("execute_test")
            ):
                report(
                    "Unreliable test timings! On an initial run, this "
                    "test took %.2fms, which exceeded the deadline of "
                    "%.2fms, but on a subsequent run it took %.2f ms, "
                    "which did not. If you expect this sort of "
                    "variability in your test timings, consider turning "
                    "deadlines off for this test by setting deadline=None."
                    % (
                        exception.runtime.total_seconds() * 1000,
                        self.settings.deadline.total_seconds() * 1000,
                        runtime_secs * 1000,
                    )
                )
            else:
                report("Failed to reproduce exception. Expected: \n" + traceback)
>           raise Flaky(
                f"Hypothesis {text_repr} produces unreliable results: "
                "Falsified on the first call but did not on a subsequent one"
            ) from exception
E           hypothesis.errors.Flaky: Hypothesis test_datetime(self=<tests.test_output_items.TestLogItem object at 0x...>, timestamp=287093976) produces unreliable results: Falsified on the first call but did not on a subsequent one
E           Falsifying example: test_datetime(
E               self=<tests.test_output_items.TestLogItem object at 0x...>,
E               timestamp=287093976,
E           )
E           Unreliable test timings! On an initial run, this test took 304.03ms, which exceeded the deadline of 200.00ms, but on a subsequent run it took 7.43 ms, which did not. If you expect this sort of variability in your test timings, consider turning deadlines off for this test by setting deadline=None.

/usr/lib/python3.13/site-packages/hypothesis/core.py:962: Flaky
________________________ TestRawDBU.test_complete_weeks ________________________
[gw16] linux -- Python 3.13.2 /usr/bin/python3

self = <tests.test_totals.TestRawDBU object at 0x...>, data = data(...)
rawdb = <...>

    @settings(suppress_health_check=(HealthCheck.function_scoped_fixture,))
>   @given(data=data())

tests/test_totals.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = (<tests.test_totals.TestRawDBU object at 0x...>, data(...), <...>)
kwargs = {}, arg_drawtime = 0.09155370001099072, start = 226815.2409182
result = None, finish = 226815.754732, in_drawtime = 0.19020839998847805
runtime = datetime.timedelta(microseconds=323605)
current_deadline = datetime.timedelta(microseconds=250000)

    @proxies(self.test)
    def test(*args, **kwargs):
        arg_drawtime = math.fsum(data.draw_times.values())
        start = time.perf_counter()
        try:
            result = self.test(*args, **kwargs)
        finally:
            finish = time.perf_counter()
            in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime
            runtime = datetime.timedelta(seconds=finish - start - in_drawtime)
            self._timing_features = {
                "execute_test": finish - start - in_drawtime,
                **data.draw_times,
            }

        if (current_deadline := self.settings.deadline) is not None:
            if not is_final:
                current_deadline = (current_deadline // 4) * 5
            if runtime >= current_deadline:
>               raise DeadlineExceeded(runtime, self.settings.deadline)
E               hypothesis.errors.DeadlineExceeded: Test took 323.60ms, which exceeds the deadline of 200.00ms

/usr/lib/python3.13/site-packages/hypothesis/core.py:845: DeadlineExceeded

The above exception was the direct cause of the following exception:

self = <tests.test_totals.TestRawDBU object at 0x...>
rawdb = <...>

    @settings(suppress_health_check=(HealthCheck.function_scoped_fixture,))
>   @given(data=data())

tests/test_totals.py:180:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <...>, data = ConjectureData(VALID, 1 bytes, frozen)

    def execute_once(
        self,
        data,
        *,
        print_example=False,
        is_final=False,
        expected_failure=None,
        example_kwargs=None,
    ):
        """Run the test function once, using ``data`` as input.

        If the test raises an exception, it will propagate through to the
        caller of this method. Depending on its type, this could represent
        an ordinary test failure, or a fatal error, or a control exception.

        If this method returns normally, the test might have passed, or
        it might have placed ``data`` in an unsuccessful state and then
        swallowed the corresponding control exception.
        """
        self.ever_executed = True
        data.is_find = self.is_find

        self._string_repr = ""
        text_repr = None
        if self.settings.deadline is None and not TESTCASE_CALLBACKS:
            test = self.test
        else:

            @proxies(self.test)
            def test(*args, **kwargs):
                arg_drawtime = math.fsum(data.draw_times.values())
                start = time.perf_counter()
                try:
                    result = self.test(*args, **kwargs)
                finally:
                    finish = time.perf_counter()
                    in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime
                    runtime = datetime.timedelta(seconds=finish - start - in_drawtime)
                    self._timing_features = {
                        "execute_test": finish - start - in_drawtime,
                        **data.draw_times,
                    }

                if (current_deadline := self.settings.deadline) is not None:
                    if not is_final:
                        current_deadline = (current_deadline // 4) * 5
                    if runtime >= current_deadline:
                        raise DeadlineExceeded(runtime, self.settings.deadline)
                return result

        def run(data):
            # Set up dynamic context needed by a single test run.
            if self.stuff.selfy is not None:
                data.hypothesis_runner = self.stuff.selfy
            # Generate all arguments to the test function.
            args = self.stuff.args
            kwargs = dict(self.stuff.kwargs)
            if example_kwargs is None:
                kw, argslices = context.prep_args_kwargs_from_strategies(
                    self.stuff.given_kwargs
                )
            else:
                kw = example_kwargs
                argslices = {}
            kwargs.update(kw)
            if expected_failure is not None:
                nonlocal text_repr
                text_repr = repr_call(test, args, kwargs)
                if text_repr in self.xfail_example_reprs:
                    warnings.warn(
                        f"We generated {text_repr}, which seems identical "
                        "to one of your `@example(...).xfail()` cases. "
                        "Revise the strategy to avoid this overlap?",
                        HypothesisWarning,
                        # Checked in test_generating_xfailed_examples_warns!
                        stacklevel=6,
                    )

            if print_example or current_verbosity() >= Verbosity.verbose:
                printer = RepresentationPrinter(context=context)
                if print_example:
                    printer.text("Falsifying example:")
                else:
                    printer.text("Trying example:")

                if self.print_given_args:
                    printer.text(" ")
                    printer.repr_call(
                        test.__name__,
                        args,
                        kwargs,
                        force_split=True,
                        arg_slices=argslices,
                        leading_comment=(
                            "# " + context.data.slice_comments[(0, 0)]
                            if (0, 0) in context.data.slice_comments
                            else None
                        ),
                    )
                report(printer.getvalue())

            if TESTCASE_CALLBACKS:
                printer = RepresentationPrinter(context=context)
                printer.repr_call(
                    test.__name__,
                    args,
                    kwargs,
                    force_split=True,
                    arg_slices=argslices,
                    leading_comment=(
                        "# " + context.data.slice_comments[(0, 0)]
                        if (0, 0) in context.data.slice_comments
                        else None
                    ),
                )
                self._string_repr = printer.getvalue()
                self._jsonable_arguments = {
                    **dict(enumerate(map(to_jsonable, args))),
                    **{k: to_jsonable(v) for k, v in kwargs.items()},
                }

            try:
                return test(*args, **kwargs)
            except TypeError as e:
                # If we sampled from a sequence of strategies, AND failed with a
                # TypeError, *AND that exception mentions SearchStrategy*, add a note:
                if "SearchStrategy" in str(e) and hasattr(
                    data, "_sampled_from_all_strategies_elements_message"
                ):
                    msg, format_arg = data._sampled_from_all_strategies_elements_message
                    add_note(e, msg.format(format_arg))
                raise

        # self.test_runner can include the execute_example method, or setup/teardown
        # _example, so it's important to get the PRNG and build context in place first.
        with local_settings(self.settings):
            with deterministic_PRNG():
                with BuildContext(data, is_final=is_final) as context:
                    # Run the test function once, via the executor hook.
                    # In most cases this will delegate straight to `run(data)`.
                    result = self.test_runner(data, run)

        # If a failure was expected, it should have been raised already, so
        # instead raise an appropriate diagnostic error.
        if expected_failure is not None:
            exception, traceback = expected_failure
            if isinstance(exception, DeadlineExceeded) and (
                runtime_secs := self._timing_features.get("execute_test")
            ):
                report(
                    "Unreliable test timings! On an initial run, this "
                    "test took %.2fms, which exceeded the deadline of "
                    "%.2fms, but on a subsequent run it took %.2f ms, "
                    "which did not. If you expect this sort of "
                    "variability in your test timings, consider turning "
                    "deadlines off for this test by setting deadline=None."
                    % (
                        exception.runtime.total_seconds() * 1000,
                        self.settings.deadline.total_seconds() * 1000,
                        runtime_secs * 1000,
                    )
                )
            else:
                report("Failed to reproduce exception. Expected: \n" + traceback)
>           raise Flaky(
                f"Hypothesis {text_repr} produces unreliable results: "
                "Falsified on the first call but did not on a subsequent one"
            ) from exception
E           hypothesis.errors.Flaky: Hypothesis test_complete_weeks(self=<tests.test_totals.TestRawDBU object at 0x...>, data=data(...), rawdb=<...>) produces unreliable results: Falsified on the first call but did not on a subsequent one
E           Falsifying example: test_complete_weeks(
E               self=<tests.test_totals.TestRawDBU object at 0x...>,
E               rawdb=<...>,
E               data=data(...),
E           )
E           Draw 1: None
E           Unreliable test timings! On an initial run, this test took 323.60ms, which exceeded the deadline of 200.00ms, but on a subsequent run it took 15.31 ms, which did not. If you expect this sort of variability in your test timings, consider turning deadlines off for this test by setting deadline=None.

/usr/lib/python3.13/site-packages/hypothesis/core.py:962: Flaky
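[Editorial note: every failure above and below follows the same pattern: a Hypothesis example ran slower than the per-example deadline on the loaded builder, and because the timing did not reproduce on a re-run, Hypothesis escalated DeadlineExceeded to Flaky. The error text itself names the fix: relax or disable the deadline. A minimal sketch of that fix, assuming the tests keep their current @given signatures; the test names mirror the log, but the bodies and strategies here are placeholders, not the real mirrors_countme tests.]

    import datetime as dt

    from hypothesis import given, settings, strategies as st

    # deadline=None disables the per-example timer entirely, so a slow
    # builder can never turn a passing example into DeadlineExceeded/Flaky.
    @settings(deadline=None)
    @given(timestamp=st.integers(min_value=0))
    def test_timestamp(timestamp):
        assert timestamp >= 0  # placeholder assertion

    # Alternative: keep a deadline but raise it well above observed runtimes.
    @settings(deadline=dt.timedelta(milliseconds=500))
    @given(count=st.integers(min_value=0))
    def test_display(count):
        assert count >= 0  # placeholder assertion

[Either form silences only the timing check; genuine assertion failures still fail the test.]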
/usr/lib/python3.13/site-packages/hypothesis/core.py:962: Flaky ____________________ TestCSVCountItem.test_from_totalsitem _____________________ [gw22] linux -- Python 3.13.2 /usr/bin/python3 self = weeknum = 53509 @given( > weeknum=integers( min_value=0, max_value=( (dt.date(dt.MAXYEAR, 12, 31).toordinal() - dt.date(1970, 1, 1).toordinal()) // 7 - 2 ), ) ) tests/test_totals.py:49: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (, 53509) kwargs = {}, arg_drawtime = 0.014521900011459365, start = 226819.3374288 result = None, finish = 226819.6235532, in_drawtime = 0.0 runtime = datetime.timedelta(microseconds=286124) current_deadline = datetime.timedelta(microseconds=250000) @proxies(self.test) def test(*args, **kwargs): arg_drawtime = math.fsum(data.draw_times.values()) start = time.perf_counter() try: result = self.test(*args, **kwargs) finally: finish = time.perf_counter() in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime runtime = datetime.timedelta(seconds=finish - start - in_drawtime) self._timing_features = { "execute_test": finish - start - in_drawtime, **data.draw_times, } if (current_deadline := self.settings.deadline) is not None: if not is_final: current_deadline = (current_deadline // 4) * 5 if runtime >= current_deadline: > raise DeadlineExceeded(runtime, self.settings.deadline) E hypothesis.errors.DeadlineExceeded: Test took 286.12ms, which exceeds the deadline of 200.00ms /usr/lib/python3.13/site-packages/hypothesis/core.py:845: DeadlineExceeded The above exception was the direct cause of the following exception: self = @given( > weeknum=integers( min_value=0, max_value=( (dt.date(dt.MAXYEAR, 12, 31).toordinal() - dt.date(1970, 1, 1).toordinal()) // 7 - 2 ), ) ) tests/test_totals.py:49: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = data = ConjectureData(VALID, 4 bytes, frozen) def execute_once( self, data, *, print_example=False, is_final=False, expected_failure=None, example_kwargs=None, ): """Run the test function once, using ``data`` as input. If the test raises an exception, it will propagate through to the caller of this method. Depending on its type, this could represent an ordinary test failure, or a fatal error, or a control exception. If this method returns normally, the test might have passed, or it might have placed ``data`` in an unsuccessful state and then swallowed the corresponding control exception. """ self.ever_executed = True data.is_find = self.is_find self._string_repr = "" text_repr = None if self.settings.deadline is None and not TESTCASE_CALLBACKS: test = self.test else: @proxies(self.test) def test(*args, **kwargs): arg_drawtime = math.fsum(data.draw_times.values()) start = time.perf_counter() try: result = self.test(*args, **kwargs) finally: finish = time.perf_counter() in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime runtime = datetime.timedelta(seconds=finish - start - in_drawtime) self._timing_features = { "execute_test": finish - start - in_drawtime, **data.draw_times, } if (current_deadline := self.settings.deadline) is not None: if not is_final: current_deadline = (current_deadline // 4) * 5 if runtime >= current_deadline: raise DeadlineExceeded(runtime, self.settings.deadline) return result def run(data): # Set up dynamic context needed by a single test run. if self.stuff.selfy is not None: data.hypothesis_runner = self.stuff.selfy # Generate all arguments to the test function. 
args = self.stuff.args kwargs = dict(self.stuff.kwargs) if example_kwargs is None: kw, argslices = context.prep_args_kwargs_from_strategies( self.stuff.given_kwargs ) else: kw = example_kwargs argslices = {} kwargs.update(kw) if expected_failure is not None: nonlocal text_repr text_repr = repr_call(test, args, kwargs) if text_repr in self.xfail_example_reprs: warnings.warn( f"We generated {text_repr}, which seems identical " "to one of your `@example(...).xfail()` cases. " "Revise the strategy to avoid this overlap?", HypothesisWarning, # Checked in test_generating_xfailed_examples_warns! stacklevel=6, ) if print_example or current_verbosity() >= Verbosity.verbose: printer = RepresentationPrinter(context=context) if print_example: printer.text("Falsifying example:") else: printer.text("Trying example:") if self.print_given_args: printer.text(" ") printer.repr_call( test.__name__, args, kwargs, force_split=True, arg_slices=argslices, leading_comment=( "# " + context.data.slice_comments[(0, 0)] if (0, 0) in context.data.slice_comments else None ), ) report(printer.getvalue()) if TESTCASE_CALLBACKS: printer = RepresentationPrinter(context=context) printer.repr_call( test.__name__, args, kwargs, force_split=True, arg_slices=argslices, leading_comment=( "# " + context.data.slice_comments[(0, 0)] if (0, 0) in context.data.slice_comments else None ), ) self._string_repr = printer.getvalue() self._jsonable_arguments = { **dict(enumerate(map(to_jsonable, args))), **{k: to_jsonable(v) for k, v in kwargs.items()}, } try: return test(*args, **kwargs) except TypeError as e: # If we sampled from a sequence of strategies, AND failed with a # TypeError, *AND that exception mentions SearchStrategy*, add a note: if "SearchStrategy" in str(e) and hasattr( data, "_sampled_from_all_strategies_elements_message" ): msg, format_arg = data._sampled_from_all_strategies_elements_message add_note(e, msg.format(format_arg)) raise # self.test_runner can include the execute_example method, or setup/teardown # _example, so it's important to get the PRNG and build context in place first. with local_settings(self.settings): with deterministic_PRNG(): with BuildContext(data, is_final=is_final) as context: # Run the test function once, via the executor hook. # In most cases this will delegate straight to `run(data)`. result = self.test_runner(data, run) # If a failure was expected, it should have been raised already, so # instead raise an appropriate diagnostic error. if expected_failure is not None: exception, traceback = expected_failure if isinstance(exception, DeadlineExceeded) and ( runtime_secs := self._timing_features.get("execute_test") ): report( "Unreliable test timings! On an initial run, this " "test took %.2fms, which exceeded the deadline of " "%.2fms, but on a subsequent run it took %.2f ms, " "which did not. If you expect this sort of " "variability in your test timings, consider turning " "deadlines off for this test by setting deadline=None." % ( exception.runtime.total_seconds() * 1000, self.settings.deadline.total_seconds() * 1000, runtime_secs * 1000, ) ) else: report("Failed to reproduce exception. 
Expected: \n" + traceback) > raise Flaky( f"Hypothesis {text_repr} produces unreliable results: " "Falsified on the first call but did not on a subsequent one" ) from exception E hypothesis.errors.Flaky: Hypothesis test_from_totalsitem(self=, weeknum=53509) produces unreliable results: Falsified on the first call but did not on a subsequent one E Falsifying example: test_from_totalsitem( E self=, E weeknum=53509, E ) E Unreliable test timings! On an initial run, this test took 286.12ms, which exceeded the deadline of 200.00ms, but on a subsequent run it took 0.79 ms, which did not. If you expect this sort of variability in your test timings, consider turning deadlines off for this test by setting deadline=None. /usr/lib/python3.13/site-packages/hypothesis/core.py:962: Flaky _____________________ TestDIYProgress.test_display[scaled] _____________________ [gw19] linux -- Python 3.13.2 /usr/bin/python3 self = unit_scale = True, count = 1 @pytest.mark.parametrize("unit_scale", (True, False), ids=("scaled", "unscaled")) > @given(count=integers(min_value=0, max_value=int(TEST_TOTAL * 1.01 + 1))) tests/test_progress.py:241: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (, True, 1) kwargs = {}, arg_drawtime = 0.01003629999468103, start = 226838.2710142 result = None, finish = 226838.9568296, in_drawtime = 0.0 runtime = datetime.timedelta(microseconds=685815) current_deadline = datetime.timedelta(microseconds=250000) @proxies(self.test) def test(*args, **kwargs): arg_drawtime = math.fsum(data.draw_times.values()) start = time.perf_counter() try: result = self.test(*args, **kwargs) finally: finish = time.perf_counter() in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime runtime = datetime.timedelta(seconds=finish - start - in_drawtime) self._timing_features = { "execute_test": finish - start - in_drawtime, **data.draw_times, } if (current_deadline := self.settings.deadline) is not None: if not is_final: current_deadline = (current_deadline // 4) * 5 if runtime >= current_deadline: > raise DeadlineExceeded(runtime, self.settings.deadline) E hypothesis.errors.DeadlineExceeded: Test took 685.81ms, which exceeds the deadline of 200.00ms /usr/lib/python3.13/site-packages/hypothesis/core.py:845: DeadlineExceeded The above exception was the direct cause of the following exception: self = unit_scale = True @pytest.mark.parametrize("unit_scale", (True, False), ids=("scaled", "unscaled")) > @given(count=integers(min_value=0, max_value=int(TEST_TOTAL * 1.01 + 1))) tests/test_progress.py:241: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = data = ConjectureData(VALID, 4 bytes, frozen) def execute_once( self, data, *, print_example=False, is_final=False, expected_failure=None, example_kwargs=None, ): """Run the test function once, using ``data`` as input. If the test raises an exception, it will propagate through to the caller of this method. Depending on its type, this could represent an ordinary test failure, or a fatal error, or a control exception. If this method returns normally, the test might have passed, or it might have placed ``data`` in an unsuccessful state and then swallowed the corresponding control exception. 
""" self.ever_executed = True data.is_find = self.is_find self._string_repr = "" text_repr = None if self.settings.deadline is None and not TESTCASE_CALLBACKS: test = self.test else: @proxies(self.test) def test(*args, **kwargs): arg_drawtime = math.fsum(data.draw_times.values()) start = time.perf_counter() try: result = self.test(*args, **kwargs) finally: finish = time.perf_counter() in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime runtime = datetime.timedelta(seconds=finish - start - in_drawtime) self._timing_features = { "execute_test": finish - start - in_drawtime, **data.draw_times, } if (current_deadline := self.settings.deadline) is not None: if not is_final: current_deadline = (current_deadline // 4) * 5 if runtime >= current_deadline: raise DeadlineExceeded(runtime, self.settings.deadline) return result def run(data): # Set up dynamic context needed by a single test run. if self.stuff.selfy is not None: data.hypothesis_runner = self.stuff.selfy # Generate all arguments to the test function. args = self.stuff.args kwargs = dict(self.stuff.kwargs) if example_kwargs is None: kw, argslices = context.prep_args_kwargs_from_strategies( self.stuff.given_kwargs ) else: kw = example_kwargs argslices = {} kwargs.update(kw) if expected_failure is not None: nonlocal text_repr text_repr = repr_call(test, args, kwargs) if text_repr in self.xfail_example_reprs: warnings.warn( f"We generated {text_repr}, which seems identical " "to one of your `@example(...).xfail()` cases. " "Revise the strategy to avoid this overlap?", HypothesisWarning, # Checked in test_generating_xfailed_examples_warns! stacklevel=6, ) if print_example or current_verbosity() >= Verbosity.verbose: printer = RepresentationPrinter(context=context) if print_example: printer.text("Falsifying example:") else: printer.text("Trying example:") if self.print_given_args: printer.text(" ") printer.repr_call( test.__name__, args, kwargs, force_split=True, arg_slices=argslices, leading_comment=( "# " + context.data.slice_comments[(0, 0)] if (0, 0) in context.data.slice_comments else None ), ) report(printer.getvalue()) if TESTCASE_CALLBACKS: printer = RepresentationPrinter(context=context) printer.repr_call( test.__name__, args, kwargs, force_split=True, arg_slices=argslices, leading_comment=( "# " + context.data.slice_comments[(0, 0)] if (0, 0) in context.data.slice_comments else None ), ) self._string_repr = printer.getvalue() self._jsonable_arguments = { **dict(enumerate(map(to_jsonable, args))), **{k: to_jsonable(v) for k, v in kwargs.items()}, } try: return test(*args, **kwargs) except TypeError as e: # If we sampled from a sequence of strategies, AND failed with a # TypeError, *AND that exception mentions SearchStrategy*, add a note: if "SearchStrategy" in str(e) and hasattr( data, "_sampled_from_all_strategies_elements_message" ): msg, format_arg = data._sampled_from_all_strategies_elements_message add_note(e, msg.format(format_arg)) raise # self.test_runner can include the execute_example method, or setup/teardown # _example, so it's important to get the PRNG and build context in place first. with local_settings(self.settings): with deterministic_PRNG(): with BuildContext(data, is_final=is_final) as context: # Run the test function once, via the executor hook. # In most cases this will delegate straight to `run(data)`. result = self.test_runner(data, run) # If a failure was expected, it should have been raised already, so # instead raise an appropriate diagnostic error. 
        if expected_failure is not None:
            exception, traceback = expected_failure
            if isinstance(exception, DeadlineExceeded) and (
                runtime_secs := self._timing_features.get("execute_test")
            ):
                report(
                    "Unreliable test timings! On an initial run, this "
                    "test took %.2fms, which exceeded the deadline of "
                    "%.2fms, but on a subsequent run it took %.2f ms, "
                    "which did not. If you expect this sort of "
                    "variability in your test timings, consider turning "
                    "deadlines off for this test by setting deadline=None."
                    % (
                        exception.runtime.total_seconds() * 1000,
                        self.settings.deadline.total_seconds() * 1000,
                        runtime_secs * 1000,
                    )
                )
            else:
                report("Failed to reproduce exception. Expected: \n" + traceback)
>       raise Flaky(
            f"Hypothesis {text_repr} produces unreliable results: "
            "Falsified on the first call but did not on a subsequent one"
        ) from exception
E       hypothesis.errors.Flaky: Hypothesis test_display(self=, unit_scale=True, count=1) produces unreliable results: Falsified on the first call but did not on a subsequent one
E       Falsifying example: test_display(
E           self=,
E           unit_scale=True,
E           count=1,
E       )
E       Unreliable test timings! On an initial run, this test took 685.81ms, which exceeded the deadline of 200.00ms, but on a subsequent run it took 161.37 ms, which did not. If you expect this sort of variability in your test timings, consider turning deadlines off for this test by setting deadline=None.

/usr/lib/python3.13/site-packages/hypothesis/core.py:962: Flaky
___________________________________ test_log ___________________________________
[gw28] linux -- Python 3.13.2 /usr/bin/python3

    @settings(suppress_health_check=(HealthCheck.too_slow,), deadline=datetime.timedelta(seconds=1))
>   @given(loglines=log_data())

tests/test_integrate.py:157:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

args = ([(datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv6Address('2001:1ff:ffff:ffff:ffff:ffff:ffff:fffe'), 'epel-7'...1.230'), 'Fedora'), (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv4Address('90.6.64.118'), 'centos8'), ...],)
kwargs = {}, arg_drawtime = 0.4249272999877576, start = 227182.5309457
result = None, finish = 227183.9516467, in_drawtime = 0.0
runtime = datetime.timedelta(seconds=1, microseconds=420701)
current_deadline = timedelta(milliseconds=1000)

    @proxies(self.test)
    def test(*args, **kwargs):
        arg_drawtime = math.fsum(data.draw_times.values())
        start = time.perf_counter()
        try:
            result = self.test(*args, **kwargs)
        finally:
            finish = time.perf_counter()
            in_drawtime = math.fsum(data.draw_times.values()) - arg_drawtime
            runtime = datetime.timedelta(seconds=finish - start - in_drawtime)
            self._timing_features = {
                "execute_test": finish - start - in_drawtime,
                **data.draw_times,
            }

        if (current_deadline := self.settings.deadline) is not None:
            if not is_final:
                current_deadline = (current_deadline // 4) * 5
            if runtime >= current_deadline:
>               raise DeadlineExceeded(runtime, self.settings.deadline)
E               hypothesis.errors.DeadlineExceeded: Test took 1420.70ms, which exceeds the deadline of 1000.00ms
E               Falsifying example: test_log(
E                   loglines=[
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'epel-7'),
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv4Address(3325256798), 'centos8'),
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv4Address(2851995648), 'centos8'),
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'epel-7'),
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'Fedora'),
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'centos8'),
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'Fedora'),
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv4Address(1682548399), 'centos8'),
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv4Address(3232706601), 'Fedora'),
E                       (datetime.datetime(2025, 2, 28, 16, 39, 33, 589922), IPv4Address(3221225480), 'Fedora'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'epel-7'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv4Address(3325256798), 'Fedora'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv4Address(2851995648), 'centos8'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'epel-7'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'epel-7'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'centos8'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'epel-7'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv4Address(1682548399), 'epel-7'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv4Address(3232706601), 'epel-7'),
E                       (datetime.datetime(2025, 3, 1, 16, 39, 33, 589922), IPv4Address(3221225480), 'Fedora'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'epel-7'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv4Address(3325256798), 'epel-7'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv4Address(2851995648), 'centos8'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'centos8'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'Fedora'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'centos8'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv4Address(1682548399), 'Fedora'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv4Address(3232706601), 'epel-7'),
E                       (datetime.datetime(2025, 3, 2, 16, 39, 33, 589922), IPv4Address(3221225480), 'Fedora'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'centos8'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv4Address(3325256798), 'Fedora'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv4Address(2851995648), 'Fedora'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'Fedora'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'epel-7'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'centos8'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'centos8'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv4Address(1682548399), 'epel-7'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv4Address(3232706601), 'Fedora'),
E                       (datetime.datetime(2025, 3, 3, 16, 39, 33, 589922), IPv4Address(3221225480), 'Fedora'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'epel-7'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv4Address(3325256798), 'epel-7'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv4Address(2851995648), 'Fedora'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'centos8'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'Fedora'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'centos8'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'epel-7'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv4Address(1682548399), 'centos8'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv4Address(3232706601), 'Fedora'),
E                       (datetime.datetime(2025, 3, 4, 16, 39, 33, 589922), IPv4Address(3221225480), 'epel-7'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'Fedora'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv4Address(3325256798), 'Fedora'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv4Address(2851995648), 'Fedora'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'Fedora'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'centos8'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv4Address(1682548399), 'epel-7'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv4Address(3232706601), 'epel-7'),
E                       (datetime.datetime(2025, 3, 5, 16, 39, 33, 589922), IPv4Address(3221225480), 'Fedora'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'centos8'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv4Address(3325256798), 'centos8'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv4Address(2851995648), 'centos8'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'Fedora'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'epel-7'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv4Address(1682548399), 'epel-7'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv4Address(3232706601), 'Fedora'),
E                       (datetime.datetime(2025, 3, 6, 16, 39, 33, 589922), IPv4Address(3221225480), 'Fedora'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'epel-7'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv4Address(3325256798), 'centos8'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv4Address(2851995648), 'epel-7'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'Fedora'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'epel-7'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv4Address(1682548399), 'Fedora'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv4Address(3232706601), 'epel-7'),
E                       (datetime.datetime(2025, 3, 7, 16, 39, 33, 589922), IPv4Address(3221225480), 'Fedora'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'centos8'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv4Address(3325256798), 'epel-7'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv4Address(2851995648), 'centos8'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'centos8'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'Fedora'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'centos8'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'centos8'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv4Address(1682548399), 'epel-7'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv4Address(3232706601), 'epel-7'),
E                       (datetime.datetime(2025, 3, 8, 16, 39, 33, 589922), IPv4Address(3221225480), 'centos8'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'centos8'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv4Address(3325256798), 'epel-7'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv4Address(2851995648), 'epel-7'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'centos8'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'centos8'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv4Address(1682548399), 'Fedora'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv4Address(3232706601), 'centos8'),
E                       (datetime.datetime(2025, 3, 9, 16, 39, 33, 589922), IPv4Address(3221225480), 'centos8'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'centos8'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv4Address(3325256798), 'epel-7'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv4Address(2851995648), 'epel-7'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'Fedora'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'epel-7'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'epel-7'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv4Address(1682548399), 'epel-7'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv4Address(3232706601), 'Fedora'),
E                       (datetime.datetime(2025, 3, 10, 16, 39, 33, 589922), IPv4Address(3221225480), 'centos8'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'centos8'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv4Address(3325256798), 'centos8'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv4Address(2851995648), 'centos8'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'Fedora'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'epel-7'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'epel-7'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'Fedora'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv4Address(1682548399), 'centos8'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv4Address(3232706601), 'centos8'),
E                       (datetime.datetime(2025, 3, 11, 16, 39, 33, 589922), IPv4Address(3221225480), 'centos8'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv6Address(42540528726795050063891204319802818558), 'Fedora'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv4Address(3325256798), 'epel-7'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv4Address(2851995648), 'epel-7'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv6Address(42540488320432167789079031612388147399), 'centos8'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv4Address(b"'\xac)\xe6"), 'epel-7'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv4Address(b'Z\x06@v'), 'centos8'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv4Address(b'\xf6\x05 \xa3'), 'epel-7'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv4Address(1682548399), 'epel-7'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv4Address(3232706601), 'centos8'),
E                       (datetime.datetime(2025, 3, 12, 16, 39, 33, 589922), IPv4Address(3221225480), 'epel-7'),
E                   ],
E               )

/usr/lib/python3.13/site-packages/hypothesis/core.py:845: DeadlineExceeded
---------------------------------- Hypothesis ----------------------------------
WARNING: Hypothesis has spent more than five minutes working to shrink a failing example, and stopped because it is making very slow progress.
When you re-run your tests, shrinking will resume and may take this long before aborting again.
PLEASE REPORT THIS if you can provide a reproducing example, so that we can improve shrinking performance for everyone.
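The current_deadline values in the locals above follow from the grace factor visible in the quoted Hypothesis source: on non-final runs the configured deadline is widened to (deadline // 4) * 5, i.e. 1.25x, and only the final shrunken example is replayed against the strict value. A quick check of that arithmetic (plain Python, nothing project-specific):

    import datetime

    deadline = datetime.timedelta(milliseconds=200)  # settings.deadline in the failures above
    grace = (deadline // 4) * 5  # widened deadline used on non-final runs
    assert grace == datetime.timedelta(microseconds=250000)  # matches current_deadline above

This is why a run can fail at 685.81ms against a 200.00ms deadline and still come back as Flaky rather than as a plain DeadlineExceeded: the replay simply finished in time.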
=============================== warnings summary ===============================
tests/scripts/test_countme_trim_raw.py:15: 32 warnings
  /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/tests/scripts/test_countme_trim_raw.py:15: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC).
    NOW_TIMESTAMP = int(dt.datetime.utcnow().timestamp())

tests/scripts/test_countme_trim_raw.py: 100 warnings
  /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/tests/scripts/test_countme_trim_raw.py:124: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC).
    dt_value = dt.datetime.utcfromtimestamp(value).replace(tzinfo=dt.UTC)

tests/scripts/test_countme_trim_raw.py: 100 warnings
  /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/tests/scripts/test_countme_trim_raw.py:165: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC).
    dt_value = dt.datetime.utcfromtimestamp(timestamp).replace(tzinfo=dt.UTC)

tests/test_output_items.py: 22 warnings
  /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/tests/test_output_items.py:36: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC).
    dt_value = dt.datetime.utcfromtimestamp(timestamp).replace(tzinfo=dt.UTC)

tests/test_writers.py::TestSQLiteWriter::test__sqltype
  /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/tests/test_writers.py:155: DeprecationWarning: Creating NamedTuple classes using keyword arguments is deprecated and will be disallowed in Python 3.15. Use the class-based or functional syntax instead.
    AllTypesItemTuple = NamedTuple("AllTypesItemTuple", **fieldname_typehints)

tests/test_output_items.py: 84 warnings
  /builddir/build/BUILD/python-mirrors-countme-0.1.4-build/mirrors_countme-0.1.4/tests/test_output_items.py:29: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC).
    dt_value = dt.datetime.utcfromtimestamp(timestamp).replace(tzinfo=dt.UTC)

-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
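Every DeprecationWarning above names its own replacement. A sketch of the forms the messages recommend (the timestamp value and the fieldname_typehints dict below are illustrative stand-ins; the real ones live in the test files cited above):

    import datetime as dt
    from typing import NamedTuple

    # datetime.datetime.utcnow() -> timezone-aware now():
    NOW_TIMESTAMP = int(dt.datetime.now(dt.UTC).timestamp())

    # datetime.datetime.utcfromtimestamp(ts).replace(tzinfo=dt.UTC)
    #   -> datetime.datetime.fromtimestamp(ts, dt.UTC); both yield the same aware value
    timestamp = 1700000000  # illustrative
    dt_value = dt.datetime.fromtimestamp(timestamp, dt.UTC)

    # NamedTuple("AllTypesItemTuple", **fieldname_typehints) is disallowed from
    # Python 3.15 on; the functional syntax takes (name, type) pairs instead:
    fieldname_typehints = {"timestamp": int, "host": str}  # illustrative fields
    AllTypesItemTuple = NamedTuple("AllTypesItemTuple", list(fieldname_typehints.items()))

Since fromtimestamp(ts, dt.UTC) and utcfromtimestamp(ts).replace(tzinfo=dt.UTC) produce identical aware datetimes, these swaps should be behavior-preserving.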
---------- coverage: platform linux, python 3.13.2-final-0 -----------
Name                                                  Stmts   Miss  Cover
-------------------------------------------------------------------------
mirrors_countme/__init__.py                               0      0   100%
mirrors_countme/constants.py                              8      0   100%
mirrors_countme/matchers.py                              38      0   100%
mirrors_countme/output_items.py                          33      0   100%
mirrors_countme/parse.py                                 29      0   100%
mirrors_countme/progress.py                             116      0   100%
mirrors_countme/readers.py                               29      0   100%
mirrors_countme/regex.py                                 21      0   100%
mirrors_countme/scripts/__init__.py                       0      0   100%
mirrors_countme/scripts/countme_delete_totals.py         57      0   100%
mirrors_countme/scripts/countme_parse_access_log.py      31      0   100%
mirrors_countme/scripts/countme_totals.py                18      0   100%
mirrors_countme/scripts/countme_trim_raw.py              90      0   100%
mirrors_countme/totals.py                               159      0   100%
mirrors_countme/util.py                                  47      0   100%
mirrors_countme/version.py                                3      0   100%
mirrors_countme/writers.py                               90      0   100%
-------------------------------------------------------------------------
TOTAL                                                   769      0   100%
Coverage HTML written to dir htmlcov
Coverage XML written to file coverage.xml
=========================== short test summary info ============================
FAILED tests/test_progress.py::TestDIYProgress::test_display[unscaled] - hypo...
FAILED tests/test_output_items.py::TestLogItem::test_timestamp - hypothesis.e...
FAILED tests/test_totals.py::TestRawDB::test_complete_weeks - hypothesis.erro...
FAILED tests/test_output_items.py::TestLogItem::test_datetime - hypothesis.er...
FAILED tests/test_totals.py::TestRawDBU::test_complete_weeks - hypothesis.err...
FAILED tests/test_totals.py::TestCSVCountItem::test_from_totalsitem - hypothe...
FAILED tests/test_progress.py::TestDIYProgress::test_display[scaled] - hypoth...
FAILED tests/test_integrate.py::test_log - hypothesis.errors.DeadlineExceeded...
====== 8 failed, 218 passed, 2 skipped, 339 warnings in 692.46s (0:11:32) ======
RPM build errors:
error: Bad exit status from /var/tmp/rpm-tmp.iNcg3c (%check)
    Bad exit status from /var/tmp/rpm-tmp.iNcg3c (%check)
Child return code was: 1
EXCEPTION: [Error('Command failed: \n # /usr/bin/systemd-nspawn -q -M 77b40dd8e3db448ca44c536d2f72c08f -D /var/lib/mock/f42-build-3400784-45124/root -a -u mockbuild --capability=cap_ipc_lock --bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf --bind=/dev/btrfs-control --bind=/dev/mapper/control --bind=/dev/fuse --bind=/dev/loop-control --bind=/dev/loop0 --bind=/dev/loop1 --bind=/dev/loop2 --bind=/dev/loop3 --bind=/dev/loop4 --bind=/dev/loop5 --bind=/dev/loop6 --bind=/dev/loop7 --bind=/dev/loop8 --bind=/dev/loop9 --bind=/dev/loop10 --bind=/dev/loop11 --console=pipe --setenv=TERM=vt100 --setenv=SHELL=/bin/bash --setenv=HOME=/builddir --setenv=HOSTNAME=mock --setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin \'--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"\' \'--setenv=PS1= \\s-\\v\\$ \' --setenv=LANG=C.UTF-8 --resolv-conf=off bash --login -c \'/usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec\'\n', 1)]
Traceback (most recent call last):
  File "/usr/lib/python3.13/site-packages/mockbuild/trace_decorator.py", line 93, in trace
    result = func(*args, **kw)
  File "/usr/lib/python3.13/site-packages/mockbuild/util.py", line 610, in do_with_status
    raise exception.Error("Command failed: \n # %s\n%s" % (cmd_pretty(command, env), output), child.returncode)
mockbuild.exception.Error: Command failed:
 # /usr/bin/systemd-nspawn -q -M 77b40dd8e3db448ca44c536d2f72c08f -D /var/lib/mock/f42-build-3400784-45124/root -a -u mockbuild --capability=cap_ipc_lock --bind=/tmp/mock-resolv.4clthvi5:/etc/resolv.conf --bind=/dev/btrfs-control --bind=/dev/mapper/control --bind=/dev/fuse --bind=/dev/loop-control --bind=/dev/loop0 --bind=/dev/loop1 --bind=/dev/loop2 --bind=/dev/loop3 --bind=/dev/loop4 --bind=/dev/loop5 --bind=/dev/loop6 --bind=/dev/loop7 --bind=/dev/loop8 --bind=/dev/loop9 --bind=/dev/loop10 --bind=/dev/loop11 --console=pipe --setenv=TERM=vt100 --setenv=SHELL=/bin/bash --setenv=HOME=/builddir --setenv=HOSTNAME=mock --setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin '--setenv=PROMPT_COMMAND=printf "\033]0;\007"' '--setenv=PS1= \s-\v\$ ' --setenv=LANG=C.UTF-8 --resolv-conf=off bash --login -c '/usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-mirrors-countme.spec'
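All eight failures in the summary are Hypothesis timing artifacts (Flaky or DeadlineExceeded) rather than assertion failures, and coverage still reports 100%, so the %check failure is about wall-clock deadlines on a loaded builder. A common packaging-side remedy is a suite-wide Hypothesis settings profile instead of per-test edits; a hedged sketch for the test suite's conftest.py (the profile name and the environment check are assumptions, not something this package is known to do):

    # conftest.py
    import os
    from hypothesis import HealthCheck, settings

    # On build hosts, wall-clock deadlines measure the machine, not the code.
    settings.register_profile(
        "rpmbuild",
        deadline=None,
        suppress_health_check=[HealthCheck.too_slow],
    )

    # Assumption: rpmbuild exports RPM_BUILD_ROOT while running %check.
    if os.environ.get("RPM_BUILD_ROOT"):
        settings.load_profile("rpmbuild")

With deadlines disabled the properties themselves still run to completion, so the timing-dependent failures above should no longer abort the build.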