# STDOUT: ---v---v---v---v---v--- ansible-playbook [core 2.16.0] config file = /etc/ansible/ansible.cfg configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /opt/ansible-2.16/lib/python3.11/site-packages/ansible ansible collection location = /WORKDIR/git-weekly-cibzvipf1b/.collection executable location = /opt/ansible-2.16/bin/ansible-playbook python version = 3.11.5 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/opt/ansible-2.16/bin/python) jinja version = 3.1.2 libyaml = True Using /etc/ansible/ansible.cfg as config file statically imported: /WORKDIR/git-weekly-cibzvipf1b/tests/get_services_state.yml statically imported: /WORKDIR/git-weekly-cibzvipf1b/tests/restore_services_state.yml Skipping callback 'debug', as we already have a stdout callback. Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: tests_bz1855544.yml ************************************************** 2 plays in /WORKDIR/git-weekly-cibzvipf1b/tests/tests_bz1855544.yml PLAY [all] ********************************************************************* TASK [Include vault variables] ************************************************* task path: /WORKDIR/git-weekly-cibzvipf1b/tests/tests_bz1855544.yml:4 Saturday 22 June 2024 13:21:49 +0000 (0:00:00.013) 0:00:00.013 ********* ok: [sut] => { "ansible_facts": { "pcptest_pw": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n65343431623161346664373330646165636437656265656632613961363839303132393064663934\n3137396633373562393466633037356533326566343338350a386238333034336162333932313162\n62643937336534356131376134303463306466316433366636643562633637376336653034646334\n3063663466333735390a333330366461386166633233373133326237323663333831653232646566\n3363\n" } }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cibzvipf1b/tests/vars/vault-variables.yml" ], "changed": false } PLAY [Bug 1855544 - metrics role should automate the setup of Grafana datasources] *** TASK [Gathering Facts] ********************************************************* task path: /WORKDIR/git-weekly-cibzvipf1b/tests/tests_bz1855544.yml:9 Saturday 22 June 2024 13:21:49 +0000 (0:00:00.012) 0:00:00.026 ********* ok: [sut] TASK [Stop test] *************************************************************** task path: /WORKDIR/git-weekly-cibzvipf1b/tests/tests_bz1855544.yml:17 Saturday 22 June 2024 13:21:50 +0000 (0:00:00.876) 0:00:00.903 ********* META: end_host conditional evaluated to False, continuing execution for sut skipping: [sut] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for sut" } MSG: end_host conditional evaluated to false, continuing execution for sut TASK [Get initial state of services] ******************************************* task path: /WORKDIR/git-weekly-cibzvipf1b/tests/get_services_state.yml:3 Saturday 22 June 2024 13:21:50 +0000 (0:00:00.012) 0:00:00.916 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", 
"state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": 
"dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": 
{ "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": 
{ "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Run the role] ************************************************************ task path: /WORKDIR/git-weekly-cibzvipf1b/tests/tests_bz1855544.yml:27 Saturday 22 June 2024 13:21:51 +0000 (0:00:01.539) 0:00:02.455 ********* TASK [fedora.linux_system_roles.metrics : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:3 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.023) 0:00:02.478 ********* skipping: [sut] => { "changed": false, "false_condition": "__metrics_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Elasticsearch to metrics domain list] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:8 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.014) 0:00:02.492 ********* skipping: [sut] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add SQL Server to metrics domain list] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:13 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.011) 0:00:02.504 ********* skipping: [sut] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Postfix to metrics domain list] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:18 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.011) 0:00:02.516 ********* skipping: [sut] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add bpftrace to metrics domain list] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:23 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.011) 0:00:02.527 ********* ok: [sut] => { "ansible_facts": { "__metrics_domains": [ "bpftrace" ] }, "changed": false } TASK [fedora.linux_system_roles.metrics : Setup metrics access for roles] ****** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.013) 0:00:02.541 ********* ok: [sut] => { "ansible_facts": { "__metrics_accounts": [ { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" } ] }, "changed": false } TASK [Configure Elasticsearch metrics] ***************************************** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:35 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.015) 0:00:02.557 ********* skipping: [sut] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool or metrics_into_elasticsearch | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure SQL Server metrics.] 
******************************************* task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:50 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.017) 0:00:02.574 ********* skipping: [sut] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Configure Postfix metrics.] ********************************************** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:58 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.012) 0:00:02.587 ********* skipping: [sut] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup bpftrace metrics.] ************************************************* task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:66 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.014) 0:00:02.602 ********* TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:4 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.027) 0:00:02.629 ********* ok: [sut] => (item=/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/default.yml) => { "ansible_facts": { "bpftrace_metrics_provider": "pcp" }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/default.yml" } ok: [sut] => (item=/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/RedHat.yml) => { "ansible_facts": {}, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/RedHat.yml" } skipping: [sut] => (item=/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml) => { "ansible_facts": { "__bpftrace_packages": [ "bpftrace" ], "__bpftrace_packages_pcp": [ "pcp-pmda-bpftrace" ] }, "ansible_included_var_files": [ 
"/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" } ok: [sut] => (item=/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml) => { "ansible_facts": { "__bpftrace_packages": [ "bpftrace" ], "__bpftrace_packages_pcp": [ "pcp-pmda-bpftrace" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Check if system is ostree] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:18 Saturday 22 June 2024 13:21:51 +0000 (0:00:00.035) 0:00:02.665 ********* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:23 Saturday 22 June 2024 13:21:52 +0000 (0:00:00.252) 0:00:02.918 ********* ok: [sut] => { "ansible_facts": { "__ansible_pcp_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace package names] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:27 Saturday 22 June 2024 13:21:52 +0000 (0:00:00.018) 0:00:02.936 ********* ok: [sut] => { "ansible_facts": { "__bpftrace_packages_extra": [ "bpftrace" ] }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace metrics package names] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:34 Saturday 22 June 2024 13:21:52 +0000 (0:00:00.017) 0:00:02.954 ********* ok: [sut] => { "ansible_facts": { "__bpftrace_packages_extra": [ "pcp-pmda-bpftrace", "bpftrace" ] }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Install needed bpftrace metrics packages] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:41 Saturday 22 June 2024 13:21:52 +0000 (0:00:00.016) 0:00:02.971 ********* changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: pcp-6.2.2-3.el9.x86_64", "Installed: libatomic-11.4.1-3.el9.x86_64", "Installed: python3-bcc-0.30.0-5.el9.noarch", "Installed: pcp-conf-6.2.2-3.el9.x86_64", "Installed: environment-modules-5.3.0-1.el9.x86_64", "Installed: compiler-rt-17.0.6-1.el9.x86_64", "Installed: llvm-libs-17.0.6-5.el9.x86_64", "Installed: python3-netaddr-0.8.0-5.el9.noarch", "Installed: 
gcc-toolset-13-libstdc++-devel-13.3.1-2.el9.x86_64", "Installed: policycoreutils-python-utils-3.6-2.1.el9.noarch", "Installed: gcc-toolset-13-binutils-2.40-21.el9.x86_64", "Installed: pcp-pmda-bpftrace-6.2.2-3.el9.x86_64", "Installed: python3-pcp-6.2.2-3.el9.x86_64", "Installed: tcl-1:8.6.10-7.el9.x86_64", "Installed: gcc-toolset-13-binutils-gold-2.40-21.el9.x86_64", "Installed: bpftrace-0.20.4-3.el9.x86_64", "Installed: gcc-toolset-13-gcc-13.3.1-2.el9.x86_64", "Installed: gcc-toolset-13-runtime-13.0-2.el9.x86_64", "Installed: gcc-toolset-13-gcc-c++-13.3.1-2.el9.x86_64", "Installed: libuv-1:1.42.0-1.el9.x86_64", "Installed: bcc-0.30.0-5.el9.x86_64", "Installed: bcc-tools-0.30.0-5.el9.x86_64", "Installed: python3-pyelftools-0.27-4.el9.noarch", "Installed: scl-utils-1:2.0.3-4.el9.x86_64", "Installed: clang-libs-17.0.6-5.el9.x86_64", "Installed: pcp-libs-6.2.2-3.el9.x86_64", "Installed: libomp-17.0.6-1.el9.x86_64", "Installed: clang-resource-filesystem-17.0.6-5.el9.noarch", "Installed: pcp-selinux-6.2.2-3.el9.x86_64", "Installed: libomp-devel-17.0.6-1.el9.x86_64" ] } lsrpackages: bpftrace pcp-pmda-bpftrace TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Extract allowed bpftrace user accounts] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:48 Saturday 22 June 2024 13:22:37 +0000 (0:00:45.747) 0:00:48.718 ********* ok: [sut] => (item={'user': 'metrics', 'sasluser': 'metrics', 'saslpassword': 'metrics'}) => { "ansible_facts": { "__bpftrace_usernames": "root,metrics" }, "ansible_loop_var": "item", "changed": false, "item": { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" } } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Ensure PCP bpftrace configuration directory exists] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:54 Saturday 22 June 2024 13:22:37 +0000 (0:00:00.020) 0:00:48.738 ********* ok: [sut] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/bpftrace", "secontext": "system_u:object_r:etc_t:s0", "size": 44, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Ensure PCP bpftrace agent is configured] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:61 Saturday 22 June 2024 13:22:38 +0000 (0:00:00.346) 0:00:49.085 ********* changed: [sut] => { "changed": true, "checksum": "2559785fc812966eeb50dc5f6f22139195666360", "dest": "/etc/pcp/bpftrace/bpftrace.conf", "gid": 0, "group": "root", "md5sum": "909e6f7319f4f70cee4e4a7569e3c635", "mode": "0600", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1048, "src": "/root/.ansible/tmp/ansible-tmp-1719062558.350349-2590-246733738969920/source", "state": "file", "uid": 0 } TASK [Setup metric querying service.] 
****************************************** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:75 Saturday 22 June 2024 13:22:39 +0000 (0:00:00.910) 0:00:49.996 ********* TASK [fedora.linux_system_roles.private_metrics_subrole_redis : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_redis/tasks/main.yml:4 Saturday 22 June 2024 13:22:39 +0000 (0:00:00.031) 0:00:50.028 ********* The conditional check 'item is file' failed. The error was: error while evaluating conditional (item is file): Unable to look up a name or access an attribute in template string ({% if item is file %} True {% else %} False {% endif %}). Make sure your variable name does not contain invalid characters like '-': stat: path should be string, bytes, os.PathLike or integer, not AnsibleUndefined. stat: path should be string, bytes, os.PathLike or integer, not AnsibleUndefined. Unable to look up a name or access an attribute in template string ({% if item is file %} True {% else %} False {% endif %}). Make sure your variable name does not contain invalid characters like '-': stat: path should be string, bytes, os.PathLike or integer, not AnsibleUndefined. stat: path should be string, bytes, os.PathLike or integer, not AnsibleUndefined The error appears to be in '/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_redis/tasks/main.yml': line 4, column 3, but may be elsewhere in the file depending on the exact syntax problem. The offending line appears to be: # yamllint disable rule:line-length - name: Set platform/version specific variables ^ here fatal: [sut]: FAILED! => {} MSG: 'dict object' has no attribute 'ansible_architecture'. 'dict object' has no attribute 'ansible_architecture' TASK [Handle test failure] ***************************************************** task path: /WORKDIR/git-weekly-cibzvipf1b/tests/tests_bz1855544.yml:47 Saturday 22 June 2024 13:22:39 +0000 (0:00:00.017) 0:00:50.045 ********* included: /WORKDIR/git-weekly-cibzvipf1b/tests/handle_test_failure.yml for sut TASK [Collect logs] ************************************************************ task path: /WORKDIR/git-weekly-cibzvipf1b/tests/handle_test_failure.yml:2 Saturday 22 June 2024 13:22:39 +0000 (0:00:00.015) 0:00:50.061 ********* ok: [sut] => { "changed": false, "cmd": "journalctl -ex\necho '##################'\necho List of SELinux AVCs - note list may be empty\ngrep type=AVC /var/log/audit/audit.log\necho '##################'\nls -alrtF /run\nif [ -d /run/pcp ]; then\n ls -alrtF /run/pcp\nelse\n echo ERROR - /run/pcp does not exist\nfi\n", "delta": "0:00:00.045234", "end": "2024-06-22 13:22:39.614609", "rc": 0, "start": "2024-06-22 13:22:39.569375" } STDOUT: Jun 22 13:14:00 localhost systemd[1]: run-credentials-systemd\x2dsysusers.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dsysusers.service.mount has successfully entered the 'dead' state. Jun 22 13:14:00 localhost systemd[1]: initrd-cleanup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-cleanup.service has successfully entered the 'dead' state. 
Jun 22 13:14:00 localhost systemd[1]: Finished Cleaning Up and Shutting Down Daemons. ░░ Subject: A start job for unit initrd-cleanup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-cleanup.service has finished successfully. ░░ ░░ The job identifier is 56. Jun 22 13:14:00 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-udevadm-cleanup-db.service has successfully entered the 'dead' state. Jun 22 13:14:00 localhost systemd[1]: Finished Cleanup udev Database. ░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-udevadm-cleanup-db.service has finished successfully. ░░ ░░ The job identifier is 68. Jun 22 13:14:00 localhost systemd[1]: Reached target Switch Root. ░░ Subject: A start job for unit initrd-switch-root.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-switch-root.target has finished successfully. ░░ ░░ The job identifier is 59. Jun 22 13:14:00 localhost systemd[1]: Starting Switch Root... ░░ Subject: A start job for unit initrd-switch-root.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-switch-root.service has begun execution. ░░ ░░ The job identifier is 65. Jun 22 13:14:01 localhost kernel: memfd_create() without MFD_EXEC nor MFD_NOEXEC_SEAL, pid=1 'systemd' Jun 22 13:14:01 localhost systemd[1]: Switching root. Jun 22 13:14:01 localhost systemd-journald[229]: Journal stopped ░░ Subject: The journal has been stopped ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has shut down and closed all currently ░░ active journal files. Jun 22 13:14:08 localhost systemd-journald[229]: Received SIGTERM from PID 1 (systemd). Jun 22 13:14:08 localhost kernel: audit: type=1404 audit(1719062044.021:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1 Jun 22 13:14:08 localhost kernel: SELinux: policy capability network_peer_controls=1 Jun 22 13:14:08 localhost kernel: SELinux: policy capability open_perms=1 Jun 22 13:14:08 localhost kernel: SELinux: policy capability extended_socket_class=1 Jun 22 13:14:08 localhost kernel: SELinux: policy capability always_check_network=0 Jun 22 13:14:08 localhost kernel: SELinux: policy capability cgroup_seclabel=1 Jun 22 13:14:08 localhost kernel: SELinux: policy capability nnp_nosuid_transition=1 Jun 22 13:14:08 localhost kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jun 22 13:14:08 localhost kernel: audit: type=1403 audit(1719062044.582:3): auid=4294967295 ses=4294967295 lsm=selinux res=1 Jun 22 13:14:08 localhost systemd[1]: Successfully loaded SELinux policy in 564.812ms. Jun 22 13:14:08 localhost systemd[1]: Relabelled /dev, /dev/shm, /run, /sys/fs/cgroup in 23.028ms. 
Jun 22 13:14:08 localhost systemd[1]: systemd 252-35.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS -FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) Jun 22 13:14:08 localhost systemd[1]: Detected virtualization xen. Jun 22 13:14:08 localhost systemd[1]: Detected architecture x86-64. Jun 22 13:14:08 localhost systemd[1]: Initializing machine ID from random generator. Jun 22 13:14:08 localhost systemd[1]: Installed transient /etc/machine-id file. Jun 22 13:14:08 localhost systemd-rc-local-generator[443]: /etc/rc.d/rc.local is not marked executable, skipping. Jun 22 13:14:08 localhost systemd[1]: initrd-switch-root.service: Deactivated successfully. Jun 22 13:14:08 localhost systemd[1]: Stopped Switch Root. Jun 22 13:14:08 localhost systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1. Jun 22 13:14:08 localhost systemd[1]: Created slice Slice /system/getty. Jun 22 13:14:08 localhost systemd[1]: Created slice Slice /system/modprobe. Jun 22 13:14:08 localhost systemd[1]: Created slice Slice /system/serial-getty. Jun 22 13:14:08 localhost systemd[1]: Created slice Slice /system/sshd-keygen. Jun 22 13:14:08 localhost systemd[1]: Created slice User and Session Slice. Jun 22 13:14:08 localhost systemd[1]: Started Dispatch Password Requests to Console Directory Watch. Jun 22 13:14:08 localhost systemd[1]: Started Forward Password Requests to Wall Directory Watch. Jun 22 13:14:08 localhost systemd[1]: Set up automount Arbitrary Executable File Formats File System Automount Point. Jun 22 13:14:08 localhost systemd[1]: Reached target Local Encrypted Volumes. Jun 22 13:14:08 localhost systemd[1]: Stopped target Switch Root. Jun 22 13:14:08 localhost systemd[1]: Stopped target Initrd File Systems. Jun 22 13:14:08 localhost systemd[1]: Stopped target Initrd Root File System. Jun 22 13:14:08 localhost systemd[1]: Reached target Local Integrity Protected Volumes. Jun 22 13:14:08 localhost systemd[1]: Reached target Path Units. Jun 22 13:14:08 localhost systemd[1]: Reached target Slice Units. Jun 22 13:14:08 localhost systemd[1]: Reached target Swaps. Jun 22 13:14:08 localhost systemd[1]: Reached target Local Verity Protected Volumes. Jun 22 13:14:08 localhost systemd[1]: Listening on RPCbind Server Activation Socket. Jun 22 13:14:08 localhost systemd[1]: Reached target RPC Port Mapper. Jun 22 13:14:08 localhost systemd[1]: Listening on Process Core Dump Socket. Jun 22 13:14:08 localhost systemd[1]: Listening on initctl Compatibility Named Pipe. Jun 22 13:14:08 localhost systemd[1]: Listening on udev Control Socket. Jun 22 13:14:08 localhost systemd[1]: Listening on udev Kernel Socket. Jun 22 13:14:08 localhost systemd[1]: Mounting Huge Pages File System... Jun 22 13:14:08 localhost systemd[1]: Mounting POSIX Message Queue File System... Jun 22 13:14:08 localhost systemd[1]: Mounting Kernel Debug File System... Jun 22 13:14:08 localhost systemd[1]: Mounting Kernel Trace File System... Jun 22 13:14:08 localhost systemd[1]: Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). Jun 22 13:14:08 localhost systemd[1]: Starting Create List of Static Device Nodes... Jun 22 13:14:08 localhost systemd[1]: Starting Load Kernel Module configfs... 
Jun 22 13:14:08 localhost systemd[1]: Starting Load Kernel Module drm... Jun 22 13:14:08 localhost systemd[1]: Starting Load Kernel Module fuse... Jun 22 13:14:08 localhost systemd[1]: Starting Read and set NIS domainname from /etc/sysconfig/network... Jun 22 13:14:08 localhost systemd[1]: systemd-fsck-root.service: Deactivated successfully. Jun 22 13:14:08 localhost systemd[1]: Stopped File System Check on Root Device. Jun 22 13:14:08 localhost systemd[1]: Stopped Journal Service. Jun 22 13:14:08 localhost systemd[1]: Starting Journal Service... Jun 22 13:14:08 localhost systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. Jun 22 13:14:08 localhost systemd[1]: Starting Generate network units from Kernel command line... Jun 22 13:14:08 localhost systemd[1]: TPM2 PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). Jun 22 13:14:08 localhost systemd[1]: Starting Remount Root and Kernel File Systems... Jun 22 13:14:08 localhost systemd[1]: Repartition Root Disk was skipped because no trigger condition checks were met. Jun 22 13:14:08 localhost systemd[1]: Starting Apply Kernel Variables... Jun 22 13:14:08 localhost systemd[1]: Starting Coldplug All udev Devices... Jun 22 13:14:08 localhost systemd[1]: Mounted Huge Pages File System. Jun 22 13:14:08 localhost systemd[1]: Mounted POSIX Message Queue File System. Jun 22 13:14:08 localhost systemd[1]: Mounted Kernel Debug File System. Jun 22 13:14:08 localhost systemd[1]: Mounted Kernel Trace File System. Jun 22 13:14:08 localhost systemd[1]: Finished Create List of Static Device Nodes. Jun 22 13:14:08 localhost systemd[1]: Finished Read and set NIS domainname from /etc/sysconfig/network. Jun 22 13:14:08 localhost systemd[1]: Finished Generate network units from Kernel command line. Jun 22 13:14:08 localhost systemd-journald[477]: Journal started ░░ Subject: The journal has been started ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has started up, opened the journal ░░ files for writing and is now ready to process requests. Jun 22 13:14:08 localhost systemd-journald[477]: Runtime Journal (/run/log/journal/d63c337a94f048b8ac03035012b6922a) is 8.0M, max 70.6M, 62.6M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/d63c337a94f048b8ac03035012b6922a) is currently using 8.0M. ░░ Maximum allowed usage is set to 70.6M. ░░ Leaving at least 35.3M free (of currently available 690.5M of disk space). ░░ Enforced usage limit is thus 70.6M, of which 62.6M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Jun 22 13:14:08 localhost systemd[1]: Queued start job for default target Multi-User System. Jun 22 13:14:08 localhost systemd[1]: systemd-journald.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-journald.service has successfully entered the 'dead' state. Jun 22 13:14:08 localhost systemd[1]: Started Journal Service. 
Jun 22 13:14:08 localhost systemd[1]: Finished Remount Root and Kernel File Systems. ░░ Subject: A start job for unit systemd-remount-fs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-remount-fs.service has finished successfully. ░░ ░░ The job identifier is 122. Jun 22 13:14:08 localhost systemd[1]: First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit systemd-firstboot.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-firstboot.service has finished successfully. ░░ ░░ The job identifier is 165. Jun 22 13:14:09 localhost systemd[1]: Rebuild Hardware Database was skipped because of an unmet condition check (ConditionNeedsUpdate=/etc). ░░ Subject: A start job for unit systemd-hwdb-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hwdb-update.service has finished successfully. ░░ ░░ The job identifier is 141. Jun 22 13:14:09 localhost systemd[1]: Starting Flush Journal to Persistent Storage... ░░ Subject: A start job for unit systemd-journal-flush.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has begun execution. ░░ ░░ The job identifier is 132. Jun 22 13:14:09 localhost systemd[1]: Starting Load/Save OS Random Seed... ░░ Subject: A start job for unit systemd-random-seed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-random-seed.service has begun execution. ░░ ░░ The job identifier is 150. Jun 22 13:14:09 localhost systemd[1]: Create System Users was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-sysusers.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysusers.service has finished successfully. ░░ ░░ The job identifier is 149. Jun 22 13:14:09 localhost systemd[1]: Starting Create Static Device Nodes in /dev... ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has begun execution. ░░ ░░ The job identifier is 164. Jun 22 13:14:09 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Jun 22 13:14:09 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 129. Jun 22 13:14:09 localhost systemd[1]: Finished Apply Kernel Variables. ░░ Subject: A start job for unit systemd-sysctl.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysctl.service has finished successfully. ░░ ░░ The job identifier is 163. 
Jun 22 13:14:09 localhost systemd[1]: Finished Coldplug All udev Devices. ░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-trigger.service has finished successfully. ░░ ░░ The job identifier is 175. Jun 22 13:14:09 localhost systemd[1]: Finished Load/Save OS Random Seed. ░░ Subject: A start job for unit systemd-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-random-seed.service has finished successfully. ░░ ░░ The job identifier is 150. Jun 22 13:14:09 localhost systemd[1]: First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit first-boot-complete.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit first-boot-complete.target has finished successfully. ░░ ░░ The job identifier is 151. Jun 22 13:14:09 localhost systemd[1]: Mounting Kernel Configuration File System... ░░ Subject: A start job for unit sys-kernel-config.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has begun execution. ░░ ░░ The job identifier is 128. Jun 22 13:14:09 localhost systemd-journald[477]: Runtime Journal (/run/log/journal/d63c337a94f048b8ac03035012b6922a) is 8.0M, max 70.6M, 62.6M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/d63c337a94f048b8ac03035012b6922a) is currently using 8.0M. ░░ Maximum allowed usage is set to 70.6M. ░░ Leaving at least 35.3M free (of currently available 690.4M of disk space). ░░ Enforced usage limit is thus 70.6M, of which 62.6M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Jun 22 13:14:09 localhost systemd-journald[477]: Received client request to flush runtime journal. Jun 22 13:14:09 localhost systemd[1]: Finished Flush Journal to Persistent Storage. ░░ Subject: A start job for unit systemd-journal-flush.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has finished successfully. ░░ ░░ The job identifier is 132. Jun 22 13:14:09 localhost systemd[1]: Mounted Kernel Configuration File System. ░░ Subject: A start job for unit sys-kernel-config.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has finished successfully. ░░ ░░ The job identifier is 128. Jun 22 13:14:09 localhost kernel: ACPI: bus type drm_connector registered Jun 22 13:14:09 localhost systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Jun 22 13:14:09 localhost systemd[1]: Finished Load Kernel Module drm. 
░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 222. Jun 22 13:14:09 localhost kernel: fuse: init (API version 7.36) Jun 22 13:14:09 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@fuse.service has successfully entered the 'dead' state. Jun 22 13:14:09 localhost systemd[1]: Finished Load Kernel Module fuse. ░░ Subject: A start job for unit modprobe@fuse.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@fuse.service has finished successfully. ░░ ░░ The job identifier is 160. Jun 22 13:14:09 localhost systemd[1]: Mounting FUSE Control File System... ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has begun execution. ░░ ░░ The job identifier is 159. Jun 22 13:14:09 localhost systemd[1]: Mounted FUSE Control File System. ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully. ░░ ░░ The job identifier is 159. Jun 22 13:14:09 localhost systemd[1]: Finished Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 164. Jun 22 13:14:09 localhost systemd[1]: Reached target Preparation for Local File Systems. ░░ Subject: A start job for unit local-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 124. Jun 22 13:14:09 localhost systemd[1]: Reached target Local File Systems. ░░ Subject: A start job for unit local-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs.target has finished successfully. ░░ ░░ The job identifier is 121. Jun 22 13:14:09 localhost systemd[1]: Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit ldconfig.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ldconfig.service has finished successfully. ░░ ░░ The job identifier is 126. Jun 22 13:14:09 localhost systemd[1]: Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux). ░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit selinux-autorelabel-mark.service has finished successfully. ░░ ░░ The job identifier is 120. 
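The modprobe@drm.service and modprobe@fuse.service entries above are instances of systemd's one-shot modprobe@.service template, which loads the module named after the "@" and then exits (hence "Deactivated successfully"). A hedged sketch, with assumed file names, of loading such a module both persistently at boot and immediately through the same template:

- name: Load the fuse module now and on every boot (illustrative sketch)
  hosts: all
  become: true
  tasks:
    - name: List fuse in modules-load.d so systemd-modules-load picks it up
      ansible.builtin.copy:
        dest: /etc/modules-load.d/fuse.conf   # assumed file name
        content: "fuse\n"
        mode: "0644"

    - name: Load it immediately through the template unit seen in the journal
      ansible.builtin.systemd:
        name: modprobe@fuse.service
        state: started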
Jun 22 13:14:09 localhost systemd[1]: Set Up Additional Binary Formats was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-binfmt.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-binfmt.service has finished successfully. ░░ ░░ The job identifier is 161. Jun 22 13:14:09 localhost systemd[1]: Update Boot Loader Random Seed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-random-seed.service has finished successfully. ░░ ░░ The job identifier is 137. Jun 22 13:14:09 localhost systemd[1]: Starting Automatic Boot Loader Update... ░░ Subject: A start job for unit systemd-boot-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has begun execution. ░░ ░░ The job identifier is 176. Jun 22 13:14:09 localhost systemd[1]: Starting Commit a transient machine-id on disk... ░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has begun execution. ░░ ░░ The job identifier is 139. Jun 22 13:14:09 localhost systemd[1]: Starting Create Volatile Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 171. Jun 22 13:14:09 localhost systemd[1]: Starting Rule-based Manager for Device Events and Files... ░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 166. Jun 22 13:14:09 localhost systemd-udevd[493]: Using default interface naming scheme 'rhel-9.0'. Jun 22 13:14:09 localhost systemd[1]: Finished Commit a transient machine-id on disk. ░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has finished successfully. ░░ ░░ The job identifier is 139. Jun 22 13:14:09 localhost bootctl[490]: Couldn't find EFI system partition, skipping. Jun 22 13:14:09 localhost systemd[1]: Finished Automatic Boot Loader Update. ░░ Subject: A start job for unit systemd-boot-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has finished successfully. ░░ ░░ The job identifier is 176. Jun 22 13:14:09 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state. Jun 22 13:14:09 localhost systemd[1]: Finished Create Volatile Files and Directories. 
░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 171. Jun 22 13:14:09 localhost systemd[1]: Mounting RPC Pipe File System... ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution. ░░ ░░ The job identifier is 235. Jun 22 13:14:09 localhost systemd[1]: Starting Security Auditing Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 207. Jun 22 13:14:09 localhost systemd[1]: Starting RPC Bind... ░░ Subject: A start job for unit rpcbind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has begun execution. ░░ ░░ The job identifier is 203. Jun 22 13:14:09 localhost systemd[1]: Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var). ░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-catalog-update.service has finished successfully. ░░ ░░ The job identifier is 156. Jun 22 13:14:09 localhost systemd[1]: Update is Completed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-update-done.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-done.service has finished successfully. ░░ ░░ The job identifier is 177. Jun 22 13:14:10 localhost kernel: RPC: Registered named UNIX socket transport module. Jun 22 13:14:10 localhost kernel: RPC: Registered udp transport module. Jun 22 13:14:10 localhost kernel: RPC: Registered tcp transport module. Jun 22 13:14:10 localhost kernel: RPC: Registered tcp-with-tls transport module. Jun 22 13:14:10 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. Jun 22 13:14:10 localhost systemd[1]: Mounted RPC Pipe File System. ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully. ░░ ░░ The job identifier is 235. Jun 22 13:14:10 localhost systemd[1]: Reached target rpc_pipefs.target. ░░ Subject: A start job for unit rpc_pipefs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc_pipefs.target has finished successfully. ░░ ░░ The job identifier is 234. Jun 22 13:14:10 localhost systemd[1]: Started Rule-based Manager for Device Events and Files. ░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 166. Jun 22 13:14:10 localhost systemd[1]: Starting Load Kernel Module configfs... 
░░ Subject: A start job for unit modprobe@configfs.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has begun execution. ░░ ░░ The job identifier is 260. Jun 22 13:14:10 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Jun 22 13:14:10 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 260. Jun 22 13:14:10 localhost systemd[1]: Condition check resulted in /dev/ttyS0 being skipped. ░░ Subject: A start job for unit dev-ttyS0.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-ttyS0.device has finished successfully. ░░ ░░ The job identifier is 243. Jun 22 13:14:10 localhost auditd[527]: No plugins found, not dispatching events Jun 22 13:14:10 localhost auditd[527]: Init complete, auditd 3.1.2 listening for events (startup state enable) Jun 22 13:14:10 localhost systemd[1]: Started RPC Bind. ░░ Subject: A start job for unit rpcbind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has finished successfully. ░░ ░░ The job identifier is 203. Jun 22 13:14:10 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 Jun 22 13:14:10 localhost systemd-udevd[512]: Network interface NamePolicy= disabled on kernel command line. 
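The systemd-udevd message above ("Network interface NamePolicy= disabled on kernel command line") usually indicates that predictable interface naming was switched off at boot, typically via net.ifnames=0, which is why the device keeps the eth0 name later in the log. A quick sketch for confirming that from the kernel command line:

- name: Check how interface naming was configured at boot (illustrative sketch)
  hosts: all
  tasks:
    - name: Read the kernel command line
      ansible.builtin.slurp:
        src: /proc/cmdline
      register: cmdline

    - name: Show it (look for net.ifnames=0 / biosdevname=0)
      ansible.builtin.debug:
        msg: "{{ cmdline.content | b64decode }}"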
Jun 22 13:14:10 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr Jun 22 13:14:10 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer Jun 22 13:14:10 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console Jun 22 13:14:10 localhost kernel: Console: switching to colour dummy device 80x25 Jun 22 13:14:10 localhost kernel: [drm] Initialized cirrus 2.0.0 2019 for 0000:00:02.0 on minor 0 Jun 22 13:14:10 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device Jun 22 13:14:10 localhost kernel: Console: switching to colour frame buffer device 128x48 Jun 22 13:14:10 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device Jun 22 13:14:10 localhost augenrules[532]: /sbin/augenrules: No change Jun 22 13:14:10 localhost augenrules[557]: No rules Jun 22 13:14:10 localhost augenrules[557]: enabled 1 Jun 22 13:14:10 localhost augenrules[557]: failure 1 Jun 22 13:14:10 localhost augenrules[557]: pid 527 Jun 22 13:14:10 localhost augenrules[557]: rate_limit 0 Jun 22 13:14:10 localhost augenrules[557]: backlog_limit 8192 Jun 22 13:14:10 localhost augenrules[557]: lost 0 Jun 22 13:14:10 localhost augenrules[557]: backlog 0 Jun 22 13:14:10 localhost augenrules[557]: backlog_wait_time 60000 Jun 22 13:14:10 localhost augenrules[557]: backlog_wait_time_actual 0 Jun 22 13:14:10 localhost augenrules[557]: enabled 1 Jun 22 13:14:10 localhost augenrules[557]: failure 1 Jun 22 13:14:10 localhost augenrules[557]: pid 527 Jun 22 13:14:10 localhost augenrules[557]: rate_limit 0 Jun 22 13:14:10 localhost augenrules[557]: backlog_limit 8192 Jun 22 13:14:10 localhost augenrules[557]: lost 0 Jun 22 13:14:10 localhost augenrules[557]: backlog 4 Jun 22 13:14:10 localhost augenrules[557]: backlog_wait_time 60000 Jun 22 13:14:10 localhost augenrules[557]: backlog_wait_time_actual 0 Jun 22 13:14:10 localhost augenrules[557]: enabled 1 Jun 22 13:14:10 localhost augenrules[557]: failure 1 Jun 22 13:14:10 localhost augenrules[557]: pid 527 Jun 22 13:14:10 localhost augenrules[557]: rate_limit 0 Jun 22 13:14:10 localhost augenrules[557]: backlog_limit 8192 Jun 22 13:14:10 localhost augenrules[557]: lost 0 Jun 22 13:14:10 localhost augenrules[557]: backlog 4 Jun 22 13:14:10 localhost augenrules[557]: backlog_wait_time 60000 Jun 22 13:14:10 localhost augenrules[557]: backlog_wait_time_actual 0 Jun 22 13:14:10 localhost systemd[1]: Started Security Auditing Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 207. Jun 22 13:14:10 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP... ░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 202. Jun 22 13:14:10 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 202. Jun 22 13:14:10 localhost systemd[1]: Reached target System Initialization. 
░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 119. Jun 22 13:14:10 localhost systemd[1]: Started dnf makecache --timer. ░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 184. Jun 22 13:14:10 localhost systemd[1]: Started Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 182. Jun 22 13:14:10 localhost systemd[1]: Started Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 183. Jun 22 13:14:10 localhost systemd[1]: Reached target Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 181. Jun 22 13:14:10 localhost systemd[1]: Listening on D-Bus System Message Bus Socket. ░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 188. Jun 22 13:14:10 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket. ░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 193. Jun 22 13:14:10 localhost systemd[1]: Reached target Socket Units. ░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 192. Jun 22 13:14:11 localhost systemd[1]: Starting D-Bus System Message Bus... ░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 189. Jun 22 13:14:11 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 146. Jun 22 13:14:11 localhost systemd[1]: Started D-Bus System Message Bus. 
░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 189. Jun 22 13:14:11 localhost systemd[1]: Reached target Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 116. Jun 22 13:14:11 localhost systemd[1]: Starting NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 224. Jun 22 13:14:11 localhost systemd[1]: Starting Initial cloud-init job (pre-networking)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 209. Jun 22 13:14:11 localhost dbus-broker-lau[570]: Ready Jun 22 13:14:11 localhost systemd[1]: Starting Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 153. Jun 22 13:14:11 localhost systemd[1]: Started irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 205. Jun 22 13:14:11 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload). ░░ Subject: A start job for unit microcode.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit microcode.service has finished successfully. ░░ ░░ The job identifier is 191. Jun 22 13:14:11 localhost /usr/sbin/irqbalance[575]: libcap-ng used by "/usr/sbin/irqbalance" failed dropping bounding set due to not having CAP_SETPCAP in capng_apply Jun 22 13:14:11 localhost systemd[1]: Started Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 227. Jun 22 13:14:11 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 218. Jun 22 13:14:11 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). 
░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 215. Jun 22 13:14:11 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 217. Jun 22 13:14:11 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 214. Jun 22 13:14:11 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 199. Jun 22 13:14:11 localhost systemd[1]: Reached target User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 200. Jun 22 13:14:11 localhost systemd[1]: Starting User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 220. Jun 22 13:14:11 localhost systemd[1]: Starting Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 266. Jun 22 13:14:11 localhost systemd[1]: Finished Restore /run/initramfs on shutdown. ░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 153. Jun 22 13:14:11 localhost systemd-logind[579]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. Jun 22 13:14:11 localhost systemd-logind[579]: Watching system buttons on /dev/input/event0 (Power Button) Jun 22 13:14:11 localhost systemd-logind[579]: Watching system buttons on /dev/input/event1 (Sleep Button) Jun 22 13:14:11 localhost systemd-logind[579]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Jun 22 13:14:11 localhost systemd[1]: Started User Login Management. 
░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 220. Jun 22 13:14:11 localhost systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Jun 22 13:14:11 localhost systemd[1]: Finished Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 266. Jun 22 13:14:11 localhost chronyd[584]: chronyd version 4.5 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Jun 22 13:14:11 localhost chronyd[584]: Loaded 0 symmetric keys Jun 22 13:14:11 localhost chronyd[584]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Jun 22 13:14:11 localhost chronyd[584]: Using right/UTC timezone to obtain leap second data Jun 22 13:14:11 localhost chronyd[584]: Loaded seccomp filter (level 2) Jun 22 13:14:11 localhost systemd[1]: Started NTP client/server. ░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 224. Jun 22 13:14:11 localhost rngd[578]: Disabling 7: PKCS11 Entropy generator (pkcs11) Jun 22 13:14:11 localhost rngd[578]: Disabling 5: NIST Network Entropy Beacon (nist) Jun 22 13:14:11 localhost rngd[578]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Jun 22 13:14:11 localhost rngd[578]: Initializing available sources Jun 22 13:14:11 localhost rngd[578]: [hwrng ]: Initialization Failed Jun 22 13:14:11 localhost rngd[578]: [rdrand]: Enabling RDRAND rng support Jun 22 13:14:11 localhost rngd[578]: [rdrand]: Initialized Jun 22 13:14:11 localhost rngd[578]: [jitter]: JITTER timeout set to 5 sec Jun 22 13:14:12 localhost rngd[578]: [jitter]: Initializing AES buffer Jun 22 13:14:15 localhost cloud-init[590]: Cloud-init v. 23.4-11.el9 running 'init-local' at Sat, 22 Jun 2024 13:14:15 +0000. Up 20.75 seconds. Jun 22 13:14:16 localhost dhclient[593]: Internet Systems Consortium DHCP Client 4.4.2b1 Jun 22 13:14:16 localhost dhclient[593]: Copyright 2004-2019 Internet Systems Consortium. Jun 22 13:14:16 localhost dhclient[593]: All rights reserved. 
Jun 22 13:14:16 localhost dhclient[593]: For info, please visit https://www.isc.org/software/dhcp/ Jun 22 13:14:16 localhost dhclient[593]: Jun 22 13:14:16 localhost dhclient[593]: Listening on LPF/eth0/0e:f5:71:43:d0:93 Jun 22 13:14:16 localhost dhclient[593]: Sending on LPF/eth0/0e:f5:71:43:d0:93 Jun 22 13:14:16 localhost dhclient[593]: Sending on Socket/fallback Jun 22 13:14:16 localhost dhclient[593]: DHCPDISCOVER on eth0 to 255.255.255.255 port 67 interval 5 (xid=0x3e903d4d) Jun 22 13:14:16 localhost dhclient[593]: DHCPOFFER of 10.31.42.228 from 10.31.40.1 Jun 22 13:14:16 localhost dhclient[593]: DHCPREQUEST for 10.31.42.228 on eth0 to 255.255.255.255 port 67 (xid=0x3e903d4d) Jun 22 13:14:16 localhost dhclient[593]: DHCPACK of 10.31.42.228 from 10.31.40.1 (xid=0x3e903d4d) Jun 22 13:14:16 localhost dhclient[593]: bound to 10.31.42.228 -- renewal in 1646 seconds. Jun 22 13:14:17 localhost rngd[578]: [jitter]: Unable to obtain AES key, disabling JITTER source Jun 22 13:14:17 localhost rngd[578]: [jitter]: Initialization Failed Jun 22 13:14:17 localhost rngd[578]: [namedpipe]: Initialization Failed Jun 22 13:14:17 localhost rngd[578]: Process privileges have been dropped to 2:2 Jun 22 13:14:17 localhost systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 329. Jun 22 13:14:17 localhost systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 329. Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-hostnamed[608]: Hostname set to (static) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (pre-networking). ░░ Subject: A start job for unit cloud-init-local.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has finished successfully. ░░ ░░ The job identifier is 209. Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Network. ░░ Subject: A start job for unit network-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-pre.target has finished successfully. ░░ ░░ The job identifier is 155. Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager... ░░ Subject: A start job for unit NetworkManager.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has begun execution. ░░ ░░ The job identifier is 187. Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.7391] NetworkManager (version 1.48.0-1.el9) is starting... 
(boot:75853f0f-b81b-42ec-8587-2aa752780df7) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.7394] Read config: /etc/NetworkManager/NetworkManager.conf (run: 15-carrier-timeout.conf) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.7986] manager[0x56382cd9a080]: monitoring kernel firmware directory '/lib/firmware'. Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8014] hostname: hostname: using hostnamed Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8014] hostname: static hostname changed from (none) to "ip-10-31-42-228.us-east-1.aws.redhat.com" Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8034] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8435] manager[0x56382cd9a080]: rfkill: Wi-Fi hardware radio set enabled Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8435] manager[0x56382cd9a080]: rfkill: WWAN hardware radio set enabled Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8563] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.48.0-1.el9/libnm-device-plugin-team.so) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8564] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8572] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8573] manager: Networking is enabled by state file Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8596] settings: Loaded settings plugin: keyfile (internal) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8627] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.48.0-1.el9/libnm-settings-plugin-ifcfg-rh.so") Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 394. 
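NetworkManager above reads /etc/NetworkManager/NetworkManager.conf plus drop-ins, initialises its DNS manager (dns=default, rc-manager=symlink) and loads the keyfile and ifcfg-rh settings plugins. A minimal sketch, with an assumed drop-in name and values, of adjusting those settings through a conf.d snippet and reloading the daemon:

- name: Prefer keyfile profiles for NetworkManager (illustrative sketch)
  hosts: all
  become: true
  tasks:
    - name: Drop a NetworkManager configuration snippet
      ansible.builtin.copy:
        dest: /etc/NetworkManager/conf.d/90-keyfile.conf   # assumed file name
        content: |
          [main]
          plugins=keyfile
          dns=default
        mode: "0644"

    - name: Reload NetworkManager configuration without restarting it
      ansible.builtin.command:
        cmd: nmcli general reload
      changed_when: true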
Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8658] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate" Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8668] dhcp: init: Using DHCP client 'internal' Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8672] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8684] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8696] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8718] device (lo): Activation: starting connection 'lo' (300c644b-fdb6-466f-99e9-f9e4115ae5b9) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8730] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8734] device (eth0): state change: unmanaged -> unavailable (reason 'managed', sys-iface-state: 'external') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8752] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8757] device (lo): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8760] device (lo): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8763] device (lo): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8765] device (eth0): carrier: link connected Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8768] device (lo): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8776] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', sys-iface-state: 'managed') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8783] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8788] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8789] device (eth0): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'managed') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8792] manager: NetworkManager state is now CONNECTING Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com 
NetworkManager[612]: [1719062057.8794] device (eth0): state change: prepare -> config (reason 'none', sys-iface-state: 'managed') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8800] device (eth0): state change: config -> ip-config (reason 'none', sys-iface-state: 'managed') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8802] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 401. Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8852] dhcp4 (eth0): state changed new lease, address=10.31.42.228 Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8857] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Network Manager. ░░ Subject: A start job for unit NetworkManager.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has finished successfully. ░░ ░░ The job identifier is 187. Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 190. Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062057.8941] device (eth0): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'managed') Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Wait Online... ░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has begun execution. ░░ ░░ The job identifier is 186. Jun 22 13:14:17 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting GSSAPI Proxy Daemon... ░░ Subject: A start job for unit gssproxy.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has begun execution. ░░ ░░ The job identifier is 237. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started GSSAPI Proxy Daemon. ░░ Subject: A start job for unit gssproxy.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has finished successfully. ░░ ░░ The job identifier is 237. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). 
░░ Subject: A start job for unit rpc-gssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-gssd.service has finished successfully. ░░ ░░ The job identifier is 233. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target NFS client services. ░░ Subject: A start job for unit nfs-client.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nfs-client.target has finished successfully. ░░ ░░ The job identifier is 230. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 231. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 239. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase.service has finished successfully. ░░ ░░ The job identifier is 158. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 401. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062058.1741] device (lo): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external') Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062058.1746] device (lo): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external') Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062058.1758] device (lo): Activation: successful, device activated. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062058.1792] device (eth0): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'managed') Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062058.1798] device (eth0): state change: secondaries -> activated (reason 'none', sys-iface-state: 'managed') Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062058.1803] manager: NetworkManager state is now CONNECTED_SITE Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062058.1811] device (eth0): Activation: successful, device activated. 
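At this point the journal shows eth0 fully activated with the DHCP lease obtained earlier and the dispatcher service running. The nmcli fields used below are standard; everything else in this sketch is illustrative:

- name: Verify the activated connection on eth0 (illustrative sketch)
  hosts: all
  tasks:
    - name: Ask NetworkManager for device state and IPv4 details
      ansible.builtin.command:
        cmd: nmcli -g GENERAL.STATE,IP4.ADDRESS,IP4.GATEWAY device show eth0
      register: eth0_state
      changed_when: false

    - name: Print the device summary
      ansible.builtin.debug:
        var: eth0_state.stdout_lines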
Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062058.1818] manager: NetworkManager state is now CONNECTED_GLOBAL Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com NetworkManager[612]: [1719062058.1826] manager: startup complete Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished Network Manager Wait Online. ░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has finished successfully. ░░ ░░ The job identifier is 186. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Initial cloud-init job (metadata service crawler)... ░░ Subject: A start job for unit cloud-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has begun execution. ░░ ░░ The job identifier is 212. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com chronyd[584]: Added source 10.11.160.238 Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com chronyd[584]: Added source 10.18.100.10 Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com chronyd[584]: Added source 10.2.32.37 Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com chronyd[584]: Added source 10.2.32.38 Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Cloud-init v. 23.4-11.el9 running 'init' at Sat, 22 Jun 2024 13:14:18 +0000. Up 23.33 seconds. Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | eth0 | True | 10.31.42.228 | 255.255.252.0 | global | 0e:f5:71:43:d0:93 | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | eth0 | True | fe80::cf5:71ff:fe43:d093/64 | . | link | 0e:f5:71:43:d0:93 | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | lo | True | ::1/128 | . | host | . 
| Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | 0 | 0.0.0.0 | 10.31.40.1 | 0.0.0.0 | eth0 | UG | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | 1 | 10.31.40.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+---------+-----------+-------+ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | Route | Destination | Gateway | Interface | Flags | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+---------+-----------+-------+ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | 1 | fe80::/64 | :: | eth0 | U | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | 3 | multicast | :: | eth0 | U | Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+---------+-----------+-------+ Jun 22 13:14:18 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: 2024-06-22 13:14:18,673 - __init__.py[WARNING]: Unhandled non-multipart (text/x-not-multipart) userdata: 'b'~/.citool.d/post-install'...' Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Generating public/private rsa key pair. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: The key fingerprint is: Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: SHA256:JAicih2oNR8ePt3HE2JL+h7Jp1H5i9hKWvua/ye4zTc root@ip-10-31-42-228.us-east-1.aws.redhat.com Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: The key's randomart image is: Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: +---[RSA 3072]----+ Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | o.. | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |. *.o. + . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |o+ *.+.=.+ o | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |+ . 
= ooo * | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | . oS+ o | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | * . . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | .oB o . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | ++o+oo E | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | . ==+o++ . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: +----[SHA256]-----+ Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Generating public/private dsa key pair. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Your identification has been saved in /etc/ssh/ssh_host_dsa_key Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Your public key has been saved in /etc/ssh/ssh_host_dsa_key.pub Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: The key fingerprint is: Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: SHA256:CmDM98qmvstKMV/UJ4PFSC+prR9+KwAUyiJD1n/DzoQ root@ip-10-31-42-228.us-east-1.aws.redhat.com Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: The key's randomart image is: Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: +---[DSA 1024]----+ Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | +o ..o. | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |=+ . .*. | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |*.= o=o= . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |o+ o+E.=+ | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | o...o* S | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | +ooo + | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | . o=.. | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |.. oo... | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |.o*o oo.. | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: +----[SHA256]-----+ Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Generating public/private ecdsa key pair. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: The key fingerprint is: Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: SHA256:W5N4cFmWyz/UsLbWfCpWmiKnlbPvgyz8g70oxRXGIjg root@ip-10-31-42-228.us-east-1.aws.redhat.com Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: The key's randomart image is: Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: +---[ECDSA 256]---+ Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | . . o. | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | E . . ++. . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | . ..ooo . 
+ | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | +..o + .| Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | .S.= + + | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | o+ o * +| Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | o.++. = o.| Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | . =oBo* . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | .o*+Boo | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: +----[SHA256]-----+ Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Generating public/private ed25519 key pair. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: The key fingerprint is: Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: SHA256:OihpfPChUEjLnWLVy0m+n44YOW5c4UBCMl1V8pj6qC8 root@ip-10-31-42-228.us-east-1.aws.redhat.com Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: The key's randomart image is: Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: +--[ED25519 256]--+ Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |++ oo.o.. | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |++=..o = | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |.=+o+ = . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |.... B | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: |. . = o S | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | o =.O . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | B+* = . | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | .E*+ .+ | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: | o=.... | Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[704]: +----[SHA256]-----+ Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com sm-notify[781]: Version 2.5.4 starting Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (metadata service crawler). ░░ Subject: A start job for unit cloud-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has finished successfully. ░░ ░░ The job identifier is 212. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-config availability. ░░ Subject: A start job for unit cloud-config.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.target has finished successfully. ░░ ░░ The job identifier is 211. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target Network is Online. ░░ Subject: A start job for unit network-online.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-online.target has finished successfully. 
░░ ░░ The job identifier is 185. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Apply the settings specified in cloud-config... ░░ Subject: A start job for unit cloud-config.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has begun execution. ░░ ░░ The job identifier is 210. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Crash recovery kernel arming... ░░ Subject: A start job for unit kdump.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has begun execution. ░░ ░░ The job identifier is 229. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting The restraint harness.... ░░ Subject: A start job for unit restraintd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has begun execution. ░░ ░░ The job identifier is 223. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Notify NFS peers of a restart... ░░ Subject: A start job for unit rpc-statd-notify.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has begun execution. ░░ ░░ The job identifier is 238. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting System Logging Service... ░░ Subject: A start job for unit rsyslog.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has begun execution. ░░ ░░ The job identifier is 198. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 213. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started The restraint harness.. ░░ Subject: A start job for unit restraintd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has finished successfully. ░░ ░░ The job identifier is 223. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Notify NFS peers of a restart. ░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has finished successfully. ░░ ░░ The job identifier is 238. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[784]: Server listening on 0.0.0.0 port 22. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[784]: Server listening on :: port 22. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 213. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started System Logging Service. 
░░ Subject: A start job for unit rsyslog.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has finished successfully. ░░ ░░ The job identifier is 198. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com rsyslogd[783]: [origin software="rsyslogd" swVersion="8.2310.0-4.el9" x-pid="783" x-info="https://www.rsyslog.com"] start Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com rsyslogd[783]: imjournal: journal files changed, reloading... [v8.2310.0-4.el9 try https://www.rsyslog.com/e/0 ] Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[837]: Cloud-init v. 23.4-11.el9 running 'modules:config' at Sat, 22 Jun 2024 13:14:20 +0000. Up 25.65 seconds. Jun 22 13:14:20 ip-10-31-42-228.us-east-1.aws.redhat.com restraintd[786]: Listening on http://localhost:8081 Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[784]: Received signal 15; terminating. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Stopping OpenSSH server daemon... ░░ Subject: A stop job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 479. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit sshd.service has successfully entered the 'dead' state. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Stopped OpenSSH server daemon. ░░ Subject: A stop job for unit sshd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has finished. ░░ ░░ The job identifier is 479 and the job result is done. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target. ░░ Subject: A stop job for unit sshd-keygen.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has finished. ░░ ░░ The job identifier is 547 and the job result is done. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target... ░░ Subject: A stop job for unit sshd-keygen.target has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has begun execution. ░░ ░░ The job identifier is 547. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 546. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). 
░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 543. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 545. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 547. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 479. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[868]: Server listening on 0.0.0.0 port 22. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[868]: Server listening on :: port 22. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 479. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished Apply the settings specified in cloud-config. ░░ Subject: A start job for unit cloud-config.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has finished successfully. ░░ ░░ The job identifier is 210. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com kdumpctl[788]: kdump: Detected change(s) in the following file(s): /etc/fstab Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Execute cloud user/final scripts... ░░ Subject: A start job for unit cloud-final.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has begun execution. ░░ ░░ The job identifier is 219. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Permit User Sessions... ░░ Subject: A start job for unit systemd-user-sessions.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has begun execution. ░░ ░░ The job identifier is 206. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished Permit User Sessions. 
░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has finished successfully. ░░ ░░ The job identifier is 206. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Command Scheduler. ░░ Subject: A start job for unit crond.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit crond.service has finished successfully. ░░ ░░ The job identifier is 228. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com crond[933]: (CRON) STARTUP (1.5.7) Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Getty on tty1. ░░ Subject: A start job for unit getty@tty1.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty@tty1.service has finished successfully. ░░ ░░ The job identifier is 246. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com crond[933]: (CRON) INFO (Syslog will be used instead of sendmail.) Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com crond[933]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 14% if used.) Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com crond[933]: (CRON) INFO (running with inotify support) Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Serial Getty on ttyS0. ░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit serial-getty@ttyS0.service has finished successfully. ░░ ░░ The job identifier is 241. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target Login Prompts. ░░ Subject: A start job for unit getty.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty.target has finished successfully. ░░ ░░ The job identifier is 240. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target Multi-User System. ░░ Subject: A start job for unit multi-user.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit multi-user.target has finished successfully. ░░ ░░ The job identifier is 115. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Record Runlevel Change in UTMP... ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution. ░░ ░░ The job identifier is 201. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished Record Runlevel Change in UTMP. 
░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully. ░░ ░░ The job identifier is 201. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1072]: Cloud-init v. 23.4-11.el9 running 'modules:final' at Sat, 22 Jun 2024 13:14:21 +0000. Up 26.34 seconds. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1074]: ############################################################# Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1075]: -----BEGIN SSH HOST KEY FINGERPRINTS----- Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1077]: 1024 SHA256:CmDM98qmvstKMV/UJ4PFSC+prR9+KwAUyiJD1n/DzoQ root@ip-10-31-42-228.us-east-1.aws.redhat.com (DSA) Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1079]: 256 SHA256:W5N4cFmWyz/UsLbWfCpWmiKnlbPvgyz8g70oxRXGIjg root@ip-10-31-42-228.us-east-1.aws.redhat.com (ECDSA) Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1081]: 256 SHA256:OihpfPChUEjLnWLVy0m+n44YOW5c4UBCMl1V8pj6qC8 root@ip-10-31-42-228.us-east-1.aws.redhat.com (ED25519) Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1083]: 3072 SHA256:JAicih2oNR8ePt3HE2JL+h7Jp1H5i9hKWvua/ye4zTc root@ip-10-31-42-228.us-east-1.aws.redhat.com (RSA) Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1084]: -----END SSH HOST KEY FINGERPRINTS----- Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1085]: ############################################################# Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com cloud-init[1072]: Cloud-init v. 23.4-11.el9 finished at Sat, 22 Jun 2024 13:14:21 +0000. Datasource DataSourceEc2Local. Up 26.55 seconds Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished Execute cloud user/final scripts. ░░ Subject: A start job for unit cloud-final.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has finished successfully. ░░ ░░ The job identifier is 219. Jun 22 13:14:21 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-init target. ░░ Subject: A start job for unit cloud-init.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.target has finished successfully. ░░ ░░ The job identifier is 208. 
Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 0 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 0 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 48 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 48 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 49 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 49 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 50 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 50 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 51 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 51 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 52 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 52 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 53 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 53 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 54 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 54 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 55 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 55 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 56 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 56 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 57 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 57 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 58 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 58 affinity is now unmanaged Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: Cannot change IRQ 59 affinity: Input/output error Jun 22 13:14:22 ip-10-31-42-228.us-east-1.aws.redhat.com irqbalance[575]: IRQ 59 affinity is now unmanaged Jun 22 13:14:24 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated. 
Jun 22 13:14:24 ip-10-31-42-228.us-east-1.aws.redhat.com chronyd[584]: Selected source 10.2.32.37 Jun 22 13:14:24 ip-10-31-42-228.us-east-1.aws.redhat.com chronyd[584]: System clock TAI offset set to 37 seconds Jun 22 13:14:24 ip-10-31-42-228.us-east-1.aws.redhat.com kdumpctl[788]: kdump: Rebuilding /boot/initramfs-5.14.0-460.el9.x86_64kdump.img Jun 22 13:14:25 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1403]: dracut-057-53.git20240104.el9 Jun 22 13:14:26 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics -o "plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/16c5b236-cff0-4e33-ac23-8dbf92c770ef /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-5.14.0-460.el9.x86_64kdump.img 5.14.0-460.el9.x86_64 Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'ifcfg' will not be installed, because it's in the list to be omitted! 
Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'plymouth' will not be installed, because it's in the list to be omitted! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'resume' will not be installed, because it's in the list to be omitted! Jun 22 13:14:27 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'earlykdump' will not be installed, because it's in the list to be omitted! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: memstrack is not available Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! 
Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! 
Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: memstrack is not available Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: systemd *** Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: systemd-initrd *** Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: nss-softokn *** Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: rngd *** Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: i18n *** Jun 22 13:14:28 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: drm *** Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: prefixdevname *** Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: kernel-modules *** Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: kernel-modules-extra *** Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: fstab-sys *** Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: rootfs-block *** Jun 22 13:14:29 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: terminfo *** Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: udev-rules *** Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Skipping udev rule: 91-permissions.rules Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Skipping udev rule: 80-drivers-modprobe.rules Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** 
Including module: dracut-systemd *** Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: usrmount *** Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: base *** Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: fs-lib *** Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: kdumpbase *** Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: microcode_ctl-fw_dir_override *** Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl module: mangling fw_dir Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: intel: caveats check for kernel version "5.14.0-460.el9.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"... Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: configuration "intel-06-2d-07" is ignored Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"... Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: configuration "intel-06-4e-03" is ignored Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: configuration "intel-06-4f-01" is ignored Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"... Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: configuration "intel-06-55-04" is ignored Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"... Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: configuration "intel-06-5e-03" is ignored Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"... Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: configuration "intel-06-8c-01" is ignored Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"... 
Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"... Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: shutdown *** Jun 22 13:14:30 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including module: squash *** Jun 22 13:14:31 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Including modules done *** Jun 22 13:14:31 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Installing kernel module dependencies *** Jun 22 13:14:32 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Installing kernel module dependencies done *** Jun 22 13:14:32 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Resolving executable dependencies *** Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Resolving executable dependencies done *** Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Hardlinking files *** Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Mode: real Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Files: 437 Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Linked: 1 files Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Compared: 0 xattrs Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Compared: 8 files Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Saved: 56.19 KiB Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Duration: 0.007804 seconds Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Hardlinking files done *** Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Generating early-microcode cpio image *** Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Constructing GenuineIntel.bin *** Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Constructing GenuineIntel.bin *** Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Store current command line parameters *** Jun 22 13:14:33 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: Stored kernel commandline: Jun 22 13:14:34 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: No dracut internal kernel commandline stored in the initramfs Jun 22 13:14:34 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Install squash loader *** Jun 22 13:14:34 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Stripping files *** Jun 22 13:14:36 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Stripping files done *** Jun 22 13:14:36 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Squashing the files inside the initramfs *** Jun 22 13:14:41 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Squashing the files inside the initramfs done *** Jun 22 13:14:41 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Creating image 
file '/boot/initramfs-5.14.0-460.el9.x86_64kdump.img' *** Jun 22 13:14:41 ip-10-31-42-228.us-east-1.aws.redhat.com dracut[1405]: *** Creating initramfs image file '/boot/initramfs-5.14.0-460.el9.x86_64kdump.img' done *** Jun 22 13:14:41 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec_file: kernel: 00000000776cd6d6 kernel_size: 0xd2d828 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: ima: kexec measurement buffer for the loaded kernel at 0xe0ffe000. Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec: Loaded ELF headers at 0xd5000000 bufsz=0x1000 memsz=0x1000 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: Loaded purgatory at 0xe0ff9000 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: Loaded boot_param, command line and misc at 0xe0ff7000 bufsz=0x1230 memsz=0x1230 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: Loaded 64bit kernel at 0xdd400000 bufsz=0xd28828 memsz=0x3bcb000 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: Loaded initrd at 0xdb124000 bufsz=0x22db800 memsz=0x22db800 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: Final command line is: elfcorehdr=0xd5000000 BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-460.el9.x86_64 ro rhgb net.ifnames=0 console=tty0 console=ttyS0,115200n8 irqpoll nr_cpus=1 reset_devices cgroup_disable=memory mce=off numa=off udev.children-max=2 panic=10 acpi_no_memhotplug transparent_hugepage=never nokaslr hest_disable novmcoredd cma=0 hugetlb_cma=0 disable_cpu_apicid=0 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: E820 memmap: Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: 0000000000001000-000000000009dfff (1) Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: 0000000000000000-0000000000000fff (2) Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: 000000000009e000-000000000009ffff (2) Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: 00000000000e0000-00000000000fffff (2) Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: 00000000fc000000-00000000ffffffff (2) Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec-bzImage64: 00000000d5001000-00000000e0ffffff (1) Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec_file: nr_segments = 6 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec_file: segment[0]: buf=0x00000000d8aab0e4 bufsz=0x82 mem=0xe0ffe000 memsz=0x1000 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec_file: segment[1]: buf=0x00000000a2a6b643 bufsz=0x1000 mem=0xd5000000 memsz=0x1000 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec_file: segment[2]: buf=0x00000000d5894f0d bufsz=0x4000 mem=0xe0ff9000 memsz=0x5000 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec_file: segment[3]: buf=0x00000000223dfb3c bufsz=0x1230 mem=0xe0ff7000 memsz=0x2000 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec_file: segment[4]: buf=0x000000000d6113a9 bufsz=0xd28828 mem=0xdd400000 memsz=0x3bcb000 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec_file: segment[5]: buf=0x000000004aa44ec4 bufsz=0x22db800 mem=0xdb124000 memsz=0x22dc000 Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kernel: kexec_file: 
kexec_file_load: type:1, start:0xe0ff9140 head:0x4 flags:0xa Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kdumpctl[788]: kdump: kexec: loaded kdump kernel Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com kdumpctl[788]: kdump: Starting kdump: [OK] Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 229. Jun 22 13:14:42 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.962s (kernel) + 6.692s (initrd) + 38.694s (userspace) = 47.349s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 1962324 microseconds. ░░ ░░ Initrd start-up required 6692060 microseconds. ░░ ░░ Userspace start-up required 38694856 microseconds. Jun 22 13:14:47 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4033]: Accepted publickey for root from 10.30.34.149 port 49476 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 611. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 613. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 4033. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 613. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. 
░░ ░░ The job identifier is 548. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Queued start job for default target Main User Target. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Created slice User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Started Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Reached target Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Reached target Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Starting D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 6. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Starting Create User's Volatile Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Listening on D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Finished Create User's Volatile Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Reached target Sockets. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Reached target Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Reached target Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Startup finished in 140ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 140040 microseconds. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 548. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 614. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4033]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4046]: Received disconnect from 10.30.34.149 port 49476:11: disconnected by user Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4046]: Disconnected from user root 10.30.34.149 port 49476 Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4033]: pam_unix(sshd:session): session closed for user root Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: Session 1 logged out. Waiting for processes to exit. Jun 22 13:15:41 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Stopping User Manager for UID 0... 
░░ Subject: A stop job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 681. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Activating special unit Exit the Session... Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Stopped target Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 17 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Stopped target Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 25 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Stopped target Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 20 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Stopped target Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 22 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Stopped target Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 24 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Stopped Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 27 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Closed D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 23 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Stopped Create User's Volatile Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 18 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Removed slice User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 28 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Reached target Shutdown. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 15. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Finished Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4037]: Reached target Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: user@0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@0.service has successfully entered the 'dead' state. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Stopped User Manager for UID 0. ░░ Subject: A stop job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has finished. ░░ ░░ The job identifier is 681 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Stopping User Runtime Directory /run/user/0... ░░ Subject: A stop job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 682. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: run-user-0.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-0.mount has successfully entered the 'dead' state. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: user-runtime-dir@0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@0.service has successfully entered the 'dead' state. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Stopped User Runtime Directory /run/user/0. ░░ Subject: A stop job for unit user-runtime-dir@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has finished. ░░ ░░ The job identifier is 682 and the job result is done. Jun 22 13:15:51 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Removed slice User Slice of UID 0. ░░ Subject: A stop job for unit user-0.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-0.slice has finished. ░░ ░░ The job identifier is 684 and the job result is done. 
Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4076]: error: kex_exchange_identification: Connection closed by remote host Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4076]: Connection closed by 10.29.163.169 port 46826 Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4077]: Accepted publickey for root from 10.29.163.169 port 46836 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 686. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 750. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: New session 3 of user root. ░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 4077. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 750. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 685. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Queued start job for default target Main User Target. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Created slice User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Started Daily Cleanup of User's Temporary Directories. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Reached target Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Reached target Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Starting D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Starting Create User's Volatile Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Listening on D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Finished Create User's Volatile Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Reached target Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Reached target Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Reached target Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[4081]: Startup finished in 56ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. 
Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 56018 microseconds. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 685. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 751. Jun 22 13:15:56 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4077]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4090]: Received disconnect from 10.29.163.169 port 46836:11: disconnected by user Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4090]: Disconnected from user root 10.29.163.169 port 46836 Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4077]: pam_unix(sshd:session): session closed for user root Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: Session 3 logged out. Waiting for processes to exit. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: session-3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope has successfully entered the 'dead' state. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: Removed session 3. ░░ Subject: Session 3 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 3 has been terminated. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4107]: Accepted publickey for root from 10.29.163.169 port 46848 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 4107. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 818. 
Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4107]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4110]: Received disconnect from 10.29.163.169 port 46848:11: disconnected by user Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4110]: Disconnected from user root 10.29.163.169 port 46848 Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4107]: pam_unix(sshd:session): session closed for user root Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: session-5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-5.scope has successfully entered the 'dead' state. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: Session 5 logged out. Waiting for processes to exit. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: Removed session 5. ░░ Subject: Session 5 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 5 has been terminated. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4127]: Accepted publickey for root from 10.29.163.169 port 46862 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: New session 6 of user root. ░░ Subject: A new session 6 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user root. ░░ ░░ The leading process of the session is 4127. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Session 6 of User root. ░░ Subject: A start job for unit session-6.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-6.scope has finished successfully. ░░ ░░ The job identifier is 885. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4127]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4130]: Received disconnect from 10.29.163.169 port 46862:11: disconnected by user Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4130]: Disconnected from user root 10.29.163.169 port 46862 Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4127]: pam_unix(sshd:session): session closed for user root Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: Session 6 logged out. Waiting for processes to exit. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: session-6.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-6.scope has successfully entered the 'dead' state. Jun 22 13:15:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: Removed session 6. ░░ Subject: Session 6 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 6 has been terminated. 
Jun 22 13:15:58 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4147]: Accepted publickey for root from 10.29.163.169 port 46868 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jun 22 13:15:58 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-logind[579]: New session 7 of user root. ░░ Subject: A new session 7 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 7 has been created for the user root. ░░ ░░ The leading process of the session is 4147. Jun 22 13:15:58 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Session 7 of User root. ░░ Subject: A start job for unit session-7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-7.scope has finished successfully. ░░ ░░ The job identifier is 952. Jun 22 13:15:58 ip-10-31-42-228.us-east-1.aws.redhat.com sshd[4147]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jun 22 13:16:22 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4261]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jun 22 13:16:24 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4302]: ansible-service_facts Invoked Jun 22 13:16:25 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4403]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:26 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4421]: ansible-stat Invoked with path=/etc/yum.repos.d/qa-tools.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:16:27 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4440]: ansible-ansible.legacy.command Invoked with _raw_params=curl -sLI --connect-timeout 5 -w '%{response_code}' http://liver2.lab.eng.brq.redhat.com/repo/ | grep ^200 _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:28 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4461]: ansible-ansible.legacy.dnf Invoked with name=['ca-certificates'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:16:28 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4479]: ansible-ansible.legacy.dnf Invoked with name=['curl'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:16:29 ip-10-31-42-228.us-east-1.aws.redhat.com 
python3[4497]: ansible-ansible.legacy.command Invoked with _raw_params=curl -skL -o /etc/pki/ca-trust/source/anchors/Current-IT-Root-CAs.pem https://certs.corp.redhat.com/certs/Current-IT-Root-CAs.pem _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:29 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4516]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:31 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4540]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust extract' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:32 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4564]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://gitlab.cee.redhat.com _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:33 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4583]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://beaker.engineering.redhat.com/ _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:34 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4602]: ansible-ansible.legacy.command Invoked with _raw_params=if [ $(find /etc/yum.repos.d/ -name 'centos-addons.repo' | wc -l ) -gt 0 ]; then sed -i "/^\[nfv-source\]/,/^$/d" /etc/yum.repos.d/centos-addons.repo; fi _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:34 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4624]: ansible-file Invoked with path=/etc/yum.repos.d/CentOS-Media.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:16:34 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4641]: ansible-file Invoked with path=/etc/yum.repos.d/CentOS-Linux-Media.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:16:34 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4658]: ansible-file Invoked with path=/etc/yum.repos.d/CentOS-Stream-Media.repo state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:16:34 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4675]: ansible-file Invoked with path=/etc/yum.repos.d/beaker-client-testing.repo state=absent recurse=False 
force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:16:35 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4692]: ansible-stat Invoked with path=/etc/yum.repos.d/beaker-client.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:16:35 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4711]: ansible-replace Invoked with path=/etc/yum.repos.d/beaker-client.repo regexp=7|\$releasever replace=8 backup=False encoding=utf-8 unsafe_writes=False after=None before=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:16:35 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4728]: ansible-stat Invoked with path=/etc/yum.repos.d/beaker-client.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:16:36 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4747]: ansible-community.general.ini_file Invoked with path=/etc/yum.repos.d/beaker-client.repo section=beaker-client option=skip_if_unavailable value=1 backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True create=True follow=False unsafe_writes=False values=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:16:36 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4764]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:36 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4795]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:36 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4813]: ansible-stat Invoked with path=/etc/yum.repos.d/baseos-ci.repo follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:16:37 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4830]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:37 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4861]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:38 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4879]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; 
echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:38 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4910]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:39 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4928]: ansible-ansible.legacy.command Invoked with _raw_params=yum repolist --enablerepo '*' | grep -q rhel-buildroot _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:39 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4948]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:39 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4979]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:40 ip-10-31-42-228.us-east-1.aws.redhat.com python3[4997]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:40 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5028]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:40 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5046]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:16:41 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5064]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:17:48 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5261]: ansible-ansible.legacy.dnf Invoked with name=['createrepo'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:17:54 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. 
░░ Subject: A start job for unit run-ra95a6531075c435da20a7f4281cec6c0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-ra95a6531075c435da20a7f4281cec6c0.service has finished successfully. ░░ ░░ The job identifier is 1020. Jun 22 13:17:54 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1083. Jun 22 13:17:56 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5291]: ansible-ansible.legacy.dnf Invoked with name=['make'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:17:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jun 22 13:17:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1083. Jun 22 13:17:57 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: run-ra95a6531075c435da20a7f4281cec6c0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-ra95a6531075c435da20a7f4281cec6c0.service has successfully entered the 'dead' state. 
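The community.general.ini_file invocation recorded at 13:16:36 above corresponds to a task of roughly the following shape. This is a minimal sketch reconstructed only from the parameters visible in the journal entry (repo path, section, option, value, state); the task name is illustrative and this is not an excerpt from the actual test playbook:

    - name: Mark the beaker-client repo as skip_if_unavailable   # task name assumed, not from the log
      community.general.ini_file:
        path: /etc/yum.repos.d/beaker-client.repo
        section: beaker-client
        option: skip_if_unavailable
        value: "1"
        state: present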
Jun 22 13:17:57 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5381]: ansible-ansible.legacy.dnf Invoked with name=['parted'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:17:58 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5399]: ansible-ansible.legacy.dnf Invoked with name=['psmisc'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:17:59 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5417]: ansible-ansible.legacy.dnf Invoked with name=['chrony'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:17:59 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5435]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c "chronyc sources" _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:02 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5453]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c "chronyc sources | grep redhat | grep -v util.phx2.redhat || chronyc add server clock.corp.redhat.com iburst" _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:05 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5474]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c "chronyc sources" _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:06 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5492]: ansible-ansible.legacy.command Invoked with _raw_params=chronyc waitsync 5 _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:06 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5510]: ansible-ansible.legacy.command Invoked with _raw_params=setenforce 0; timedatectl set-timezone UTC; setenforce 1 _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:06 ip-10-31-42-228.us-east-1.aws.redhat.com dbus-broker-launch[571]: avc: op=setenforce lsm=selinux 
enforcing=0 res=1 Jun 22 13:18:06 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Time & Date Service... ░░ Subject: A start job for unit systemd-timedated.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-timedated.service has begun execution. ░░ ░░ The job identifier is 1146. Jun 22 13:18:06 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Time & Date Service. ░░ Subject: A start job for unit systemd-timedated.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-timedated.service has finished successfully. ░░ ░░ The job identifier is 1146. Jun 22 13:18:06 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-timedated[5514]: Changed time zone to 'UTC' (UTC). ░░ Subject: Time zone change to UTC ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system timezone has been changed to UTC. Jun 22 13:18:07 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5531]: ansible-yum_repository Invoked with state=present name=beaker-tasks description=Beaker tasks baseurl=['http://beaker.engineering.redhat.com/rpms/'] gpgcheck=False enabled=True reposdir=/etc/yum.repos.d unsafe_writes=False bandwidth=None cost=None deltarpm_metadata_percentage=None deltarpm_percentage=None enablegroups=None exclude=None failovermethod=None file=None gpgcakey=None gpgkey=None module_hotfixes=None http_caching=None include=None includepkgs=None ip_resolve=None keepalive=None keepcache=None metadata_expire=None metadata_expire_filter=None metalink=None mirrorlist=None mirrorlist_expire=None params=None password=NOT_LOGGING_PARAMETER priority=None protect=None proxy=None proxy_password=NOT_LOGGING_PARAMETER proxy_username=None repo_gpgcheck=None retries=None s3_enabled=None skip_if_unavailable=None sslcacert=None ssl_check_cert_permissions=None sslclientcert=None sslclientkey=None sslverify=None throttle=None timeout=None ui_repoid_vars=None username=None async=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:07 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5548]: ansible-user Invoked with name=root update_password=always password=NOT_LOGGING_PARAMETER state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on ip-10-31-42-228.us-east-1.aws.redhat.com uid=None group=None groups=None comment=None home=None shell=None login_class=None password_expire_max=None password_expire_min=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jun 22 13:18:07 ip-10-31-42-228.us-east-1.aws.redhat.com usermod[5551]: change user 'root' password Jun 22 13:18:08 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5574]: ansible-lineinfile Invoked with dest=/etc/ssh/sshd_config regexp=#?PasswordAuthentication (?:yes|no) line=PasswordAuthentication yes state=present path=/etc/ssh/sshd_config backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:08 ip-10-31-42-228.us-east-1.aws.redhat.com 
python3[5591]: ansible-lineinfile Invoked with dest=/etc/ssh/sshd_config line=PermitRootLogin yes state=present path=/etc/ssh/sshd_config backrefs=False create=False backup=False firstmatch=False unsafe_writes=False regexp=None search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:08 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5608]: ansible-file Invoked with path=/var/lib/tft state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:08 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5625]: ansible-stat Invoked with path=/var/lib/tft/lib.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:18:09 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5680]: ansible-ansible.legacy.stat Invoked with path=/var/lib/tft/lib.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jun 22 13:18:09 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5730]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1719062288.932187-891-224615822936411/source dest=/var/lib/tft/lib.sh owner=root group=root mode=u=rx,go= follow=False _original_basename=lib.sh.j2 checksum=af779f2a149bc6695c9d2d1622342e81371886ab backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:09 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5764]: ansible-stat Invoked with path=/usr/local/bin/ci-extendtesttime.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:18:10 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5819]: ansible-ansible.legacy.stat Invoked with path=/usr/local/bin/ci-extendtesttime.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jun 22 13:18:10 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5869]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1719062289.8479855-929-184179805691830/source dest=/usr/local/bin/ci-extendtesttime.sh owner=root group=root mode=u=rx,go= follow=False _original_basename=ci-extendtesttime.sh.j2 checksum=2d0f6c296873c17e8b22c9490b000973b2a8a350 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:10 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5903]: ansible-stat Invoked with path=/usr/bin/extendtesttime.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:18:10 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5920]: ansible-stat Invoked with path=/usr/local/bin/ci-return2pool.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:18:11 ip-10-31-42-228.us-east-1.aws.redhat.com python3[5975]: ansible-ansible.legacy.stat Invoked with 
path=/usr/local/bin/ci-return2pool.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jun 22 13:18:11 ip-10-31-42-228.us-east-1.aws.redhat.com python3[6025]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1719062290.8741663-984-49611414865962/source dest=/usr/local/bin/ci-return2pool.sh owner=root group=root mode=u=rx,go= follow=False _original_basename=ci-return2pool.sh.j2 checksum=f77cae3b2a729c60bcb0c01c82bf347f13a9b2c5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:11 ip-10-31-42-228.us-east-1.aws.redhat.com python3[6059]: ansible-stat Invoked with path=/etc/motd follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:18:11 ip-10-31-42-228.us-east-1.aws.redhat.com python3[6116]: ansible-ansible.legacy.stat Invoked with path=/root/.guest-metadata.json follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jun 22 13:18:12 ip-10-31-42-228.us-east-1.aws.redhat.com python3[6166]: ansible-ansible.legacy.copy Invoked with dest=/root/.guest-metadata.json owner=root group=root mode=u=rw,go= src=/root/.ansible/tmp/ansible-tmp-1719062291.6706355-1032-188017947891869/source _original_basename=tmpmky0u4fw follow=False checksum=bf21a9e8fbc5a3846fb05b4fa0859e0917b2202f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:12 ip-10-31-42-228.us-east-1.aws.redhat.com python3[6200]: ansible-ansible.legacy.command Invoked with _raw_params=curl -sLI --connect-timeout 5 -w '%{response_code}' http://169.254.169.254/latest/meta-data/instance-id | grep ^200 _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:12 ip-10-31-42-228.us-east-1.aws.redhat.com python3[6220]: ansible-hostname Invoked with name=76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm use=None Jun 22 13:18:12 ip-10-31-42-228.us-east-1.aws.redhat.com dbus-broker-launch[571]: avc: op=setenforce lsm=selinux enforcing=1 res=1 Jun 22 13:18:12 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-journald[477]: Field hash table of /run/log/journal/d63c337a94f048b8ac03035012b6922a/system.journal has a fill level at 75.1 (250 of 333 items), suggesting rotation. Jun 22 13:18:12 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-journald[477]: /run/log/journal/d63c337a94f048b8ac03035012b6922a/system.journal: Journal header limits reached or header out-of-date, rotating. Jun 22 13:18:12 ip-10-31-42-228.us-east-1.aws.redhat.com rsyslogd[783]: imjournal: journal files changed, reloading... [v8.2310.0-4.el9 try https://www.rsyslog.com/e/0 ] Jun 22 13:18:12 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 1210. Jun 22 13:18:13 ip-10-31-42-228.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. 
░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 1210. Jun 22 13:18:13 ip-10-31-42-228.us-east-1.aws.redhat.com systemd-hostnamed[6225]: Changed pretty hostname to '76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm' Jun 22 13:18:13 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd-hostnamed[6225]: Hostname set to <76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm> (static) Jun 22 13:18:13 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm NetworkManager[612]: [1719062293.0395] hostname: static hostname changed from "ip-10-31-42-228.us-east-1.aws.redhat.com" to "76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm" Jun 22 13:18:13 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1274. Jun 22 13:18:13 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1274. Jun 22 13:18:13 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6254]: ansible-ansible.legacy.command Invoked with _raw_params=ping -4 -c1 -W10 $(hostname) _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:13 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6273]: ansible-file Invoked with path=/var/log/messages state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:14 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6290]: ansible-ansible.legacy.command Invoked with _raw_params=setsebool nis_enabled on _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:14 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm setsebool[6291]: The nis_enabled policy boolean was changed to on by root Jun 22 13:18:14 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6308]: ansible-stat Invoked with path=/usr/bin/rstrnt-package follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:18:14 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6327]: ansible-ansible.legacy.command Invoked with _raw_params=sed -e 's/rpm -q $package/rpm -q --whatprovides $package/' -i /usr/bin/rstrnt-package _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:14 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6345]: ansible-stat 
Invoked with path=/var/lib/restraint follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:18:15 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6364]: ansible-file Invoked with path=/var/lib/restraint/avc_since state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:15 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6381]: ansible-stat Invoked with path=/usr/share/beakerlib/beakerlib.sh follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:18:15 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6400]: ansible-file Invoked with dest=/usr/lib/beakerlib state=directory path=/usr/lib/beakerlib recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:15 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6417]: ansible-file Invoked with dest=/usr/share/rhts-library state=directory path=/usr/share/rhts-library recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:16 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6434]: ansible-file Invoked with src=/usr/share/beakerlib/beakerlib.sh dest=/usr/lib/beakerlib/beakerlib.sh state=link path=/usr/lib/beakerlib/beakerlib.sh recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:16 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6451]: ansible-file Invoked with src=/usr/share/beakerlib/beakerlib.sh dest=/usr/share/rhts-library/rhtslib.sh state=link path=/usr/share/rhts-library/rhtslib.sh recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:16 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6468]: ansible-ansible.legacy.command Invoked with _raw_params=mv /var/log/audit/audit.log /var/log/audit/audit.log.bak _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:16 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6486]: ansible-ansible.legacy.command Invoked with _raw_params=/usr/sbin/service auditd restart _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None 
stdin=None Jun 22 13:18:16 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm auditd[527]: The audit daemon is exiting. Jun 22 13:18:16 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: audit: type=1305 audit(1719062296.774:716): op=set audit_pid=0 old=527 auid=4294967295 ses=4294967295 subj=system_u:system_r:auditd_t:s0 res=1 Jun 22 13:18:16 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: auditd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit auditd.service has successfully entered the 'dead' state. Jun 22 13:18:16 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: audit: type=1131 audit(1719062296.784:717): pid=1 uid=0 auid=4294967295 ses=4294967295 subj=system_u:system_r:init_t:s0 msg='unit=auditd comm="systemd" exe="/usr/lib/systemd/systemd" hostname=? addr=? terminal=? res=success' Jun 22 13:18:16 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm dbus-broker-launch[571]: avc: op=load_policy lsm=selinux seqno=2 res=1 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: Starting Security Auditing Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 1338. Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm auditd[6502]: No plugins found, not dispatching events Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm auditd[6502]: Init complete, auditd 3.1.2 listening for events (startup state enable) Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6505]: /sbin/augenrules: No change Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: No rules Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: enabled 1 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: failure 1 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: pid 6502 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: rate_limit 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog_limit 8192 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: lost 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog_wait_time 60000 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog_wait_time_actual 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: enabled 1 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: failure 1 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: pid 6502 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: rate_limit 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog_limit 8192 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: lost 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog 4 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog_wait_time 60000 Jun 22 
13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog_wait_time_actual 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: enabled 1 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: failure 1 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: pid 6502 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: rate_limit 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog_limit 8192 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: lost 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog_wait_time 60000 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm augenrules[6515]: backlog_wait_time_actual 0 Jun 22 13:18:18 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: Started Security Auditing Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 1338. Jun 22 13:18:19 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6538]: ansible-stat Invoked with path=/etc/NetworkManager/conf.d follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:18:19 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6595]: ansible-ansible.legacy.stat Invoked with path=/etc/NetworkManager/conf.d/99-zzz-tft-workaround-dns-default.conf follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jun 22 13:18:19 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6645]: ansible-ansible.legacy.copy Invoked with dest=/etc/NetworkManager/conf.d/99-zzz-tft-workaround-dns-default.conf src=/root/.ansible/tmp/ansible-tmp-1719062299.0808246-1295-47768246814085/source _original_basename=tmpl2uajfw5 follow=False checksum=0145f3ae57eef5aa08bbb678fedbb3edd001cd2d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:18:20 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6679]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:20 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6710]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:20 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6728]: ansible-ansible.legacy.command Invoked with _raw_params=dnf config-manager --set-enabled highavailability _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 
13:18:21 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6746]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:22 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[6764]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:18:23 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jun 22 13:18:36 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: systemd-timedated.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-timedated.service has successfully entered the 'dead' state. Jun 22 13:18:43 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Jun 22 13:19:23 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7000]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jun 22 13:19:24 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7041]: ansible-service_facts Invoked Jun 22 13:19:26 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7142]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:19:27 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7160]: ansible-ansible.legacy.command Invoked with _raw_params=if [ $(find /etc/yum.repos.d/ -name 'epel.repo' | wc -l ) -gt 0 ]; then dnf config-manager --set-disabled epel; fi _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:19:28 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7182]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:19:28 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7200]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:30 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7432]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jun 22 13:20:31 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7473]: ansible-service_facts Invoked Jun 22 13:20:33 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7574]: 
ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:35 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7627]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jun 22 13:20:35 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7668]: ansible-ansible.legacy.dnf Invoked with name=['ca-certificates'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:20:37 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7686]: ansible-ansible.legacy.dnf Invoked with name=['curl'] state=latest allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:20:38 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7704]: ansible-ansible.legacy.command Invoked with _raw_params=curl -skL -o /etc/pki/ca-trust/source/anchors/Current-IT-Root-CAs.pem https://certs.corp.redhat.com/certs/Current-IT-Root-CAs.pem _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:38 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7723]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:40 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7747]: ansible-ansible.legacy.command Invoked with _raw_params=bash -c 'update-ca-trust extract' _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:41 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7771]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://gitlab.cee.redhat.com _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:41 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7790]: ansible-ansible.legacy.command Invoked with _raw_params=curl -v https://beaker.engineering.redhat.com/ _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:42 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7809]: ansible-service_facts Invoked Jun 22 13:20:44 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7910]: ansible-ansible.legacy.command Invoked with 
_raw_params=rpm -qa _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:45 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[7928]: ansible-service_facts Invoked Jun 22 13:20:46 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[8029]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -qa _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:47 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[8047]: ansible-ansible.legacy.command Invoked with _raw_params=for repofile in $(ls -1 /etc/yum.repos.d/); do echo "# vvvvv $repofile vvvvv vvvvv"; cat "/etc/yum.repos.d/$repofile"; echo "# ^^^^^ ^^^^^ ^^^^^ ^^^^^"; echo ""; done _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:47 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[8078]: ansible-ansible.legacy.command Invoked with _raw_params=yum -q repolist _uses_shell=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:48 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[8096]: ansible-ansible.legacy.command Invoked with _raw_params=dnf clean all --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:20:48 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[8114]: ansible-ansible.legacy.command Invoked with _raw_params=dnf makecache --enablerepo=* _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 22 13:21:49 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[8346]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jun 22 13:21:50 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[8389]: ansible-service_facts Invoked Jun 22 13:21:52 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[8490]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 22 13:21:52 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[8507]: ansible-ansible.legacy.dnf Invoked with name=['pcp-pmda-bpftrace', 'bpftrace'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 22 13:21:52 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[4081]: Created slice User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. 
Jun 22 13:21:52 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[4081]: Starting Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jun 22 13:21:52 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[4081]: Finished Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jun 22 13:22:24 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: SELinux: Converting 387 SID table entries... Jun 22 13:22:24 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: SELinux: policy capability network_peer_controls=1 Jun 22 13:22:24 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: SELinux: policy capability open_perms=1 Jun 22 13:22:24 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: SELinux: policy capability extended_socket_class=1 Jun 22 13:22:24 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: SELinux: policy capability always_check_network=0 Jun 22 13:22:24 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: SELinux: policy capability cgroup_seclabel=1 Jun 22 13:22:24 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: SELinux: policy capability nnp_nosuid_transition=1 Jun 22 13:22:24 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jun 22 13:22:25 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm dbus-broker-launch[571]: avc: op=load_policy lsm=selinux seqno=3 res=1 Jun 22 13:22:26 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm dbus-broker-launch[570]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jun 22 13:22:26 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm dbus-broker-launch[570]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. 
░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jun 22 13:22:33 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r97b650a7bfa04c9bb5623328761e757f.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r97b650a7bfa04c9bb5623328761e757f.service has finished successfully. ░░ ░░ The job identifier is 1344. Jun 22 13:22:33 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1407. Jun 22 13:22:33 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: Reloading. Jun 22 13:22:33 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd-rc-local-generator[9193]: /etc/rc.d/rc.local is not marked executable, skipping. Jun 22 13:22:33 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: Queuing reload/restart jobs for marked units… Jun 22 13:22:37 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jun 22 13:22:37 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1407. Jun 22 13:22:37 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm systemd[1]: run-r97b650a7bfa04c9bb5623328761e757f.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r97b650a7bfa04c9bb5623328761e757f.service has successfully entered the 'dead' state. 
Jun 22 13:22:38 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[10706]: ansible-file Invoked with path=/etc/pcp/bpftrace state=directory mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:22:38 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[10785]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/bpftrace/bpftrace.conf follow=True get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 22 13:22:39 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[10861]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1719062558.350349-2590-246733738969920/source dest=/etc/pcp/bpftrace/bpftrace.conf mode=0600 follow=True _original_basename=bpftrace.conf.j2 checksum=2559785fc812966eeb50dc5f6f22139195666360 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 22 13:22:39 76a7b925-80b2-4387-916d-d937c72bdb25.testing-farm python3[10911]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex echo '##################' echo List of SELinux AVCs - note list may be empty grep type=AVC /var/log/audit/audit.log echo '##################' ls -alrtF /run if [ -d /run/pcp ]; then ls -alrtF /run/pcp else echo ERROR - /run/pcp does not exist fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None ################## List of SELinux AVCs - note list may be empty ################## total 40 dr-xr-xr-x. 18 root root 235 Jun 20 10:25 ../ drwxr-xr-x. 2 root root 60 Jun 22 13:13 tmpfiles.d/ drwxr-xr-x. 3 root root 60 Jun 22 13:13 log/ drwxr-xr-x. 2 root root 40 Jun 22 13:13 mount/ drwxr-xr-x. 4 root root 100 Jun 22 13:14 initramfs/ -r--r--r--. 1 root root 33 Jun 22 13:14 machine-id srw-rw-rw-. 1 root root 0 Jun 22 13:14 rpcbind.sock= prw-------. 1 root root 0 Jun 22 13:14 initctl| drwxr-xr-x. 5 root root 100 Jun 22 13:14 credentials/ drwx------. 2 root root 40 Jun 22 13:14 cryptsetup/ drwxr-xr-x. 2 root root 40 Jun 22 13:14 setrans/ drwxr-xr-x. 2 root root 40 Jun 22 13:14 sepermit/ drwxr-xr-x. 2 root root 40 Jun 22 13:14 motd.d/ drwxr-xr-x. 2 root root 40 Jun 22 13:14 faillock/ drwxr-xr-x. 2 root root 40 Jun 22 13:14 console/ -rw-r--r--. 1 root root 0 Jun 22 13:14 motd drwx--x--x. 3 root root 60 Jun 22 13:14 sudo/ drwxr-xr-x. 3 root root 60 Jun 22 13:14 tpm2-tss/ drwx------. 2 rpc rpc 60 Jun 22 13:14 rpcbind/ drwxr-xr-x. 2 root root 60 Jun 22 13:14 dbus/ srw-rw-rw-. 1 root root 0 Jun 22 13:14 .heim_org.h5l.kcm-socket= drwxr-xr-x. 2 root root 60 Jun 22 13:14 irqbalance/ -rw-r--r--. 1 root root 4 Jun 22 13:14 dhclient.pid -rw-r--r--. 1 root root 619 Jun 22 13:14 dhclient.lease -rw-------. 1 root root 4 Jun 22 13:14 gssproxy.pid srw-rw-rw-. 1 root root 0 Jun 22 13:14 gssproxy.sock= drwxr-xr-x. 2 root root 60 Jun 22 13:14 chrony-dhcp/ -rw-------. 1 root root 4 Jun 22 13:14 sm-notify.pid drwxr-xr-x. 3 root root 80 Jun 22 13:14 lock/ -rw-------. 1 root root 3 Jun 22 13:14 rsyslogd.pid -rw-r--r--. 1 root root 4 Jun 22 13:14 sshd.pid -rw-r--r--. 
1 root root 4 Jun 22 13:14 crond.pid ----------. 1 root root 0 Jun 22 13:14 cron.reboot drwx------. 3 root root 340 Jun 22 13:14 cloud-init/ drwxr-xr-x. 2 root root 80 Jun 22 13:14 blkid/ -rw-------. 1 root root 0 Jun 22 13:14 agetty.reload drwxr-xr-x. 3 root root 60 Jun 22 13:15 user/ drwxr-xr-x. 7 root root 160 Jun 22 13:16 udev/ -rw-rw-r--. 1 root utmp 1920 Jun 22 13:16 utmp drwxr-x---. 2 chrony chrony 80 Jun 22 13:18 chrony/ drwxr-xr-x. 6 root root 160 Jun 22 13:18 NetworkManager/ -rw-r--r--. 1 root root 5 Jun 22 13:18 auditd.pid drwxrwxr-x. 2 pcp pcp 40 Jun 22 13:22 pcp/ drwxr-xr-x. 28 root root 900 Jun 22 13:22 ./ drwxr-xr-x. 21 root root 540 Jun 22 13:22 systemd/ total 0 drwxr-xr-x. 28 root root 900 Jun 22 13:22 ../ drwxrwxr-x. 2 pcp pcp 40 Jun 22 13:22 ./ TASK [Reraise error] *********************************************************** task path: /WORKDIR/git-weekly-cibzvipf1b/tests/handle_test_failure.yml:17 Saturday 22 June 2024 13:22:39 +0000 (0:00:00.401) 0:00:50.462 ********* fatal: [sut]: FAILED! => { "changed": false } MSG: {'failed': True, 'msg': "'dict object' has no attribute 'ansible_architecture'. 'dict object' has no attribute 'ansible_architecture'", '_ansible_no_log': False} TASK [Get final state of services] ********************************************* task path: /WORKDIR/git-weekly-cibzvipf1b/tests/restore_services_state.yml:3 Saturday 22 June 2024 13:22:39 +0000 (0:00:00.012) 0:00:50.475 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": 
"container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": 
"systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcp-reboot-init.service": { "name": "pcp-reboot-init.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": 
"indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": 
"systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": 
"not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Restore state of services] *********************************************** task path: /WORKDIR/git-weekly-cibzvipf1b/tests/restore_services_state.yml:9 Saturday 22 June 2024 13:22:41 +0000 (0:00:01.782) 0:00:52.258 ********* skipping: [sut] => (item=pmcd) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in initial_state.ansible_facts.services", "item": "pmcd", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=pmlogger) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in initial_state.ansible_facts.services", "item": "pmlogger", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=pmie) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in initial_state.ansible_facts.services", "item": "pmie", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=pmproxy) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in initial_state.ansible_facts.services", "item": "pmproxy", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=redis) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "redis", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=grafana-server) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "grafana-server", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped to retry, use: --limit @/tmp/tests_bz1855544.retry PLAY RECAP ********************************************************************* sut : ok=17 changed=2 unreachable=0 failed=1 skipped=8 rescued=1 ignored=0 Saturday 22 June 2024 13:22:41 +0000 (0:00:00.035) 0:00:52.293 ********* =============================================================================== fedora.linux_system_roles.private_metrics_subrole_bpftrace : Install needed bpftrace metrics packages -- 45.75s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:41 Get final state of services --------------------------------------------- 1.78s /WORKDIR/git-weekly-cibzvipf1b/tests/restore_services_state.yml:3 ------------- Get initial state of services ------------------------------------------- 1.54s /WORKDIR/git-weekly-cibzvipf1b/tests/get_services_state.yml:3 ----------------- fedora.linux_system_roles.private_metrics_subrole_bpftrace : Ensure PCP bpftrace agent is configured --- 0.91s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:61 Gathering Facts --------------------------------------------------------- 0.88s /WORKDIR/git-weekly-cibzvipf1b/tests/tests_bz1855544.yml:9 -------------------- Collect logs ------------------------------------------------------------ 0.40s /WORKDIR/git-weekly-cibzvipf1b/tests/handle_test_failure.yml:2 ---------------- fedora.linux_system_roles.private_metrics_subrole_bpftrace : Ensure PCP bpftrace configuration directory exists --- 0.35s 
/WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:54 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Check if system is ostree --- 0.25s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:18 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set platform/version specific variables --- 0.04s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:4 Restore state of services ----------------------------------------------- 0.04s /WORKDIR/git-weekly-cibzvipf1b/tests/restore_services_state.yml:9 ------------- Setup metric querying service. ------------------------------------------ 0.03s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:75 Setup bpftrace metrics. ------------------------------------------------- 0.03s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:66 Run the role ------------------------------------------------------------ 0.02s /WORKDIR/git-weekly-cibzvipf1b/tests/tests_bz1855544.yml:27 ------------------- fedora.linux_system_roles.private_metrics_subrole_bpftrace : Extract allowed bpftrace user accounts --- 0.02s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:48 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set flag to indicate system is ostree --- 0.02s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:23 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace package names --- 0.02s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:27 Configure Elasticsearch metrics ----------------------------------------- 0.02s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:35 fedora.linux_system_roles.private_metrics_subrole_redis : Set platform/version specific variables --- 0.02s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_redis/tasks/main.yml:4 fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace metrics package names --- 0.02s /WORKDIR/git-weekly-cibzvipf1b/.collection/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:34 Handle test failure ----------------------------------------------------- 0.02s /WORKDIR/git-weekly-cibzvipf1b/tests/tests_bz1855544.yml:47 ------------------- ---^---^---^---^---^--- # STDERR: ---v---v---v---v---v--- [DEPRECATION WARNING]: ANSIBLE_COLLECTIONS_PATHS option, does not fit var naming standard, use the singular form ANSIBLE_COLLECTIONS_PATH instead. This feature will be removed from ansible-core in version 2.19. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. ---^---^---^---^---^---
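Editor's note: the skipped "Restore state of services" items in the recap above show the restore mechanism only indirectly (each item is skipped when `item + '.service'` is missing from the gathered service facts). The snippet below is a minimal, hypothetical sketch of that pattern, not the contents of `/WORKDIR/git-weekly-cibzvipf1b/tests/restore_services_state.yml`, which is not reproduced in this log. The play name, task layout, and `service` module arguments are assumptions; only the loop items and the two `in ... ansible_facts.services` conditions are taken from the log.

```yaml
# Illustrative sketch only (assumed structure). Shows how a test harness can
# record service state with service_facts and conditionally restore it later,
# as the skipped loop items in the recap above imply.
- name: Demonstrate conditional service-state restore (sketch)
  hosts: all
  tasks:
    - name: Get initial state of services
      ansible.builtin.service_facts:
      register: initial_state

    # ... the role under test would install/start/stop services here ...

    - name: Get final state of services
      ansible.builtin.service_facts:
      register: final_state

    - name: Restore state of services
      ansible.builtin.service:
        name: "{{ item }}"
        state: "{{ 'started'
          if initial_state.ansible_facts.services[item + '.service'].state == 'running'
          else 'stopped' }}"
      loop:
        - pmcd
        - pmlogger
        - pmie
        - pmproxy
        - redis
        - grafana-server
      when:
        - item + '.service' in final_state.ansible_facts.services
        - item + '.service' in initial_state.ansible_facts.services
```

With `when` given as a list, Ansible reports the first condition that evaluates false as `false_condition`, which is consistent with the skip output above (the PCP services report the `initial_state` check, redis and grafana-server report the `final_state` check).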