# STDOUT: ---v---v---v---v---v--- ansible-playbook [core 2.16.0] config file = /etc/ansible/ansible.cfg configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /opt/ansible-2.16/lib/python3.11/site-packages/ansible ansible collection location = /WORKDIR/git-weekly-ci1ebenttp/.collection executable location = /opt/ansible-2.16/bin/ansible-playbook python version = 3.11.5 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/opt/ansible-2.16/bin/python) jinja version = 3.1.2 libyaml = True Using /etc/ansible/ansible.cfg as config file Skipping callback 'debug', as we already have a stdout callback. Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: tests_cluster_destroy.yml ******************************************** 2 plays in /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml PLAY [all] ********************************************************************* TASK [Include vault variables] ************************************************* task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:4 Saturday 25 May 2024 11:53:24 +0000 (0:00:00.012) 0:00:00.012 ********** ok: [sut] => { "ansible_facts": { "ha_cluster_hacluster_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n31303833633366333561656439323930303361333161363239346166656537323933313436\n3432386236656563343237306335323637396239616230353561330a313731623238393238\n62343064666336643930663239383936616465643134646536656532323461356237646133\n3761616633323839633232353637366266350a313163633236376666653238633435306565\n3264623032333736393535663833\n" } }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ci1ebenttp/tests/vars/vault-variables.yml" ], "changed": false } PLAY [Deconfigure cluster] ***************************************************** TASK [Gathering Facts] ********************************************************* task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:9 Saturday 25 May 2024 11:53:24 +0000 (0:00:00.011) 0:00:00.024 ********** ok: [sut] TASK [Set up test environment] ************************************************* task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:18 Saturday 25 May 2024 11:53:25 +0000 (0:00:00.863) 0:00:00.888 ********** TASK [fedora.linux_system_roles.ha_cluster : Set node name to 'localhost' for single-node clusters] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:9 Saturday 25 May 2024 11:53:25 +0000 (0:00:00.016) 0:00:00.905 ********** ok: [sut] => { "ansible_facts": { "inventory_hostname": "localhost" }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Ensure facts used by tests] ******* task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:14 Saturday 25 May 2024 11:53:25 +0000 (0:00:00.019) 0:00:00.924 ********** skipping: [sut] => { "changed": false, "false_condition": "'distribution' not in ansible_facts", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ******** task path: 
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:22 Saturday 25 May 2024 11:53:25 +0000 (0:00:00.009) 0:00:00.933 ********** ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:27 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.246) 0:00:01.180 ********** ok: [sut] => { "ansible_facts": { "__ha_cluster_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Do not try to enable RHEL repositories] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:32 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.014) 0:00:01.194 ********** skipping: [sut] => { "changed": false, "false_condition": "ansible_distribution == 'RedHat'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Copy nss-altfiles ha_cluster users to /etc/passwd] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:41 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.008) 0:00:01.203 ********** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_is_ostree | d(false)", "skip_reason": "Conditional result was False" } TASK [Run HA Cluster role] ***************************************************** task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:23 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.017) 0:00:01.221 ********** TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:3 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.023) 0:00:01.244 ********** included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:2 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.012) 0:00:01.257 ********** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ******** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:10 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.018) 0:00:01.276 ********** skipping: [sut] => { "changed": false, "false_condition": "not __ha_cluster_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:15 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.011) 0:00:01.287 ********** skipping: [sut] => { "changed": false, "false_condition": "not __ha_cluster_is_ostree is defined", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:19 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.010) 0:00:01.298 ********** ok: [sut] => (item=RedHat.yml) => { "ansible_facts": { "__ha_cluster_cloud_agents_packages": [], "__ha_cluster_fence_agent_packages_default": "{{ ['fence-agents-all'] + (['fence-virt'] if ansible_architecture == 'x86_64' else []) }}", "__ha_cluster_fullstack_node_packages": [ "corosync", "libknet1-plugins-all", "resource-agents", "pacemaker", "openssl" ], "__ha_cluster_pcs_provider": "pcs-0.10", "__ha_cluster_qdevice_node_packages": [ "corosync-qdevice", "bash", "coreutils", "curl", "grep", "nss-tools", "openssl", "sed" ], "__ha_cluster_repos": [], "__ha_cluster_role_essential_packages": [ "pcs", "corosync-qnetd" ], "__ha_cluster_sbd_packages": [ "sbd" ], "__ha_cluster_services": [ "corosync", "corosync-qdevice", "pacemaker" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__ha_cluster_cloud_agents_packages": [ "resource-agents-aliyun", "resource-agents-gcp", "fence-agents-aliyun", "fence-agents-aws", "fence-agents-azure-arm", "fence-agents-gce" ], "__ha_cluster_repos": [ { "id": "ha", "name": "HighAvailability" }, { "id": "resilientstorage", "name": "ResilientStorage" } ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__ha_cluster_cloud_agents_packages": [ "resource-agents-aliyun", "resource-agents-gcp", "fence-agents-aliyun", "fence-agents-aws", "fence-agents-azure-arm", "fence-agents-gce" ], "__ha_cluster_repos": [ { "id": "ha", "name": "HighAvailability" }, { "id": "resilientstorage", "name": "ResilientStorage" } ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.ha_cluster : Set Linux Pacemaker shell specific variables] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:34 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.027) 0:00:01.325 ********** ok: [sut] => { "ansible_facts": {}, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/shell_pcs.yml" ], "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Enable package repositories] ****** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:6 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.011) 0:00:01.337 ********** included: 
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Find platform/version specific tasks to enable repositories] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:3 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.013) 0:00:01.350 ********** ok: [sut] => (item=RedHat.yml) => { "ansible_facts": { "__ha_cluster_enable_repo_tasks_file": "/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/RedHat.yml" }, "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [sut] => (item=CentOS.yml) => { "ansible_facts": { "__ha_cluster_enable_repo_tasks_file": "/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml" }, "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml" } skipping: [sut] => (item=CentOS_8.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file", "item": "CentOS_8.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS_8.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file", "item": "CentOS_8.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Run platform/version specific tasks to enable repositories] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:21 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.025) 0:00:01.376 ********** included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml for sut TASK [fedora.linux_system_roles.ha_cluster : List active CentOS repositories] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml:3 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.024) 0:00:01.400 ********** ok: [sut] => { "changed": false, "cmd": [ "dnf", "repolist" ], "delta": "0:00:00.254415", "end": "2024-05-25 11:53:26.850338", "rc": 0, "start": "2024-05-25 11:53:26.595923" } STDOUT: repo id repo name appstream CentOS Stream 8 - AppStream baseos CentOS Stream 8 - BaseOS beaker-client Beaker Client - RedHatEnterpriseLinux8 beaker-harness Beaker harness beaker-tasks Beaker tasks beakerlib-libraries Copr repo for beakerlib-libraries owned by bgoncalv copr:copr.devel.redhat.com:lpol:qa-tools Copr repo for qa-tools owned by lpol extras CentOS Stream 8 - Extras extras-common CentOS Stream 8 - Extras common packages ha CentOS Stream 8 - HighAvailability TASK [fedora.linux_system_roles.ha_cluster : Enable CentOS repositories] ******* task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml:10 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.500) 0:00:01.900 ********** skipping: [sut] => (item={'id': 'ha', 'name': 'HighAvailability'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item.id not in __ha_cluster_repolist.stdout", 
"item": { "id": "ha", "name": "HighAvailability" }, "skip_reason": "Conditional result was False" } skipping: [sut] => (item={'id': 'resilientstorage', 'name': 'ResilientStorage'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item.name != \"ResilientStorage\" or ha_cluster_enable_repos_resilient_storage", "item": { "id": "resilientstorage", "name": "ResilientStorage" }, "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Install role essential packages] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11 Saturday 25 May 2024 11:53:26 +0000 (0:00:00.012) 0:00:01.913 ********** changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: libaio-0.3.112-1.el8.x86_64", "Installed: rubygem-json-2.1.0-111.module_el8+475+35a6c697.x86_64", "Installed: bzip2-1.0.6-26.el8.x86_64", "Installed: corosync-3.1.8-1.el8.x86_64", "Installed: libqb-1.0.3-13.el8.x86_64", "Installed: corosync-qnetd-3.0.2-2.el8.x86_64", "Installed: samba-client-libs-4.19.4-4.el8.x86_64", "Installed: samba-common-4.19.4-4.el8.noarch", "Installed: device-mapper-event-8:1.02.181-14.el8.x86_64", "Installed: libicu-60.3-2.el8_1.x86_64", "Installed: device-mapper-event-libs-8:1.02.181-14.el8.x86_64", "Installed: samba-common-libs-4.19.4-4.el8.x86_64", "Installed: pcs-0.10.18-2.el8.x86_64", "Installed: ruby-irb-2.5.9-111.module_el8+475+35a6c697.noarch", "Installed: python3-clufter-0.77.1-5.el8.noarch", "Installed: lvm2-8:2.03.14-14.el8.x86_64", "Installed: ruby-2.5.9-111.module_el8+475+35a6c697.x86_64", "Installed: rubygem-psych-3.0.2-111.module_el8+475+35a6c697.x86_64", "Installed: ruby-libs-2.5.9-111.module_el8+475+35a6c697.x86_64", "Installed: cifs-utils-7.0-1.el8.x86_64", "Installed: libknet1-1.28-1.el8.x86_64", "Installed: lvm2-libs-8:2.03.14-14.el8.x86_64", "Installed: rubygems-2.7.6.3-111.module_el8+475+35a6c697.noarch", "Installed: pacemaker-2.1.7-5.el8.x86_64", "Installed: resource-agents-4.9.0-54.el8.x86_64", "Installed: rubygem-did_you_mean-1.2.0-111.module_el8+475+35a6c697.noarch", "Installed: libknet1-compress-bzip2-plugin-1.28-1.el8.x86_64", "Installed: device-mapper-persistent-data-0.9.0-7.el8.x86_64", "Installed: libknet1-compress-lz4-plugin-1.28-1.el8.x86_64", "Installed: libknet1-compress-lzma-plugin-1.28-1.el8.x86_64", "Installed: rubygem-io-console-0.4.6-111.module_el8+475+35a6c697.x86_64", "Installed: net-snmp-libs-1:5.8-30.el8.x86_64", "Installed: net-tools-2.0-0.52.20160912git.el8.x86_64", "Installed: python3-pycurl-7.43.0.2-4.el8.x86_64", "Installed: libknet1-compress-lzo2-plugin-1.28-1.el8.x86_64", "Installed: libknet1-compress-plugins-all-1.28-1.el8.x86_64", "Installed: centos-logos-85.8-2.el8.x86_64", "Installed: overpass-fonts-3.0.2-3.el8.noarch", "Installed: libknet1-compress-zlib-plugin-1.28-1.el8.x86_64", "Installed: libknet1-crypto-nss-plugin-1.28-1.el8.x86_64", "Installed: libwbclient-4.19.4-4.el8.x86_64", "Installed: libknet1-crypto-openssl-plugin-1.28-1.el8.x86_64", "Installed: python3-pyparsing-2.1.10-7.el8.noarch", "Installed: pacemaker-cluster-libs-2.1.7-5.el8.x86_64", "Installed: libknet1-crypto-plugins-all-1.28-1.el8.x86_64", "Installed: fontpackages-filesystem-1.44-22.el8.noarch", "Installed: libknet1-plugins-all-1.28-1.el8.x86_64", "Installed: pacemaker-libs-2.1.7-5.el8.x86_64", "Installed: perl-TimeDate-1:2.30-15.module_el8+336+32327ac4.noarch", "Installed: 
pacemaker-schemas-2.1.7-5.el8.noarch", "Installed: liberation-fonts-common-1:2.00.3-7.el8.noarch", "Installed: rubygem-openssl-2.1.2-111.module_el8+475+35a6c697.x86_64", "Installed: nss-tools-3.90.0-7.el8.x86_64", "Installed: rubygem-bigdecimal-1.3.4-111.module_el8+475+35a6c697.x86_64", "Installed: rubygem-rdoc-6.0.1.1-111.module_el8+475+35a6c697.noarch", "Installed: liberation-sans-fonts-1:2.00.3-7.el8.noarch", "Installed: libnozzle1-1.28-1.el8.x86_64", "Installed: corosynclib-3.1.8-1.el8.x86_64", "Installed: clufter-bin-0.77.1-5.el8.x86_64", "Installed: clufter-common-0.77.1-5.el8.noarch", "Installed: pacemaker-cli-2.1.7-5.el8.x86_64" ] } lsrpackages: corosync-qnetd pcs TASK [fedora.linux_system_roles.ha_cluster : Check and prepare role variables] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:17 Saturday 25 May 2024 11:53:45 +0000 (0:00:19.021) 0:00:20.935 ********** included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Discover cluster node names] ****** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:3 Saturday 25 May 2024 11:53:45 +0000 (0:00:00.021) 0:00:20.957 ********** ok: [sut] => { "ansible_facts": { "__ha_cluster_node_name": "localhost" }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Collect cluster node names] ******* task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:7 Saturday 25 May 2024 11:53:45 +0000 (0:00:00.020) 0:00:20.977 ********** ok: [sut] => { "ansible_facts": { "__ha_cluster_all_node_names": [ "localhost" ] }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_node_options contains unknown or duplicate nodes] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:16 Saturday 25 May 2024 11:53:45 +0000 (0:00:00.022) 0:00:20.999 ********** skipping: [sut] => { "changed": false, "false_condition": "(\n __nodes_from_options != (__nodes_from_options | unique)\n) or (\n __nodes_from_options | difference(__ha_cluster_all_node_names)\n)\n", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Extract node options] ************* task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:30 Saturday 25 May 2024 11:53:45 +0000 (0:00:00.018) 0:00:21.018 ********** ok: [sut] => { "ansible_facts": { "__ha_cluster_local_node": {} }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if passwords are not specified] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:43 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.022) 0:00:21.040 ********** skipping: [sut] => (item=ha_cluster_hacluster_password) => { "ansible_loop_var": "item", "changed": false, "false_condition": "lookup(\"vars\", item, default=\"\") | string | length < 1", "item": 
"ha_cluster_hacluster_password", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Fail if nodes do not have the same number of SBD devices specified] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:53 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.028) 0:00:21.069 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if configuring qnetd on a cluster node] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:69 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.008) 0:00:21.077 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if no valid level is specified for a fencing level] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:79 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.007) 0:00:21.085 ********** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Fail if no target is specified for a fencing level] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:87 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.007) 0:00:21.093 ********** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Extract qdevice settings] ********* task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:101 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.007) 0:00:21.101 ********** ok: [sut] => { "ansible_facts": { "__ha_cluster_qdevice_host": "", "__ha_cluster_qdevice_in_use": false, "__ha_cluster_qdevice_model": "", "__ha_cluster_qdevice_pcs_address": "" }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Figure out if ATB needs to be enabled for SBD] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:110 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.024) 0:00:21.126 ********** ok: [sut] => { "ansible_facts": { "__ha_cluster_sbd_needs_atb": false }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if SBD needs ATB enabled and the user configured ATB to be disabled] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:120 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.022) 0:00:21.148 ********** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_sbd_needs_atb | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_pcsd_public_key_src and 
ha_cluster_pcsd_private_key_src are set along with ha_cluster_pcsd_certificates] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:127 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.016) 0:00:21.164 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is not none", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities] *********** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:141 Saturday 25 May 2024 11:53:46 +0000 (0:00:00.011) 0:00:21.176 ********** ok: [sut] => { "changed": false, "cmd": [ "pcs", "--version", "--full" ], "delta": "0:00:00.963227", "end": "2024-05-25 11:53:47.375744", "rc": 0, "start": "2024-05-25 11:53:46.412517" } STDOUT: 0.10.18 booth booth.enable-authfile.set booth.enable-authfile.unset cluster.config.backup-local cluster.config.restore-cluster cluster.config.restore-local cluster.config.uuid cluster.create cluster.create.enable cluster.create.local cluster.create.no-keys-sync cluster.create.separated-name-and-address cluster.create.start cluster.create.start.wait cluster.create.transport.knet cluster.create.transport.udp-udpu cluster.create.transport.udp-udpu.no-rrp cluster.destroy cluster.destroy.all cluster.report cluster.verify corosync.authkey.update corosync.config.get corosync.config.get.struct corosync.config.reload corosync.config.sync-to-local-cluster corosync.config.update corosync.link.add corosync.link.remove corosync.link.remove.list corosync.link.update corosync.qdevice corosync.qdevice.model.net corosync.quorum corosync.quorum.device corosync.quorum.device.heuristics corosync.quorum.device.model.net corosync.quorum.device.model.net.options_tls_and_kaptb corosync.quorum.set-expected-votes-runtime corosync.quorum.status corosync.quorum.unblock corosync.totem.block_unlisted_ips corosync.uidgid node.add node.add.enable node.add.separated-name-and-address node.add.start node.add.start.wait node.attributes node.attributes.set-list-for-node node.confirm-off node.fence node.guest node.kill node.maintenance node.maintenance.all node.maintenance.list node.maintenance.wait node.remote node.remote.onfail-demote node.remove node.remove-from-caches node.remove.list node.standby node.standby.all node.standby.list node.standby.wait node.start-stop-enable-disable node.start-stop-enable-disable.all node.start-stop-enable-disable.list node.start-stop-enable-disable.start-wait node.utilization node.utilization.set-list-for-node pcmk.acl.enable-disable pcmk.acl.group pcmk.acl.role pcmk.acl.role.create-with-permissions pcmk.acl.role.delete-with-users-groups pcmk.acl.user pcmk.alert pcmk.cib.checkpoints pcmk.cib.checkpoints.diff pcmk.cib.edit pcmk.cib.get pcmk.cib.get.scope pcmk.cib.roles.promoted-unpromoted pcmk.cib.set pcmk.constraint.colocation.set pcmk.constraint.colocation.set.options pcmk.constraint.colocation.simple pcmk.constraint.colocation.simple.options pcmk.constraint.hide-expired pcmk.constraint.location.simple pcmk.constraint.location.simple.options pcmk.constraint.location.simple.resource-regexp pcmk.constraint.location.simple.rule pcmk.constraint.location.simple.rule.node-attr-type-number pcmk.constraint.location.simple.rule.options pcmk.constraint.location.simple.rule.rule-add-remove 
pcmk.constraint.no-autocorrect pcmk.constraint.order.set pcmk.constraint.order.set.options pcmk.constraint.order.simple pcmk.constraint.order.simple.options pcmk.constraint.ticket.set pcmk.constraint.ticket.set.options pcmk.constraint.ticket.simple pcmk.constraint.ticket.simple.constraint-id pcmk.properties.cluster pcmk.properties.cluster.config.output-formats pcmk.properties.cluster.defaults pcmk.properties.cluster.describe pcmk.properties.cluster.describe.output-formats pcmk.properties.operation-defaults pcmk.properties.operation-defaults.multiple pcmk.properties.operation-defaults.rule pcmk.properties.operation-defaults.rule-rsc-op pcmk.properties.operation-defaults.rule.hide-expired pcmk.properties.operation-defaults.rule.node-attr-type-number pcmk.properties.resource-defaults pcmk.properties.resource-defaults.multiple pcmk.properties.resource-defaults.rule pcmk.properties.resource-defaults.rule-rsc-op pcmk.properties.resource-defaults.rule.hide-expired pcmk.properties.resource-defaults.rule.node-attr-type-number pcmk.resource.ban-move-clear pcmk.resource.ban-move-clear.bundles pcmk.resource.ban-move-clear.clear-expired pcmk.resource.ban-move-clear.clone pcmk.resource.bundle pcmk.resource.bundle.container-docker pcmk.resource.bundle.container-docker.promoted-max pcmk.resource.bundle.container-podman pcmk.resource.bundle.container-podman.promoted-max pcmk.resource.bundle.container-rkt pcmk.resource.bundle.container-rkt.promoted-max pcmk.resource.bundle.reset pcmk.resource.bundle.wait pcmk.resource.cleanup pcmk.resource.cleanup.one-resource pcmk.resource.cleanup.strict pcmk.resource.clone pcmk.resource.clone.custom-id pcmk.resource.clone.meta-in-create pcmk.resource.clone.wait pcmk.resource.config.output-formats pcmk.resource.create pcmk.resource.create.clone.custom-id pcmk.resource.create.in-existing-bundle pcmk.resource.create.meta pcmk.resource.create.no-master pcmk.resource.create.operations pcmk.resource.create.operations.onfail-demote pcmk.resource.create.promotable pcmk.resource.create.promotable.custom-id pcmk.resource.create.wait pcmk.resource.debug pcmk.resource.delete pcmk.resource.disable.safe pcmk.resource.disable.safe.brief pcmk.resource.disable.safe.tag pcmk.resource.disable.simulate pcmk.resource.disable.simulate.brief pcmk.resource.disable.simulate.tag pcmk.resource.enable-disable pcmk.resource.enable-disable.list pcmk.resource.enable-disable.tag pcmk.resource.enable-disable.wait pcmk.resource.failcount pcmk.resource.group pcmk.resource.group.add-remove-list pcmk.resource.group.wait pcmk.resource.manage-unmanage pcmk.resource.manage-unmanage.list pcmk.resource.manage-unmanage.tag pcmk.resource.manage-unmanage.with-monitor pcmk.resource.move.autoclean pcmk.resource.promotable pcmk.resource.promotable.custom-id pcmk.resource.promotable.meta-in-create pcmk.resource.promotable.wait pcmk.resource.refresh pcmk.resource.refresh.one-resource pcmk.resource.refresh.strict pcmk.resource.relations pcmk.resource.relocate pcmk.resource.restart pcmk.resource.update pcmk.resource.update-meta pcmk.resource.update-meta.list pcmk.resource.update-meta.wait pcmk.resource.update-operations pcmk.resource.update-operations.onfail-demote pcmk.resource.update.meta pcmk.resource.update.operations pcmk.resource.update.operations.onfail-demote pcmk.resource.update.wait pcmk.resource.utilization pcmk.resource.utilization-set-list-for-resource pcmk.stonith.cleanup pcmk.stonith.cleanup.one-resource pcmk.stonith.cleanup.strict pcmk.stonith.create pcmk.stonith.create.in-group pcmk.stonith.create.meta 
pcmk.stonith.create.operations pcmk.stonith.create.operations.onfail-demote pcmk.stonith.create.wait pcmk.stonith.delete pcmk.stonith.enable-disable pcmk.stonith.enable-disable.list pcmk.stonith.enable-disable.wait pcmk.stonith.history.cleanup pcmk.stonith.history.show pcmk.stonith.history.update pcmk.stonith.levels pcmk.stonith.levels.add-remove-devices-list pcmk.stonith.levels.clear pcmk.stonith.levels.node-attr pcmk.stonith.levels.node-regexp pcmk.stonith.levels.verify pcmk.stonith.refresh pcmk.stonith.refresh.one-resource pcmk.stonith.refresh.strict pcmk.stonith.update pcmk.stonith.update.scsi-devices pcmk.stonith.update.scsi-devices.add-remove pcmk.stonith.update.scsi-devices.mpath pcmk.tag pcmk.tag.resources pcs.auth.client pcs.auth.client.cluster pcs.auth.client.token pcs.auth.deauth-client pcs.auth.deauth-server pcs.auth.no-bidirectional pcs.auth.separated-name-and-address pcs.auth.server.token pcs.cfg-in-file.cib pcs.daemon-ssl-cert.set pcs.daemon-ssl-cert.sync-to-local-cluster pcs.disaster-recovery.essentials pcs.request-timeout resource-agents.describe resource-agents.list resource-agents.list.detailed resource-agents.ocf.version-1-0 resource-agents.ocf.version-1-1 resource-agents.self-validation sbd sbd.option-timeout-action sbd.shared-block-device status.corosync.membership status.pcmk.resources.hide-inactive status.pcmk.resources.id status.pcmk.resources.node status.pcmk.resources.orphaned status.pcmk.xml stonith-agents.describe stonith-agents.list stonith-agents.list.detailed stonith-agents.ocf.version-1-0 stonith-agents.ocf.version-1-1 stonith-agents.self-validation TASK [fedora.linux_system_roles.ha_cluster : Parse pcs capabilities] *********** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:148 Saturday 25 May 2024 11:53:47 +0000 (0:00:01.250) 0:00:22.426 ********** ok: [sut] => { "ansible_facts": { "__ha_cluster_pcs_capabilities": [ "booth", "booth.enable-authfile.set", "booth.enable-authfile.unset", "cluster.config.backup-local", "cluster.config.restore-cluster", "cluster.config.restore-local", "cluster.config.uuid", "cluster.create", "cluster.create.enable", "cluster.create.local", "cluster.create.no-keys-sync", "cluster.create.separated-name-and-address", "cluster.create.start", "cluster.create.start.wait", "cluster.create.transport.knet", "cluster.create.transport.udp-udpu", "cluster.create.transport.udp-udpu.no-rrp", "cluster.destroy", "cluster.destroy.all", "cluster.report", "cluster.verify", "corosync.authkey.update", "corosync.config.get", "corosync.config.get.struct", "corosync.config.reload", "corosync.config.sync-to-local-cluster", "corosync.config.update", "corosync.link.add", "corosync.link.remove", "corosync.link.remove.list", "corosync.link.update", "corosync.qdevice", "corosync.qdevice.model.net", "corosync.quorum", "corosync.quorum.device", "corosync.quorum.device.heuristics", "corosync.quorum.device.model.net", "corosync.quorum.device.model.net.options_tls_and_kaptb", "corosync.quorum.set-expected-votes-runtime", "corosync.quorum.status", "corosync.quorum.unblock", "corosync.totem.block_unlisted_ips", "corosync.uidgid", "node.add", "node.add.enable", "node.add.separated-name-and-address", "node.add.start", "node.add.start.wait", "node.attributes", "node.attributes.set-list-for-node", "node.confirm-off", "node.fence", "node.guest", "node.kill", "node.maintenance", "node.maintenance.all", "node.maintenance.list", 
"node.maintenance.wait", "node.remote", "node.remote.onfail-demote", "node.remove", "node.remove-from-caches", "node.remove.list", "node.standby", "node.standby.all", "node.standby.list", "node.standby.wait", "node.start-stop-enable-disable", "node.start-stop-enable-disable.all", "node.start-stop-enable-disable.list", "node.start-stop-enable-disable.start-wait", "node.utilization", "node.utilization.set-list-for-node", "pcmk.acl.enable-disable", "pcmk.acl.group", "pcmk.acl.role", "pcmk.acl.role.create-with-permissions", "pcmk.acl.role.delete-with-users-groups", "pcmk.acl.user", "pcmk.alert", "pcmk.cib.checkpoints", "pcmk.cib.checkpoints.diff", "pcmk.cib.edit", "pcmk.cib.get", "pcmk.cib.get.scope", "pcmk.cib.roles.promoted-unpromoted", "pcmk.cib.set", "pcmk.constraint.colocation.set", "pcmk.constraint.colocation.set.options", "pcmk.constraint.colocation.simple", "pcmk.constraint.colocation.simple.options", "pcmk.constraint.hide-expired", "pcmk.constraint.location.simple", "pcmk.constraint.location.simple.options", "pcmk.constraint.location.simple.resource-regexp", "pcmk.constraint.location.simple.rule", "pcmk.constraint.location.simple.rule.node-attr-type-number", "pcmk.constraint.location.simple.rule.options", "pcmk.constraint.location.simple.rule.rule-add-remove", "pcmk.constraint.no-autocorrect", "pcmk.constraint.order.set", "pcmk.constraint.order.set.options", "pcmk.constraint.order.simple", "pcmk.constraint.order.simple.options", "pcmk.constraint.ticket.set", "pcmk.constraint.ticket.set.options", "pcmk.constraint.ticket.simple", "pcmk.constraint.ticket.simple.constraint-id", "pcmk.properties.cluster", "pcmk.properties.cluster.config.output-formats", "pcmk.properties.cluster.defaults", "pcmk.properties.cluster.describe", "pcmk.properties.cluster.describe.output-formats", "pcmk.properties.operation-defaults", "pcmk.properties.operation-defaults.multiple", "pcmk.properties.operation-defaults.rule", "pcmk.properties.operation-defaults.rule-rsc-op", "pcmk.properties.operation-defaults.rule.hide-expired", "pcmk.properties.operation-defaults.rule.node-attr-type-number", "pcmk.properties.resource-defaults", "pcmk.properties.resource-defaults.multiple", "pcmk.properties.resource-defaults.rule", "pcmk.properties.resource-defaults.rule-rsc-op", "pcmk.properties.resource-defaults.rule.hide-expired", "pcmk.properties.resource-defaults.rule.node-attr-type-number", "pcmk.resource.ban-move-clear", "pcmk.resource.ban-move-clear.bundles", "pcmk.resource.ban-move-clear.clear-expired", "pcmk.resource.ban-move-clear.clone", "pcmk.resource.bundle", "pcmk.resource.bundle.container-docker", "pcmk.resource.bundle.container-docker.promoted-max", "pcmk.resource.bundle.container-podman", "pcmk.resource.bundle.container-podman.promoted-max", "pcmk.resource.bundle.container-rkt", "pcmk.resource.bundle.container-rkt.promoted-max", "pcmk.resource.bundle.reset", "pcmk.resource.bundle.wait", "pcmk.resource.cleanup", "pcmk.resource.cleanup.one-resource", "pcmk.resource.cleanup.strict", "pcmk.resource.clone", "pcmk.resource.clone.custom-id", "pcmk.resource.clone.meta-in-create", "pcmk.resource.clone.wait", "pcmk.resource.config.output-formats", "pcmk.resource.create", "pcmk.resource.create.clone.custom-id", "pcmk.resource.create.in-existing-bundle", "pcmk.resource.create.meta", "pcmk.resource.create.no-master", "pcmk.resource.create.operations", "pcmk.resource.create.operations.onfail-demote", "pcmk.resource.create.promotable", "pcmk.resource.create.promotable.custom-id", "pcmk.resource.create.wait", 
"pcmk.resource.debug", "pcmk.resource.delete", "pcmk.resource.disable.safe", "pcmk.resource.disable.safe.brief", "pcmk.resource.disable.safe.tag", "pcmk.resource.disable.simulate", "pcmk.resource.disable.simulate.brief", "pcmk.resource.disable.simulate.tag", "pcmk.resource.enable-disable", "pcmk.resource.enable-disable.list", "pcmk.resource.enable-disable.tag", "pcmk.resource.enable-disable.wait", "pcmk.resource.failcount", "pcmk.resource.group", "pcmk.resource.group.add-remove-list", "pcmk.resource.group.wait", "pcmk.resource.manage-unmanage", "pcmk.resource.manage-unmanage.list", "pcmk.resource.manage-unmanage.tag", "pcmk.resource.manage-unmanage.with-monitor", "pcmk.resource.move.autoclean", "pcmk.resource.promotable", "pcmk.resource.promotable.custom-id", "pcmk.resource.promotable.meta-in-create", "pcmk.resource.promotable.wait", "pcmk.resource.refresh", "pcmk.resource.refresh.one-resource", "pcmk.resource.refresh.strict", "pcmk.resource.relations", "pcmk.resource.relocate", "pcmk.resource.restart", "pcmk.resource.update", "pcmk.resource.update-meta", "pcmk.resource.update-meta.list", "pcmk.resource.update-meta.wait", "pcmk.resource.update-operations", "pcmk.resource.update-operations.onfail-demote", "pcmk.resource.update.meta", "pcmk.resource.update.operations", "pcmk.resource.update.operations.onfail-demote", "pcmk.resource.update.wait", "pcmk.resource.utilization", "pcmk.resource.utilization-set-list-for-resource", "pcmk.stonith.cleanup", "pcmk.stonith.cleanup.one-resource", "pcmk.stonith.cleanup.strict", "pcmk.stonith.create", "pcmk.stonith.create.in-group", "pcmk.stonith.create.meta", "pcmk.stonith.create.operations", "pcmk.stonith.create.operations.onfail-demote", "pcmk.stonith.create.wait", "pcmk.stonith.delete", "pcmk.stonith.enable-disable", "pcmk.stonith.enable-disable.list", "pcmk.stonith.enable-disable.wait", "pcmk.stonith.history.cleanup", "pcmk.stonith.history.show", "pcmk.stonith.history.update", "pcmk.stonith.levels", "pcmk.stonith.levels.add-remove-devices-list", "pcmk.stonith.levels.clear", "pcmk.stonith.levels.node-attr", "pcmk.stonith.levels.node-regexp", "pcmk.stonith.levels.verify", "pcmk.stonith.refresh", "pcmk.stonith.refresh.one-resource", "pcmk.stonith.refresh.strict", "pcmk.stonith.update", "pcmk.stonith.update.scsi-devices", "pcmk.stonith.update.scsi-devices.add-remove", "pcmk.stonith.update.scsi-devices.mpath", "pcmk.tag", "pcmk.tag.resources", "pcs.auth.client", "pcs.auth.client.cluster", "pcs.auth.client.token", "pcs.auth.deauth-client", "pcs.auth.deauth-server", "pcs.auth.no-bidirectional", "pcs.auth.separated-name-and-address", "pcs.auth.server.token", "pcs.cfg-in-file.cib", "pcs.daemon-ssl-cert.set", "pcs.daemon-ssl-cert.sync-to-local-cluster", "pcs.disaster-recovery.essentials", "pcs.request-timeout", "resource-agents.describe", "resource-agents.list", "resource-agents.list.detailed", "resource-agents.ocf.version-1-0", "resource-agents.ocf.version-1-1", "resource-agents.self-validation", "sbd", "sbd.option-timeout-action", "sbd.shared-block-device", "status.corosync.membership", "status.pcmk.resources.hide-inactive", "status.pcmk.resources.id", "status.pcmk.resources.node", "status.pcmk.resources.orphaned", "status.pcmk.xml", "stonith-agents.describe", "stonith-agents.list", "stonith-agents.list.detailed", "stonith-agents.ocf.version-1-0", "stonith-agents.ocf.version-1-1", "stonith-agents.self-validation" ], "__ha_cluster_pcsd_capabilities_available": false }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fetch pcsd capabilities] 
********** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:155 Saturday 25 May 2024 11:53:47 +0000 (0:00:00.017) 0:00:22.444 ********** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_pcsd_capabilities_available", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Parse pcsd capabilities] ********** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:163 Saturday 25 May 2024 11:53:47 +0000 (0:00:00.009) 0:00:22.453 ********** ok: [sut] => { "ansible_facts": { "__ha_cluster_pcsd_capabilities": [] }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if pcs is to old to configure resources and operations defaults] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:172 Saturday 25 May 2024 11:53:47 +0000 (0:00:00.019) 0:00:22.472 ********** skipping: [sut] => { "changed": false, "false_condition": "( ha_cluster_resource_defaults and not 'pcmk.properties.resource-defaults.multiple' in __ha_cluster_pcs_capabilities ) or ( ha_cluster_resource_operation_defaults and not 'pcmk.properties.operation-defaults.multiple' in __ha_cluster_pcs_capabilities )", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set hacluster password] *********** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:22 Saturday 25 May 2024 11:53:47 +0000 (0:00:00.008) 0:00:22.481 ********** changed: [sut] => { "append": false, "changed": true, "comment": "cluster user", "group": 189, "home": "/home/hacluster", "move_home": false, "name": "hacluster", "password": "NOT_LOGGING_PASSWORD", "shell": "/sbin/nologin", "state": "present", "uid": 189 } TASK [fedora.linux_system_roles.ha_cluster : Configure shell] ****************** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:29 Saturday 25 May 2024 11:53:47 +0000 (0:00:00.423) 0:00:22.905 ********** included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Stop pcsd] ************************ task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:6 Saturday 25 May 2024 11:53:47 +0000 (0:00:00.024) 0:00:22.929 ********** ok: [sut] => { "changed": false, "name": "pcsd", "state": "stopped", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target basic.target system.slice sysinit.target pcsd-ruby.service systemd-journald.socket", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", 
"CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "man:pcsd(8) man:pcs(8)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", 
"MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "network-online.target system.slice sysinit.target pcsd-ruby.service", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.ha_cluster : Regenerate pcsd TLS certificate and key] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:11 Saturday 25 May 2024 11:53:48 +0000 (0:00:00.491) 0:00:23.421 ********** skipping: [sut] => (item=/var/lib/pcsd/pcsd.key) => { "ansible_loop_var": "item", "changed": false, "false_condition": "ha_cluster_regenerate_keys", "item": "/var/lib/pcsd/pcsd.key", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/var/lib/pcsd/pcsd.crt) => { "ansible_loop_var": "item", "changed": false, "false_condition": "ha_cluster_regenerate_keys", "item": "/var/lib/pcsd/pcsd.crt", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Get the stat of /var/lib/pcsd] **** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:25 
Saturday 25 May 2024 11:53:48 +0000 (0:00:00.010) 0:00:23.431 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Allow certmonger to write into pcsd's certificate directory] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:30 Saturday 25 May 2024 11:53:48 +0000 (0:00:00.016) 0:00:23.448 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [Ensure the name of ha_cluster_pcsd_certificates is /var/lib/pcsd/pcsd; Create certificates using the certificate role] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:37 Saturday 25 May 2024 11:53:48 +0000 (0:00:00.017) 0:00:23.465 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set pcsd's certificate directory back to cluster_var_lib_t] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:49 Saturday 25 May 2024 11:53:48 +0000 (0:00:00.016) 0:00:23.482 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS private key] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:64 Saturday 25 May 2024 11:53:48 +0000 (0:00:00.017) 0:00:23.500 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is string", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS certificate] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:71 Saturday 25 May 2024 11:53:48 +0000 (0:00:00.019) 0:00:23.519 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is string", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcs_settings.conf] ***** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:79 Saturday 25 May 2024 11:53:48 +0000 (0:00:00.019) 0:00:23.539 ********** changed: [sut] => { "changed": true, "checksum": "b504e1b9c9aa23803dd6f95e66c757088b08551d", "dest": "/var/lib/pcsd/pcs_settings.conf", "gid": 0, "group": "root", "md5sum": "087ff556d850518c8fff5ad1179d8817", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:cluster_var_lib_t:s0", "size": 359, "src": "/root/.ansible/tmp/ansible-tmp-1716638028.5406635-26052-130066323115462/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot] *** task path: 
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88 Saturday 25 May 2024 11:53:49 +0000 (0:00:00.606) 0:00:24.146 ********** changed: [sut] => { "changed": true, "enabled": true, "name": "pcsd", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target basic.target network-online.target pcsd-ruby.service systemd-journald.socket system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "man:pcsd(8) man:pcs(8)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", 
"LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "network-online.target pcsd-ruby.service system.slice sysinit.target", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.ha_cluster : Configure firewall] *************** task path: 
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:35 Saturday 25 May 2024 11:53:50 +0000 (0:00:01.634) 0:00:25.780 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool or ha_cluster_qnetd.present | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure selinux] **************** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:38 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.017) 0:00:25.797 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool or ha_cluster_qnetd.present | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Install cluster packages] ********* task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:44 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.017) 0:00:25.815 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute fence-virt authkey] **** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:50 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.015) 0:00:25.831 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure SBD] ******************** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:55 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.015) 0:00:25.846 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure corosync] *************** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:58 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.015) 0:00:25.862 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Cluster auth] ********************* task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:61 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.016) 0:00:25.878 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute cluster shared keys] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:66 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.016) 0:00:25.895 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Enable or disable cluster services on boot] *** task path: 
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:72 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.016) 0:00:25.911 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Start the cluster and reload corosync.conf] *** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:75 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.015) 0:00:25.927 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Create and push CIB] ************** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:78 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.015) 0:00:25.942 ********** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Remove cluster configuration] ***** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:87 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.016) 0:00:25.959 ********** included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-destroy-pcs-0.10.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Remove cluster configuration] ***** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-destroy-pcs-0.10.yml:9 Saturday 25 May 2024 11:53:50 +0000 (0:00:00.012) 0:00:25.972 ********** ok: [sut] => (item=/etc/corosync/corosync.conf) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "pcs", "cluster", "destroy" ], "delta": null, "end": null, "item": "/etc/corosync/corosync.conf", "rc": 0, "start": null } STDOUT: skipped, since /etc/corosync/corosync.conf does not exist MSG: Did not run command since '/etc/corosync/corosync.conf' does not exist ok: [sut] => (item=/var/lib/pacemaker/cib/cib.xml) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "pcs", "cluster", "destroy" ], "delta": null, "end": null, "item": "/var/lib/pacemaker/cib/cib.xml", "rc": 0, "start": null } STDOUT: skipped, since /var/lib/pacemaker/cib/cib.xml does not exist MSG: Did not run command since '/var/lib/pacemaker/cib/cib.xml' does not exist TASK [fedora.linux_system_roles.ha_cluster : Remove fence-virt authkey] ******** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:90 Saturday 25 May 2024 11:53:51 +0000 (0:00:00.370) 0:00:26.342 ********** ok: [sut] => { "changed": false, "path": "/etc/cluster/fence_xvm.key", "state": "absent" } TASK [fedora.linux_system_roles.ha_cluster : Configure qnetd] ****************** task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:95 Saturday 25 May 2024 11:53:51 +0000 (0:00:00.287) 0:00:26.629 ********** included: 
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Remove qnetd configuration] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:3
Saturday 25 May 2024 11:53:51 +0000 (0:00:00.013) 0:00:26.642 **********
changed: [sut] => { "changed": true, "cmd": [ "pcs", "--force", "--", "qdevice", "destroy", "net" ], "delta": "0:00:01.099429", "end": "2024-05-25 11:53:52.889815", "rc": 0, "start": "2024-05-25 11:53:51.790386" }

STDOUT:

Stopping quorum device...
quorum device stopped
quorum device disabled
Quorum device 'net' configuration files removed

TASK [fedora.linux_system_roles.ha_cluster : Setup qnetd] **********************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:16
Saturday 25 May 2024 11:53:52 +0000 (0:00:01.297) 0:00:27.940 **********
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_qnetd.present | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Enable or disable qnetd service on boot] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:26
Saturday 25 May 2024 11:53:52 +0000 (0:00:00.017) 0:00:27.958 **********
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_qnetd.present | d(false)", "skip_reason": "Conditional result was False" }

TASK [Stat corosync.conf] ******************************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:28
Saturday 25 May 2024 11:53:52 +0000 (0:00:00.017) 0:00:27.976 **********
ok: [sut] => { "changed": false, "stat": { "exists": false } }

TASK [Stat cib.xml] ************************************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:33
Saturday 25 May 2024 11:53:53 +0000 (0:00:00.187) 0:00:28.163 **********
ok: [sut] => { "changed": false, "stat": { "exists": false } }

TASK [Stat fence_xvm.key] ******************************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:38
Saturday 25 May 2024 11:53:53 +0000 (0:00:00.184) 0:00:28.347 **********
ok: [sut] => { "changed": false, "stat": { "exists": false } }

TASK [Check the files do not exist] ********************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:43
Saturday 25 May 2024 11:53:53 +0000 (0:00:00.183) 0:00:28.531 **********
ok: [sut] => { "changed": false }

MSG:

All assertions passed

PLAY RECAP *********************************************************************
sut : ok=36 changed=5 unreachable=0 failed=0 skipped=37 rescued=0 ignored=0

Saturday 25 May 2024 11:53:53 +0000 (0:00:00.016) 0:00:28.547 **********
===============================================================================
fedora.linux_system_roles.ha_cluster : Install role essential packages -- 19.02s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11
fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot --- 1.63s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88
fedora.linux_system_roles.ha_cluster : Remove qnetd configuration ------- 1.30s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:3
fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities ----------- 1.25s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:141
Gathering Facts --------------------------------------------------------- 0.86s
/WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:9 --------------
fedora.linux_system_roles.ha_cluster : Distribute pcs_settings.conf ----- 0.61s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:79
fedora.linux_system_roles.ha_cluster : List active CentOS repositories --- 0.50s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml:3
fedora.linux_system_roles.ha_cluster : Stop pcsd ------------------------ 0.49s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:6
fedora.linux_system_roles.ha_cluster : Set hacluster password ----------- 0.42s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:22
fedora.linux_system_roles.ha_cluster : Remove cluster configuration ----- 0.37s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-destroy-pcs-0.10.yml:9
fedora.linux_system_roles.ha_cluster : Remove fence-virt authkey -------- 0.29s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:90
fedora.linux_system_roles.ha_cluster : Check if system is ostree -------- 0.25s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:22
Stat corosync.conf ------------------------------------------------------ 0.19s
/WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:28 -------------
Stat cib.xml ------------------------------------------------------------ 0.18s
/WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:33 -------------
Stat fence_xvm.key ------------------------------------------------------ 0.18s
/WORKDIR/git-weekly-ci1ebenttp/tests/tests_cluster_destroy.yml:38 -------------
fedora.linux_system_roles.ha_cluster : Fail if passwords are not specified --- 0.03s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:43
fedora.linux_system_roles.ha_cluster : Set platform/version specific variables --- 0.03s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:19
fedora.linux_system_roles.ha_cluster : Find platform/version specific tasks to enable repositories --- 0.03s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:3
fedora.linux_system_roles.ha_cluster : Extract qdevice settings --------- 0.02s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:101
fedora.linux_system_roles.ha_cluster : Configure shell ------------------ 0.02s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:29
---^---^---^---^---^---

# STDERR:
---v---v---v---v---v---
[DEPRECATION WARNING]: ANSIBLE_COLLECTIONS_PATHS option, does not fit var naming standard, use the singular form ANSIBLE_COLLECTIONS_PATH instead. This feature will be removed from ansible-core in version 2.19. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.
[DEPRECATION WARNING]: Encryption using the Python crypt module is deprecated. The Python crypt module is deprecated and will be removed from Python 3.13. Install the passlib library for continued encryption functionality. This feature will be removed in version 2.17. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.
---^---^---^---^---^---
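
For reference, the run above exercises the role's deconfiguration path: `ha_cluster_cluster_present` evaluates false (so all cluster setup tasks are skipped and "Remove cluster configuration" runs `pcs cluster destroy`), and the qnetd removal branch runs `pcs --force -- qdevice destroy net`. A minimal sketch of a play that drives the role this way is shown below; it is an illustration based on the variables visible in the log (`ha_cluster_cluster_present`, `ha_cluster_qnetd`), not a copy of `tests_cluster_destroy.yml`, which may differ in detail.

```yaml
# Hypothetical deconfiguration play; assumes the fedora.linux_system_roles
# collection is installed. Mirrors the behavior seen in this run.
- name: Deconfigure cluster
  hosts: all
  vars:
    # false => the role removes corosync/pacemaker configuration
    ha_cluster_cluster_present: false
    # present: false => the role also removes any qnetd (quorum device) setup
    ha_cluster_qnetd:
      present: false
  roles:
    - fedora.linux_system_roles.ha_cluster
```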
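
The final tasks of the run ("Stat corosync.conf", "Stat cib.xml", "Stat fence_xvm.key", "Check the files do not exist") verify the destroy by stat-ing each artifact and asserting that none exist. The sketch below reproduces that pattern with the paths confirmed in the log; the `register` variable names are illustrative choices, not taken from the test file.

```yaml
# Post-destroy verification pattern, as seen in the log. Paths come from the
# role output above; register names are hypothetical.
- name: Stat corosync.conf
  ansible.builtin.stat:
    path: /etc/corosync/corosync.conf
  register: stat_corosync_conf

- name: Stat cib.xml
  ansible.builtin.stat:
    path: /var/lib/pacemaker/cib/cib.xml
  register: stat_cib_xml

- name: Stat fence_xvm.key
  ansible.builtin.stat:
    path: /etc/cluster/fence_xvm.key
  register: stat_fence_xvm_key

- name: Check the files do not exist
  ansible.builtin.assert:
    that:
      - not stat_corosync_conf.stat.exists
      - not stat_cib_xml.stat.exists
      - not stat_fence_xvm_key.stat.exists
```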