summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
Diffstat (limited to 'test')
-rw-r--r--test/ansible_test/Makefile13
-rw-r--r--test/ansible_test/unit/test_diff.py105
-rw-r--r--test/ansible_test/validate-modules-unit/test_validate_modules_regex.py (renamed from test/units/ansible_test/test_validate_modules.py)34
-rw-r--r--test/integration/targets/ansible-config/aliases2
-rwxr-xr-xtest/integration/targets/ansible-config/files/ini_dupes.py12
-rw-r--r--test/integration/targets/ansible-config/tasks/main.yml14
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json2
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py1
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py3
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/MANIFEST.json2
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py1
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py22
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/test/yolo.yml19
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol2/MANIFEST.json2
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol3/galaxy.yml6
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol3/plugins/modules/test1.py27
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol4/galaxy.yml6
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol4/plugins/modules/test2.py27
-rw-r--r--test/integration/targets/ansible-doc/randommodule-text.output26
-rw-r--r--test/integration/targets/ansible-doc/randommodule.output19
-rwxr-xr-xtest/integration/targets/ansible-doc/runme.sh134
-rw-r--r--test/integration/targets/ansible-doc/yolo-text.output47
-rw-r--r--test/integration/targets/ansible-doc/yolo.output64
-rw-r--r--test/integration/targets/ansible-galaxy-collection-cli/files/expected.txt6
-rw-r--r--test/integration/targets/ansible-galaxy-collection-cli/files/galaxy.yml1
-rw-r--r--test/integration/targets/ansible-galaxy-collection-cli/files/make_collection_dir.py4
-rw-r--r--test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml2
-rw-r--r--test/integration/targets/ansible-galaxy-collection-scm/tasks/multi_collection_repo_all.yml2
-rw-r--r--test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_recursive_scm_dependency.yml7
-rw-r--r--test/integration/targets/ansible-galaxy-collection/library/reset_pulp.py57
-rw-r--r--test/integration/targets/ansible-galaxy-collection/library/setup_collections.py15
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/build.yml25
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/download.yml4
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/fail_fast_resolvelib.yml2
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/init.yml65
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/install.yml203
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/install_offline.yml8
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/list.yml41
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/main.yml43
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/pinned_pre_releases_in_deptree.yml79
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/publish.yml5
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/supported_resolvelib.yml8
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml2
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/verify.yml8
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/virtual_direct_requests.yml77
-rw-r--r--test/integration/targets/ansible-galaxy-collection/templates/ansible.cfg.j216
-rw-r--r--test/integration/targets/ansible-galaxy-collection/vars/main.yml43
-rwxr-xr-xtest/integration/targets/ansible-galaxy-role/files/create-role-archive.py21
-rw-r--r--test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml86
-rw-r--r--test/integration/targets/ansible-galaxy-role/tasks/main.yml15
-rw-r--r--test/integration/targets/ansible-galaxy-role/tasks/valid-role-symlinks.yml78
-rw-r--r--test/integration/targets/ansible-galaxy/files/testserver.py19
-rwxr-xr-xtest/integration/targets/ansible-galaxy/runme.sh7
-rw-r--r--test/integration/targets/ansible-inventory/files/complex.ini35
-rw-r--r--test/integration/targets/ansible-inventory/files/valid_sample.yml2
-rw-r--r--test/integration/targets/ansible-inventory/filter_plugins/toml.py50
-rw-r--r--test/integration/targets/ansible-inventory/tasks/json_output.yml33
-rw-r--r--test/integration/targets/ansible-inventory/tasks/main.yml7
-rw-r--r--test/integration/targets/ansible-inventory/tasks/toml_output.yml43
-rw-r--r--test/integration/targets/ansible-inventory/tasks/yaml_output.yml34
-rw-r--r--test/integration/targets/ansible-playbook-callbacks/aliases4
-rw-r--r--test/integration/targets/ansible-playbook-callbacks/all-callbacks.yml123
-rw-r--r--test/integration/targets/ansible-playbook-callbacks/callbacks_list.expected24
-rwxr-xr-xtest/integration/targets/ansible-playbook-callbacks/runme.sh12
-rw-r--r--test/integration/targets/ansible-pull/pull-integration-test/conn_secret.yml12
-rw-r--r--test/integration/targets/ansible-pull/pull-integration-test/secret_connection_password1
-rwxr-xr-xtest/integration/targets/ansible-pull/runme.sh5
-rw-r--r--test/integration/targets/ansible-runner/aliases1
-rw-r--r--test/integration/targets/ansible-runner/files/adhoc_example1.py1
-rw-r--r--test/integration/targets/ansible-test-cloud-foreman/aliases3
-rw-r--r--test/integration/targets/ansible-test-cloud-foreman/tasks/main.yml6
-rw-r--r--test/integration/targets/ansible-test-cloud-openshift/aliases2
-rw-r--r--test/integration/targets/ansible-test-cloud-openshift/tasks/main.yml11
-rw-r--r--test/integration/targets/ansible-test-cloud-vcenter/aliases3
-rw-r--r--test/integration/targets/ansible-test-cloud-vcenter/tasks/main.yml6
-rwxr-xr-xtest/integration/targets/ansible-test-container/runme.py11
-rw-r--r--test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor1.py4
-rw-r--r--test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor2.py4
-rw-r--r--test/integration/targets/ansible-test-sanity-import/expected.txt2
-rwxr-xr-xtest/integration/targets/ansible-test-sanity-import/runme.sh22
-rw-r--r--test/integration/targets/ansible-test-sanity-no-get-exception/aliases4
-rw-r--r--test/integration/targets/ansible-test-sanity-no-get-exception/ansible_collections/ns/col/do-not-check-me.py5
-rw-r--r--test/integration/targets/ansible-test-sanity-no-get-exception/ansible_collections/ns/col/plugins/modules/check-me.py5
-rw-r--r--test/integration/targets/ansible-test-sanity-no-get-exception/expected.txt2
-rwxr-xr-xtest/integration/targets/ansible-test-sanity-no-get-exception/runme.sh12
-rw-r--r--test/integration/targets/ansible-test-sanity-pylint/aliases4
-rw-r--r--test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/galaxy.yml6
-rw-r--r--test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/plugins/lookup/deprecated.py22
-rw-r--r--test/integration/targets/ansible-test-sanity-pylint/expected.txt1
-rwxr-xr-xtest/integration/targets/ansible-test-sanity-pylint/runme.sh25
-rw-r--r--test/integration/targets/ansible-test-sanity-replace-urlopen/aliases4
-rw-r--r--test/integration/targets/ansible-test-sanity-replace-urlopen/ansible_collections/ns/col/do-not-check-me.py5
-rw-r--r--test/integration/targets/ansible-test-sanity-replace-urlopen/ansible_collections/ns/col/plugins/modules/check-me.py5
-rw-r--r--test/integration/targets/ansible-test-sanity-replace-urlopen/expected.txt1
-rwxr-xr-xtest/integration/targets/ansible-test-sanity-replace-urlopen/runme.sh12
-rw-r--r--test/integration/targets/ansible-test-sanity-use-compat-six/aliases4
-rw-r--r--test/integration/targets/ansible-test-sanity-use-compat-six/ansible_collections/ns/col/do-not-check-me.py5
-rw-r--r--test/integration/targets/ansible-test-sanity-use-compat-six/ansible_collections/ns/col/plugins/modules/check-me.py5
-rw-r--r--test/integration/targets/ansible-test-sanity-use-compat-six/expected.txt1
-rwxr-xr-xtest/integration/targets/ansible-test-sanity-use-compat-six/runme.sh12
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/meta/runtime.yml4
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/lookup/import_order_lookup.py16
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_1.py33
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_2.py34
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_3.py33
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_4.py33
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_5.py33
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_6.py34
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_7.py33
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/import_order.py24
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/semantic_markup.py127
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/sidecar.yaml3
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/failure/README.md1
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/failure/meta/runtime.yml4
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/ps_only/README.md1
-rw-r--r--test/integration/targets/ansible-test-sanity-validate-modules/expected.txt21
-rwxr-xr-xtest/integration/targets/ansible-test-sanity-validate-modules/runme.sh12
-rw-r--r--test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/README.md1
-rw-r--r--test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/meta/runtime.yml9
-rw-r--r--test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/filter/check_pylint.py (renamed from test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/plugin_utils/check_pylint.py)7
-rw-r--r--test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/bad.py4
-rw-r--r--test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/world.py2
-rw-r--r--test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/modules/bad.py2
-rw-r--r--test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/random_directory/bad.py2
-rw-r--r--test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py4
-rw-r--r--test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/sanity/ignore.txt1
-rwxr-xr-xtest/integration/targets/ansible-test-sanity/runme.sh6
-rw-r--r--test/integration/targets/ansible-test-units-assertions/aliases4
-rw-r--r--test/integration/targets/ansible-test-units-assertions/ansible_collections/ns/col/tests/unit/plugins/modules/test_assertion.py6
-rwxr-xr-xtest/integration/targets/ansible-test-units-assertions/runme.sh22
-rw-r--r--test/integration/targets/ansible-test-units-forked/aliases5
-rw-r--r--test/integration/targets/ansible-test-units-forked/ansible_collections/ns/col/tests/unit/plugins/modules/test_ansible_forked.py43
-rwxr-xr-xtest/integration/targets/ansible-test-units-forked/runme.sh45
-rwxr-xr-xtest/integration/targets/ansible-test/venv-pythons.py10
-rw-r--r--test/integration/targets/ansible-vault/invalid_format/broken-group-vars-tasks.yml2
-rwxr-xr-xtest/integration/targets/ansible-vault/runme.sh32
-rw-r--r--test/integration/targets/ansible-vault/test_vault.yml2
-rw-r--r--test/integration/targets/ansible-vault/test_vaulted_template.yml2
-rw-r--r--test/integration/targets/ansible/aliases1
-rw-r--r--test/integration/targets/ansible/ansible-testé.cfg2
-rw-r--r--test/integration/targets/ansible/callback_plugins/callback_debug.py (renamed from test/integration/targets/support-callback_plugins/callback_plugins/callback_debug.py)0
-rwxr-xr-xtest/integration/targets/ansible/runme.sh6
-rw-r--r--test/integration/targets/apt/aliases1
-rw-r--r--test/integration/targets/apt/tasks/apt.yml51
-rw-r--r--test/integration/targets/apt/tasks/repo.yml2
-rw-r--r--test/integration/targets/apt/tasks/upgrade_scenarios.yml25
-rw-r--r--test/integration/targets/apt_key/aliases1
-rw-r--r--test/integration/targets/apt_key/tasks/main.yml2
-rw-r--r--test/integration/targets/apt_repository/aliases1
-rw-r--r--test/integration/targets/apt_repository/tasks/apt.yml18
-rw-r--r--test/integration/targets/apt_repository/tasks/mode_cleanup.yaml2
-rw-r--r--test/integration/targets/argspec/library/argspec.py6
-rw-r--r--test/integration/targets/become/tasks/main.yml4
-rw-r--r--test/integration/targets/blockinfile/tasks/append_newline.yml119
-rw-r--r--test/integration/targets/blockinfile/tasks/create_dir.yml29
-rw-r--r--test/integration/targets/blockinfile/tasks/main.yml3
-rw-r--r--test/integration/targets/blockinfile/tasks/prepend_newline.yml119
-rw-r--r--test/integration/targets/blocks/unsafe_failed_task.yml2
-rw-r--r--test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout4
-rw-r--r--test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout4
-rw-r--r--test/integration/targets/check_mode/check_mode.yml2
-rw-r--r--test/integration/targets/check_mode/roles/test_check_mode/tasks/main.yml8
-rw-r--r--test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/connection/localconn.py2
-rw-r--r--test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing.py5
-rw-r--r--test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_collection.py5
-rw-r--r--test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_module.py5
-rw-r--r--test/integration/targets/collections/collections/ansible_collections/testns/content_adj/plugins/inventory/statichost.py1
-rw-r--r--test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py1
-rw-r--r--test/integration/targets/collections/testcoll2/MANIFEST.json (renamed from test/integration/targets/ansible-playbook-callbacks/include_me.yml)0
-rw-r--r--test/integration/targets/command_nonexisting/tasks/main.yml2
-rwxr-xr-xtest/integration/targets/command_shell/scripts/yoink.sh2
-rw-r--r--test/integration/targets/command_shell/tasks/main.yml42
-rw-r--r--test/integration/targets/conditionals/play.yml26
-rw-r--r--test/integration/targets/connection_delegation/aliases1
-rw-r--r--test/integration/targets/connection_paramiko_ssh/test_connection.inventory2
-rw-r--r--test/integration/targets/connection_psrp/tests.yml11
-rw-r--r--test/integration/targets/connection_winrm/tests.yml3
-rw-r--r--test/integration/targets/copy/tasks/main.yml2
-rw-r--r--test/integration/targets/copy/tasks/tests.yml152
-rw-r--r--test/integration/targets/cron/aliases1
-rw-r--r--test/integration/targets/deb822_repository/aliases6
-rw-r--r--test/integration/targets/deb822_repository/meta/main.yml4
-rw-r--r--test/integration/targets/deb822_repository/tasks/install.yml40
-rw-r--r--test/integration/targets/deb822_repository/tasks/main.yml19
-rw-r--r--test/integration/targets/deb822_repository/tasks/test.yml229
-rw-r--r--test/integration/targets/debconf/tasks/main.yml42
-rw-r--r--test/integration/targets/delegate_to/delegate_local_from_root.yml2
-rwxr-xr-xtest/integration/targets/delegate_to/runme.sh4
-rw-r--r--test/integration/targets/delegate_to/test_delegate_to.yml27
-rw-r--r--test/integration/targets/delegate_to/test_random_delegate_to_with_loop.yml26
-rw-r--r--test/integration/targets/delegate_to/test_random_delegate_to_without_loop.yml13
-rw-r--r--test/integration/targets/dnf/aliases2
-rw-r--r--test/integration/targets/dnf/tasks/dnf.yml55
-rw-r--r--test/integration/targets/dnf/tasks/main.yml6
-rw-r--r--test/integration/targets/dnf/tasks/skip_broken_and_nobest.yml3
-rw-r--r--test/integration/targets/dnf/tasks/test_sos_removal.yml4
-rw-r--r--test/integration/targets/dnf5/aliases6
-rw-r--r--test/integration/targets/dnf5/playbook.yml19
-rwxr-xr-xtest/integration/targets/dnf5/runme.sh5
-rw-r--r--test/integration/targets/dpkg_selections/aliases1
-rw-r--r--test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml12
-rw-r--r--test/integration/targets/egg-info/lookup_plugins/import_pkg_resources.py2
-rw-r--r--test/integration/targets/environment/test_environment.yml10
-rw-r--r--test/integration/targets/error_from_connection/connection_plugins/dummy.py1
-rw-r--r--test/integration/targets/expect/tasks/main.yml9
-rw-r--r--test/integration/targets/facts_linux_network/aliases1
-rw-r--r--test/integration/targets/fetch/roles/fetch_tests/tasks/failures.yml10
-rw-r--r--test/integration/targets/file/tasks/link_rewrite.yml10
-rw-r--r--test/integration/targets/file/tasks/main.yml2
-rw-r--r--test/integration/targets/filter_core/tasks/main.yml32
-rw-r--r--test/integration/targets/filter_encryption/base.yml12
-rw-r--r--test/integration/targets/filter_mathstuff/tasks/main.yml54
-rw-r--r--test/integration/targets/find/tasks/main.yml3
-rw-r--r--test/integration/targets/find/tasks/mode.yml68
-rw-r--r--test/integration/targets/fork_safe_stdio/aliases2
-rwxr-xr-xtest/integration/targets/fork_safe_stdio/runme.sh2
-rwxr-xr-xtest/integration/targets/gathering_facts/library/dummy119
-rwxr-xr-xtest/integration/targets/gathering_facts/library/dummy219
-rwxr-xr-xtest/integration/targets/gathering_facts/library/dummy319
-rw-r--r--test/integration/targets/gathering_facts/library/file_utils.py3
-rw-r--r--test/integration/targets/gathering_facts/library/slow26
-rwxr-xr-xtest/integration/targets/gathering_facts/runme.sh14
-rw-r--r--test/integration/targets/get_url/tasks/hashlib.yml20
-rw-r--r--test/integration/targets/get_url/tasks/main.yml2
-rw-r--r--test/integration/targets/get_url/tasks/use_netrc.yml6
-rw-r--r--test/integration/targets/git/tasks/depth.yml9
-rw-r--r--test/integration/targets/git/tasks/forcefully-fetch-tag.yml4
-rw-r--r--test/integration/targets/git/tasks/gpg-verification.yml10
-rw-r--r--test/integration/targets/git/tasks/localmods.yml26
-rw-r--r--test/integration/targets/git/tasks/main.yml50
-rw-r--r--test/integration/targets/git/tasks/missing_hostkey.yml3
-rw-r--r--test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml3
-rw-r--r--test/integration/targets/git/tasks/reset-origin.yml9
-rw-r--r--test/integration/targets/git/tasks/setup-local-repos.yml33
-rw-r--r--test/integration/targets/git/tasks/setup.yml38
-rw-r--r--test/integration/targets/git/tasks/single-branch.yml6
-rw-r--r--test/integration/targets/git/tasks/specific-revision.yml18
-rw-r--r--test/integration/targets/git/vars/main.yml1
-rw-r--r--test/integration/targets/group/files/get_free_gid.py23
-rw-r--r--test/integration/targets/group/files/get_gid_for_group.py18
-rw-r--r--test/integration/targets/group/files/gidget.py15
-rw-r--r--test/integration/targets/group/tasks/main.yml23
-rw-r--r--test/integration/targets/group/tasks/tests.yml681
-rw-r--r--test/integration/targets/handlers/80880.yml34
-rw-r--r--test/integration/targets/handlers/82241.yml6
-rw-r--r--test/integration/targets/handlers/nested_flush_handlers_failure_force.yml19
-rw-r--r--test/integration/targets/handlers/roles/include_role_include_tasks_handler/handlers/include_handlers.yml2
-rw-r--r--test/integration/targets/handlers/roles/include_role_include_tasks_handler/handlers/main.yml2
-rw-r--r--test/integration/targets/handlers/roles/include_role_include_tasks_handler/tasks/main.yml2
-rw-r--r--test/integration/targets/handlers/roles/r1-dep_chain-vars/defaults/main.yml1
-rw-r--r--test/integration/targets/handlers/roles/r1-dep_chain-vars/tasks/main.yml2
-rw-r--r--test/integration/targets/handlers/roles/r2-dep_chain-vars/handlers/main.yml4
-rw-r--r--test/integration/targets/handlers/roles/r2-dep_chain-vars/tasks/main.yml2
-rw-r--r--test/integration/targets/handlers/roles/role-82241/handlers/main.yml2
-rw-r--r--test/integration/targets/handlers/roles/role-82241/tasks/entry_point.yml2
-rw-r--r--test/integration/targets/handlers/roles/role-82241/tasks/included_tasks.yml2
-rw-r--r--test/integration/targets/handlers/roles/test_listen_role_dedup_global/handlers/main.yml4
-rw-r--r--test/integration/targets/handlers/roles/test_listen_role_dedup_role1/meta/main.yml2
-rw-r--r--test/integration/targets/handlers/roles/test_listen_role_dedup_role1/tasks/main.yml3
-rw-r--r--test/integration/targets/handlers/roles/test_listen_role_dedup_role2/meta/main.yml2
-rw-r--r--test/integration/targets/handlers/roles/test_listen_role_dedup_role2/tasks/main.yml3
-rw-r--r--test/integration/targets/handlers/roles/two_tasks_files_role/handlers/main.yml3
-rw-r--r--test/integration/targets/handlers/roles/two_tasks_files_role/tasks/main.yml3
-rw-r--r--test/integration/targets/handlers/roles/two_tasks_files_role/tasks/other.yml3
-rwxr-xr-xtest/integration/targets/handlers/runme.sh27
-rw-r--r--test/integration/targets/handlers/test_include_role_handler_once.yml20
-rw-r--r--test/integration/targets/handlers/test_include_tasks_in_include_role.yml5
-rw-r--r--test/integration/targets/handlers/test_listen_role_dedup.yml5
-rw-r--r--test/integration/targets/handlers/test_multiple_handlers_with_recursive_notification.yml36
-rw-r--r--test/integration/targets/handlers/test_run_once.yml10
-rw-r--r--test/integration/targets/include_vars/files/test_depth/sub1/sub11.yml1
-rw-r--r--test/integration/targets/include_vars/files/test_depth/sub1/sub11/config11.yml1
-rw-r--r--test/integration/targets/include_vars/files/test_depth/sub1/sub11/config112.yml1
-rw-r--r--test/integration/targets/include_vars/files/test_depth/sub1/sub12.yml1
-rw-r--r--test/integration/targets/include_vars/files/test_depth/sub2/sub21.yml1
-rw-r--r--test/integration/targets/include_vars/files/test_depth/sub2/sub21/config211.yml1
-rw-r--r--test/integration/targets/include_vars/files/test_depth/sub2/sub21/config212.yml1
-rw-r--r--test/integration/targets/include_vars/files/test_depth/sub3/config3.yml1
-rw-r--r--test/integration/targets/include_vars/tasks/main.yml52
-rw-r--r--test/integration/targets/include_vars/vars/services/service_vars.yml2
-rw-r--r--test/integration/targets/include_vars/vars/services/service_vars_fqcn.yml2
-rw-r--r--test/integration/targets/include_when_parent_is_dynamic/tasks.yml2
-rw-r--r--test/integration/targets/include_when_parent_is_static/tasks.yml2
-rw-r--r--test/integration/targets/includes/include_on_playbook_should_fail.yml2
-rw-r--r--test/integration/targets/includes/roles/test_includes/handlers/main.yml2
-rw-r--r--test/integration/targets/includes/roles/test_includes/tasks/main.yml40
-rw-r--r--test/integration/targets/includes/roles/test_includes_free/tasks/main.yml4
-rw-r--r--test/integration/targets/includes/roles/test_includes_host_pinned/tasks/main.yml2
-rwxr-xr-xtest/integration/targets/includes/runme.sh2
-rw-r--r--test/integration/targets/includes/test_includes2.yml4
-rw-r--r--test/integration/targets/includes/test_includes3.yml2
-rw-r--r--test/integration/targets/inventory/inventory_plugins/contructed_with_hostvars.py2
-rw-r--r--test/integration/targets/inventory_ini/inventory.ini2
-rwxr-xr-xtest/integration/targets/inventory_ini/runme.sh3
-rw-r--r--test/integration/targets/iptables/aliases1
-rw-r--r--test/integration/targets/iptables/tasks/chain_management.yml21
-rw-r--r--test/integration/targets/known_hosts/defaults/main.yml2
-rw-r--r--test/integration/targets/known_hosts/tasks/main.yml2
-rw-r--r--test/integration/targets/lookup-option-name/aliases2
-rw-r--r--test/integration/targets/lookup-option-name/tasks/main.yml6
-rw-r--r--test/integration/targets/lookup_config/tasks/main.yml2
-rw-r--r--test/integration/targets/lookup_fileglob/issue72873/test.yml6
-rw-r--r--test/integration/targets/lookup_first_found/tasks/main.yml53
-rw-r--r--test/integration/targets/lookup_first_found/vars/ishouldnotbefound.yml1
-rw-r--r--test/integration/targets/lookup_first_found/vars/itworks.yml1
-rw-r--r--test/integration/targets/lookup_sequence/tasks/main.yml2
-rw-r--r--test/integration/targets/lookup_together/tasks/main.yml2
-rw-r--r--test/integration/targets/lookup_url/aliases9
-rw-r--r--test/integration/targets/lookup_url/meta/main.yml2
-rw-r--r--test/integration/targets/lookup_url/tasks/main.yml32
-rw-r--r--test/integration/targets/lookup_url/tasks/use_netrc.yml8
-rw-r--r--test/integration/targets/loop-connection/collections/ansible_collections/ns/name/meta/runtime.yml2
-rw-r--r--test/integration/targets/loop-connection/main.yml2
-rw-r--r--test/integration/targets/missing_required_lib/library/missing_required_lib.py2
-rw-r--r--test/integration/targets/module_defaults/action_plugins/debug.py2
-rw-r--r--test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/eos.py1
-rw-r--r--test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/ios.py1
-rw-r--r--test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/vyos.py1
-rw-r--r--test/integration/targets/module_no_log/aliases1
-rw-r--r--test/integration/targets/module_no_log/library/module_that_has_secret.py19
-rw-r--r--test/integration/targets/module_no_log/tasks/main.yml38
-rw-r--r--test/integration/targets/module_utils/library/test.py12
-rw-r--r--test/integration/targets/module_utils/library/test_failure.py4
-rw-r--r--test/integration/targets/module_utils/module_utils/bar0/foo.py (renamed from test/integration/targets/module_utils/module_utils/bar0/foo3.py)0
-rw-r--r--test/integration/targets/module_utils/module_utils/foo.py3
-rw-r--r--test/integration/targets/module_utils/module_utils/sub/bar/__init__.py (renamed from test/units/module_utils/compat/__init__.py)0
-rw-r--r--test/integration/targets/module_utils/module_utils/sub/bar/bam.py3
-rw-r--r--test/integration/targets/module_utils/module_utils/sub/bar/bar.py3
-rw-r--r--test/integration/targets/module_utils/module_utils/yak/zebra/foo.py (renamed from test/integration/targets/module_utils/module_utils/yak/zebra/foo4.py)0
-rw-r--r--test/integration/targets/module_utils/module_utils_test.yml2
-rw-r--r--test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps12
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps168
-rw-r--r--test/integration/targets/no_log/no_log_config.yml13
-rwxr-xr-xtest/integration/targets/no_log/runme.sh7
-rw-r--r--test/integration/targets/old_style_cache_plugins/aliases1
-rw-r--r--test/integration/targets/old_style_cache_plugins/plugins/cache/configurable_redis.py1
-rw-r--r--test/integration/targets/old_style_cache_plugins/setup_redis_cache.yml3
-rw-r--r--test/integration/targets/old_style_vars_plugins/deprecation_warning/v2_vars_plugin.py6
-rw-r--r--test/integration/targets/old_style_vars_plugins/deprecation_warning/vars.py2
-rw-r--r--test/integration/targets/old_style_vars_plugins/roles/a/tasks/main.yml3
-rw-r--r--test/integration/targets/old_style_vars_plugins/roles/a/vars_plugins/auto_role_vars.py11
-rwxr-xr-xtest/integration/targets/old_style_vars_plugins/runme.sh38
-rw-r--r--test/integration/targets/omit/75692.yml2
-rw-r--r--test/integration/targets/package/tasks/main.yml2
-rw-r--r--test/integration/targets/package_facts/aliases1
-rw-r--r--test/integration/targets/parsing/bad_parsing.yml12
-rw-r--r--test/integration/targets/parsing/parsing.yml35
-rw-r--r--test/integration/targets/parsing/roles/test_bad_parsing/tasks/main.yml60
-rw-r--r--test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario1.yml4
-rw-r--r--test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario2.yml4
-rw-r--r--test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario3.yml4
-rw-r--r--test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario4.yml4
-rw-r--r--test/integration/targets/parsing/roles/test_bad_parsing/vars/main.yml2
-rw-r--r--test/integration/targets/parsing/roles/test_good_parsing/tasks/main.yml19
-rw-r--r--test/integration/targets/parsing/roles/test_good_parsing/tasks/test_include_conditional.yml2
-rwxr-xr-xtest/integration/targets/parsing/runme.sh4
-rw-r--r--test/integration/targets/path_lookups/roles/showfile/tasks/main.yml (renamed from test/integration/targets/path_lookups/roles/showfile/tasks/notmain.yml)0
-rw-r--r--test/integration/targets/path_lookups/testplay.yml8
-rw-r--r--test/integration/targets/pause/pause-6.yml25
-rwxr-xr-xtest/integration/targets/pause/test-pause.py23
-rw-r--r--test/integration/targets/pip/tasks/main.yml3
-rw-r--r--test/integration/targets/pip/tasks/no_setuptools.yml48
-rw-r--r--test/integration/targets/pip/tasks/pip.yml22
-rw-r--r--test/integration/targets/pkg_resources/lookup_plugins/check_pkg_resources.py2
-rw-r--r--test/integration/targets/plugin_filtering/filter_lookup.yml2
-rw-r--r--test/integration/targets/plugin_filtering/filter_modules.yml2
-rw-r--r--test/integration/targets/plugin_filtering/filter_ping.yml2
-rw-r--r--test/integration/targets/plugin_filtering/filter_stat.yml2
-rw-r--r--test/integration/targets/plugin_filtering/no_blacklist_module.ini3
-rw-r--r--test/integration/targets/plugin_filtering/no_blacklist_module.yml (renamed from test/integration/targets/plugin_filtering/no_rejectlist_module.yml)2
-rwxr-xr-xtest/integration/targets/plugin_filtering/runme.sh16
-rw-r--r--test/integration/targets/plugin_loader/collections/ansible_collections/n/c/plugins/action/a.py6
-rw-r--r--test/integration/targets/plugin_loader/file_collision/play.yml7
-rw-r--r--test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/custom.py15
-rw-r--r--test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/filter1.yml18
-rw-r--r--test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/filter3.yml18
-rw-r--r--test/integration/targets/plugin_loader/file_collision/roles/r2/filter_plugins/custom.py14
-rw-r--r--test/integration/targets/plugin_loader/file_collision/roles/r2/filter_plugins/filter2.yml18
-rw-r--r--test/integration/targets/plugin_loader/override/filters.yml2
-rwxr-xr-xtest/integration/targets/plugin_loader/runme.sh5
-rw-r--r--test/integration/targets/plugin_loader/unsafe_plugin_name.yml9
-rw-r--r--test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py2
-rw-r--r--test/integration/targets/remote_tmp/playbook.yml43
-rw-r--r--test/integration/targets/replace/tasks/main.yml19
-rw-r--r--test/integration/targets/result_pickle_error/action_plugins/result_pickle_error.py15
-rw-r--r--test/integration/targets/result_pickle_error/aliases3
-rwxr-xr-xtest/integration/targets/result_pickle_error/runme.sh16
-rw-r--r--test/integration/targets/result_pickle_error/runme.yml7
-rw-r--r--test/integration/targets/result_pickle_error/tasks/main.yml14
-rw-r--r--test/integration/targets/roles/47023.yml5
-rw-r--r--test/integration/targets/roles/dupe_inheritance.yml10
-rw-r--r--test/integration/targets/roles/privacy.yml60
-rw-r--r--test/integration/targets/roles/role_complete.yml47
-rw-r--r--test/integration/targets/roles/role_dep_chain.yml6
-rw-r--r--test/integration/targets/roles/roles/47023_role1/defaults/main.yml1
-rw-r--r--test/integration/targets/roles/roles/47023_role1/tasks/main.yml1
-rw-r--r--test/integration/targets/roles/roles/47023_role1/vars/main.yml1
-rw-r--r--test/integration/targets/roles/roles/47023_role2/tasks/main.yml1
-rw-r--r--test/integration/targets/roles/roles/47023_role3/tasks/main.yml1
-rw-r--r--test/integration/targets/roles/roles/47023_role4/tasks/main.yml5
-rw-r--r--test/integration/targets/roles/roles/a/vars/main.yml1
-rw-r--r--test/integration/targets/roles/roles/bottom/tasks/main.yml3
-rw-r--r--test/integration/targets/roles/roles/failed_when/tasks/main.yml4
-rw-r--r--test/integration/targets/roles/roles/imported_from_include/tasks/main.yml4
-rw-r--r--test/integration/targets/roles/roles/include_import_dep_chain/defaults/main.yml1
-rw-r--r--test/integration/targets/roles/roles/include_import_dep_chain/tasks/main.yml2
-rw-r--r--test/integration/targets/roles/roles/include_import_dep_chain/vars/main.yml1
-rw-r--r--test/integration/targets/roles/roles/middle/tasks/main.yml6
-rw-r--r--test/integration/targets/roles/roles/recover/tasks/main.yml1
-rw-r--r--test/integration/targets/roles/roles/set_var/tasks/main.yml2
-rw-r--r--test/integration/targets/roles/roles/test_connectivity/tasks/main.yml2
-rw-r--r--test/integration/targets/roles/roles/top/tasks/main.yml6
-rw-r--r--test/integration/targets/roles/roles/vars_scope/defaults/main.yml10
-rw-r--r--test/integration/targets/roles/roles/vars_scope/tasks/check_vars.yml7
-rw-r--r--test/integration/targets/roles/roles/vars_scope/tasks/main.yml1
-rw-r--r--test/integration/targets/roles/roles/vars_scope/vars/main.yml9
-rwxr-xr-xtest/integration/targets/roles/runme.sh35
-rw-r--r--test/integration/targets/roles/tasks/check_vars.yml7
-rw-r--r--test/integration/targets/roles/vars/play.yml26
-rw-r--r--test/integration/targets/roles/vars/privacy_vars.yml2
-rw-r--r--test/integration/targets/roles/vars_scope.yml358
-rw-r--r--test/integration/targets/roles_arg_spec/roles/c/meta/main.yml9
-rw-r--r--test/integration/targets/roles_arg_spec/test.yml130
-rw-r--r--test/integration/targets/rpm_key/tasks/rpm_key.yaml26
-rw-r--r--test/integration/targets/script/tasks/main.yml11
-rw-r--r--test/integration/targets/service/aliases1
-rw-r--r--test/integration/targets/service/files/ansible_test_service.py1
-rw-r--r--test/integration/targets/service_facts/aliases1
-rw-r--r--test/integration/targets/setup_deb_repo/tasks/main.yml1
-rw-r--r--test/integration/targets/setup_paramiko/install-Alpine-3-python-3.yml9
-rw-r--r--test/integration/targets/setup_paramiko/install-CentOS-6-python-2.yml3
-rw-r--r--test/integration/targets/setup_paramiko/install-Fedora-35-python-3.yml9
-rw-r--r--test/integration/targets/setup_paramiko/install-Ubuntu-16-python-2.yml3
-rw-r--r--test/integration/targets/setup_paramiko/install-python-2.yml3
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-Alpine-3-python-3.yml4
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-Fedora-35-python-3.yml5
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-apt-python-2.yml5
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-zypper-python-2.yml2
-rw-r--r--test/integration/targets/setup_rpm_repo/tasks/main.yml15
-rwxr-xr-xtest/integration/targets/strategy_linear/runme.sh2
-rw-r--r--test/integration/targets/strategy_linear/task_templated_run_once.yml20
-rw-r--r--test/integration/targets/subversion/aliases2
-rw-r--r--test/integration/targets/support-callback_plugins/aliases1
-rw-r--r--test/integration/targets/systemd/tasks/test_indirect_service.yml2
-rw-r--r--test/integration/targets/systemd/vars/Debian.yml2
-rwxr-xr-xtest/integration/targets/tags/runme.sh9
-rw-r--r--test/integration/targets/tags/test_template_parent_tags.yml10
-rw-r--r--test/integration/targets/tasks/playbook.yml5
-rwxr-xr-xtest/integration/targets/tasks/runme.sh2
-rw-r--r--test/integration/targets/template/ansible_managed_79129.yml29
-rw-r--r--test/integration/targets/template/arg_template_overrides.j24
-rw-r--r--test/integration/targets/template/in_template_overrides.yml28
-rwxr-xr-xtest/integration/targets/template/runme.sh7
-rw-r--r--test/integration/targets/template/tasks/main.yml4
-rw-r--r--test/integration/targets/template/template_overrides.yml38
-rw-r--r--test/integration/targets/template/templates/%necho Onii-chan help Im stuck;exit 1%n.j23
-rw-r--r--test/integration/targets/template/templates/completely{{ 1 % 0 }} safe template.j23
-rw-r--r--test/integration/targets/template/unsafe.yml5
-rw-r--r--test/integration/targets/template_jinja2_non_native/macro_override.yml2
-rw-r--r--test/integration/targets/templating/tasks/main.yml11
-rw-r--r--test/integration/targets/test_core/tasks/main.yml13
-rw-r--r--test/integration/targets/test_utils/aliases1
-rwxr-xr-xtest/integration/targets/test_utils/scripts/timeout.py21
-rwxr-xr-xtest/integration/targets/unarchive/runme.sh8
-rw-r--r--test/integration/targets/unarchive/runme.yml4
-rw-r--r--test/integration/targets/unarchive/tasks/main.yml1
-rw-r--r--test/integration/targets/unarchive/tasks/test_different_language_var.yml4
-rw-r--r--test/integration/targets/unarchive/tasks/test_mode.yml23
-rw-r--r--test/integration/targets/unarchive/tasks/test_relative_dest.yml26
-rw-r--r--test/integration/targets/unarchive/test_relative_tmp_dir.yml10
-rw-r--r--test/integration/targets/unsafe_writes/aliases1
-rw-r--r--test/integration/targets/until/tasks/main.yml34
-rw-r--r--test/integration/targets/unvault/main.yml1
-rwxr-xr-xtest/integration/targets/unvault/runme.sh2
-rw-r--r--test/integration/targets/uri/tasks/main.yml37
-rw-r--r--test/integration/targets/uri/tasks/redirect-none.yml2
-rw-r--r--test/integration/targets/uri/tasks/redirect-urllib2.yml35
-rw-r--r--test/integration/targets/uri/tasks/return-content.yml2
-rw-r--r--test/integration/targets/uri/tasks/use_netrc.yml2
-rw-r--r--test/integration/targets/user/tasks/main.yml2
-rw-r--r--test/integration/targets/user/tasks/test_create_user.yml12
-rw-r--r--test/integration/targets/user/tasks/test_create_user_home.yml18
-rw-r--r--test/integration/targets/user/tasks/test_expires_no_shadow.yml47
-rw-r--r--test/integration/targets/user/tasks/test_expires_warn.yml36
-rw-r--r--test/integration/targets/user/tasks/test_local.yml40
-rw-r--r--test/integration/targets/user/vars/main.yml2
-rw-r--r--test/integration/targets/var_blending/roles/test_var_blending/tasks/main.yml10
-rwxr-xr-xtest/integration/targets/var_precedence/ansible-var-precedence-check.py5
-rw-r--r--test/integration/targets/var_precedence/test_var_precedence.yml16
-rw-r--r--test/integration/targets/vars_files/aliases2
-rw-r--r--test/integration/targets/vars_files/inventory3
-rwxr-xr-xtest/integration/targets/vars_files/runme.sh5
-rw-r--r--test/integration/targets/vars_files/runme.yml22
-rw-r--r--test/integration/targets/vars_files/validate.yml11
-rw-r--r--test/integration/targets/vars_files/vars/bar.yml1
-rw-r--r--test/integration/targets/vars_files/vars/common.yml1
-rw-r--r--test/integration/targets/vars_files/vars/defaults.yml1
-rw-r--r--test/integration/targets/wait_for/tasks/main.yml11
-rw-r--r--test/integration/targets/win_exec_wrapper/action_plugins/test_rc_1.py35
-rw-r--r--test/integration/targets/win_exec_wrapper/library/test_rc_1.ps117
-rw-r--r--test/integration/targets/win_exec_wrapper/tasks/main.yml9
-rw-r--r--test/integration/targets/win_fetch/tasks/main.yml14
-rw-r--r--test/integration/targets/win_script/files/test_script_with_args.ps12
-rw-r--r--test/integration/targets/win_script/files/test_script_with_errors.ps12
-rw-r--r--test/integration/targets/windows-minimal/library/win_ping_set_attr.ps18
-rw-r--r--test/integration/targets/windows-minimal/library/win_ping_strict_mode_error.ps18
-rw-r--r--test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps18
-rw-r--r--test/integration/targets/windows-minimal/library/win_ping_throw.ps18
-rw-r--r--test/integration/targets/windows-minimal/library/win_ping_throw_string.ps18
-rw-r--r--test/integration/targets/yum/aliases1
-rw-r--r--test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py2
-rw-r--r--test/lib/ansible_test/_data/completion/docker.txt18
-rw-r--r--test/lib/ansible_test/_data/completion/remote.txt14
-rw-r--r--test/lib/ansible_test/_data/completion/windows.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/ansible-test.txt3
-rw-r--r--test/lib/ansible_test/_data/requirements/ansible.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/constraints.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt9
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.changelog.in3
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.changelog.txt15
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt6
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.import.txt4
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt4
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.mypy.in10
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.mypy.txt32
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pep8.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pslint.ps14
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pylint.in2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pylint.txt20
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt4
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.validate-modules.in1
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt7
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.yamllint.txt8
-rw-r--r--test/lib/ansible_test/_data/requirements/units.txt1
-rw-r--r--test/lib/ansible_test/_internal/ci/azp.py8
-rw-r--r--test/lib/ansible_test/_internal/cli/environments.py13
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py8
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/combine.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/acme.py14
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/cs.py15
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py96
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py199
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py3
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/nios.py16
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py9
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py94
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/__init__.py31
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py40
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/import.py12
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/mypy.py18
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pylint.py28
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/validate_modules.py5
-rw-r--r--test/lib/ansible_test/_internal/commands/units/__init__.py4
-rw-r--r--test/lib/ansible_test/_internal/config.py7
-rw-r--r--test/lib/ansible_test/_internal/containers.py67
-rw-r--r--test/lib/ansible_test/_internal/core_ci.py7
-rw-r--r--test/lib/ansible_test/_internal/coverage_util.py11
-rw-r--r--test/lib/ansible_test/_internal/delegation.py1
-rw-r--r--test/lib/ansible_test/_internal/diff.py2
-rw-r--r--test/lib/ansible_test/_internal/docker_util.py11
-rw-r--r--test/lib/ansible_test/_internal/host_profiles.py12
-rw-r--r--test/lib/ansible_test/_internal/http.py2
-rw-r--r--test/lib/ansible_test/_internal/junit_xml.py2
-rw-r--r--test/lib/ansible_test/_internal/pypi_proxy.py2
-rw-r--r--test/lib/ansible_test/_internal/python_requirements.py11
-rw-r--r--test/lib/ansible_test/_internal/util.py9
-rw-r--r--test/lib/ansible_test/_internal/util_common.py11
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json4
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json4
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py57
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json4
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini8
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/mypy/packaging.ini20
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt5
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd13
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg4
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg9
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg10
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py185
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/hide_unraisable.py24
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py41
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py13
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py197
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py2
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py113
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py2
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py6
-rw-r--r--test/lib/ansible_test/_util/controller/tools/collection_detail.py4
-rw-r--r--test/lib/ansible_test/_util/target/common/constants.py4
-rw-r--r--test/lib/ansible_test/_util/target/pytest/plugins/ansible_forked.py103
-rw-r--r--test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py46
-rw-r--r--test/lib/ansible_test/_util/target/sanity/import/importer.py12
-rw-r--r--test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1435
-rw-r--r--test/lib/ansible_test/_util/target/setup/bootstrap.sh61
-rw-r--r--test/lib/ansible_test/_util/target/setup/quiet_pip.py4
-rw-r--r--test/lib/ansible_test/config/cloud-config-aws.ini.template4
-rw-r--r--test/lib/ansible_test/config/cloud-config-azure.ini.template3
-rw-r--r--test/lib/ansible_test/config/cloud-config-cloudscale.ini.template2
-rw-r--r--test/lib/ansible_test/config/cloud-config-cs.ini.template3
-rw-r--r--test/lib/ansible_test/config/cloud-config-gcp.ini.template3
-rw-r--r--test/lib/ansible_test/config/cloud-config-hcloud.ini.template3
-rw-r--r--test/lib/ansible_test/config/cloud-config-opennebula.ini.template5
-rw-r--r--test/lib/ansible_test/config/cloud-config-openshift.kubeconfig.template3
-rw-r--r--test/lib/ansible_test/config/cloud-config-scaleway.ini.template3
-rw-r--r--test/lib/ansible_test/config/cloud-config-vcenter.ini.template3
-rw-r--r--test/lib/ansible_test/config/cloud-config-vultr.ini.template3
-rw-r--r--test/lib/ansible_test/config/inventory.networking.template3
-rw-r--r--test/lib/ansible_test/config/inventory.winrm.template3
-rw-r--r--test/sanity/code-smell/ansible-requirements.py1
-rw-r--r--test/sanity/code-smell/deprecated-config.requirements.in2
-rw-r--r--test/sanity/code-smell/deprecated-config.requirements.txt6
-rw-r--r--test/sanity/code-smell/obsolete-files.json2
-rw-r--r--test/sanity/code-smell/package-data.requirements.in8
-rw-r--r--test/sanity/code-smell/package-data.requirements.txt25
-rw-r--r--test/sanity/code-smell/pymarkdown.config.json11
-rw-r--r--test/sanity/code-smell/pymarkdown.json7
-rw-r--r--test/sanity/code-smell/pymarkdown.py64
-rw-r--r--test/sanity/code-smell/pymarkdown.requirements.in1
-rw-r--r--test/sanity/code-smell/pymarkdown.requirements.txt9
-rw-r--r--test/sanity/code-smell/release-names.py7
-rw-r--r--test/sanity/code-smell/release-names.requirements.in1
-rw-r--r--test/sanity/code-smell/release-names.requirements.txt4
-rw-r--r--test/sanity/code-smell/test-constraints.py6
-rw-r--r--test/sanity/code-smell/update-bundled.requirements.txt3
-rw-r--r--test/sanity/ignore.txt113
-rw-r--r--test/support/README.md2
-rw-r--r--test/support/integration/plugins/module_utils/compat/__init__.py (renamed from test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll2/__init__.py)0
-rw-r--r--test/support/integration/plugins/module_utils/compat/ipaddress.py2476
-rw-r--r--test/support/integration/plugins/module_utils/net_tools/__init__.py0
-rw-r--r--test/support/integration/plugins/module_utils/network/__init__.py0
-rw-r--r--test/support/integration/plugins/module_utils/network/common/__init__.py0
-rw-r--r--test/support/integration/plugins/module_utils/network/common/utils.py643
-rw-r--r--test/support/integration/plugins/modules/sefcontext.py4
-rw-r--r--test/support/integration/plugins/modules/timezone.py4
-rw-r--r--test/support/integration/plugins/modules/zypper.py5
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_base.py90
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_get.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_put.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/network.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/become/enable.py42
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/httpapi.py324
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/netconf.py404
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/network_cli.py3
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/persistent.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py66
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py14
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/ipaddr.py1186
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py531
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/httpapi/restconf.py91
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py20
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py147
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py61
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/cli_config.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/net_get.py71
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/net_put.py82
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py70
-rw-r--r--test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py6
-rw-r--r--test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/terminal/ios.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py6
-rw-r--r--test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py2
-rw-r--r--test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py4
-rw-r--r--test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_reboot.py101
-rw-r--r--test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps12
-rw-r--r--test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_quote.py114
-rw-r--r--test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_reboot.py620
-rw-r--r--test/support/windows-integration/plugins/action/win_copy.py4
-rw-r--r--test/support/windows-integration/plugins/action/win_reboot.py7
-rw-r--r--test/support/windows-integration/plugins/modules/win_stat.ps12
-rw-r--r--test/units/_vendor/test_vendor.py19
-rw-r--r--test/units/ansible_test/diff/add_binary_file.diff4
-rw-r--r--test/units/ansible_test/diff/add_text_file.diff8
-rw-r--r--test/units/ansible_test/diff/add_trailing_newline.diff9
-rw-r--r--test/units/ansible_test/diff/add_two_text_files.diff16
-rw-r--r--test/units/ansible_test/diff/context_no_trailing_newline.diff8
-rw-r--r--test/units/ansible_test/diff/multiple_context_lines.diff10
-rw-r--r--test/units/ansible_test/diff/parse_delete.diff16
-rw-r--r--test/units/ansible_test/diff/parse_rename.diff8
-rw-r--r--test/units/ansible_test/diff/remove_trailing_newline.diff9
-rw-r--r--test/units/ansible_test/test_diff.py178
-rw-r--r--test/units/cli/arguments/test_optparse_helpers.py5
-rw-r--r--test/units/cli/galaxy/test_execute_list_collection.py152
-rw-r--r--test/units/cli/test_adhoc.py10
-rw-r--r--test/units/cli/test_data/collection_skeleton/README.md2
-rw-r--r--test/units/cli/test_data/collection_skeleton/docs/My Collection.md2
-rw-r--r--test/units/cli/test_doc.py3
-rw-r--r--test/units/cli/test_galaxy.py110
-rw-r--r--test/units/cli/test_vault.py23
-rw-r--r--test/units/compat/mock.py2
-rw-r--r--test/units/config/manager/test_find_ini_config_file.py67
-rw-r--r--test/units/config/test3.cfg4
-rw-r--r--test/units/config/test_manager.py30
-rw-r--r--test/units/executor/module_common/conftest.py10
-rw-r--r--test/units/executor/module_common/test_modify_module.py8
-rw-r--r--test/units/executor/module_common/test_module_common.py39
-rw-r--r--test/units/executor/module_common/test_recursive_finder.py5
-rw-r--r--test/units/executor/test_interpreter_discovery.py8
-rw-r--r--test/units/executor/test_play_iterator.py24
-rw-r--r--test/units/executor/test_task_executor.py55
-rw-r--r--test/units/galaxy/test_api.py37
-rw-r--r--test/units/galaxy/test_collection.py161
-rw-r--r--test/units/galaxy/test_collection_install.py156
-rw-r--r--test/units/galaxy/test_role_install.py21
-rw-r--r--test/units/galaxy/test_token.py2
-rw-r--r--test/units/inventory/test_host.py8
-rw-r--r--test/units/mock/loader.py30
-rw-r--r--test/units/mock/procenv.py27
-rw-r--r--test/units/mock/vault_helper.py2
-rw-r--r--test/units/mock/yaml_helper.py73
-rw-r--r--test/units/module_utils/basic/test__symbolic_mode_to_octal.py8
-rw-r--r--test/units/module_utils/basic/test_argument_spec.py2
-rw-r--r--test/units/module_utils/basic/test_command_nonexisting.py5
-rw-r--r--test/units/module_utils/basic/test_filesystem.py2
-rw-r--r--test/units/module_utils/basic/test_get_available_hash_algorithms.py60
-rw-r--r--test/units/module_utils/basic/test_run_command.py10
-rw-r--r--test/units/module_utils/basic/test_safe_eval.py2
-rw-r--r--test/units/module_utils/basic/test_sanitize_keys.py1
-rw-r--r--test/units/module_utils/basic/test_selinux.py82
-rw-r--r--test/units/module_utils/basic/test_set_cwd.py7
-rw-r--r--test/units/module_utils/basic/test_tmpdir.py2
-rw-r--r--test/units/module_utils/common/arg_spec/test_aliases.py1
-rw-r--r--test/units/module_utils/common/parameters/test_handle_aliases.py2
-rw-r--r--test/units/module_utils/common/parameters/test_list_deprecations.py11
-rw-r--r--test/units/module_utils/common/test_collections.py21
-rw-r--r--test/units/module_utils/common/text/converters/test_json_encode_fallback.py6
-rw-r--r--test/units/module_utils/common/validation/test_check_missing_parameters.py8
-rw-r--r--test/units/module_utils/common/validation/test_check_mutually_exclusive.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_required_arguments.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_required_by.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_required_if.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_required_one_of.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_required_together.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_type_bits.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_type_bool.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_type_bytes.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_type_float.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_type_int.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_type_jsonarg.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_type_str.py2
-rw-r--r--test/units/module_utils/compat/test_datetime.py34
-rw-r--r--test/units/module_utils/conftest.py4
-rw-r--r--test/units/module_utils/facts/base.py4
-rw-r--r--test/units/module_utils/facts/fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo14
-rw-r--r--test/units/module_utils/facts/fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo1037
-rw-r--r--test/units/module_utils/facts/hardware/linux_data.py62
-rw-r--r--test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py4
-rw-r--r--test/units/module_utils/facts/network/test_locally_reachable_ips.py93
-rw-r--r--test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py6
-rw-r--r--test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py5
-rw-r--r--test/units/module_utils/facts/system/test_pkg_mgr.py75
-rw-r--r--test/units/module_utils/facts/test_collectors.py5
-rw-r--r--test/units/module_utils/facts/test_date_time.py15
-rw-r--r--test/units/module_utils/facts/test_sysctl.py6
-rw-r--r--test/units/module_utils/facts/test_timeout.py2
-rw-r--r--test/units/module_utils/test_text.py21
-rw-r--r--test/units/module_utils/urls/test_Request.py14
-rw-r--r--test/units/module_utils/urls/test_fetch_file.py1
-rw-r--r--test/units/module_utils/urls/test_prepare_multipart.py2
-rw-r--r--test/units/module_utils/urls/test_urls.py2
-rw-r--r--test/units/modules/conftest.py21
-rw-r--r--test/units/modules/test_apt.py29
-rw-r--r--test/units/modules/test_async_wrapper.py9
-rw-r--r--test/units/modules/test_copy.py23
-rw-r--r--test/units/modules/test_hostname.py10
-rw-r--r--test/units/modules/test_iptables.py40
-rw-r--r--test/units/modules/test_known_hosts.py2
-rw-r--r--test/units/modules/test_unarchive.py20
-rw-r--r--test/units/modules/utils.py10
-rw-r--r--test/units/parsing/test_ajson.py6
-rw-r--r--test/units/parsing/test_dataloader.py13
-rw-r--r--test/units/parsing/test_mod_args.py10
-rw-r--r--test/units/parsing/test_splitter.py75
-rw-r--r--test/units/parsing/vault/test_vault.py43
-rw-r--r--test/units/parsing/vault/test_vault_editor.py79
-rw-r--r--test/units/parsing/yaml/test_dumper.py21
-rw-r--r--test/units/parsing/yaml/test_objects.py7
-rw-r--r--test/units/playbook/role/test_include_role.py6
-rw-r--r--test/units/playbook/role/test_role.py77
-rw-r--r--test/units/playbook/test_base.py20
-rw-r--r--test/units/playbook/test_collectionsearch.py1
-rw-r--r--test/units/playbook/test_helpers.py62
-rw-r--r--test/units/playbook/test_included_file.py14
-rw-r--r--test/units/playbook/test_play_context.py2
-rw-r--r--test/units/playbook/test_taggable.py1
-rw-r--r--test/units/playbook/test_task.py2
-rw-r--r--test/units/plugins/action/test_action.py57
-rw-r--r--test/units/plugins/action/test_pause.py (renamed from test/units/utils/display/test_curses.py)30
-rw-r--r--test/units/plugins/action/test_raw.py6
-rw-r--r--test/units/plugins/cache/test_cache.py5
-rw-r--r--test/units/plugins/connection/test_connection.py75
-rw-r--r--test/units/plugins/connection/test_local.py1
-rw-r--r--test/units/plugins/connection/test_paramiko.py (renamed from test/units/plugins/connection/test_paramiko_ssh.py)14
-rw-r--r--test/units/plugins/connection/test_ssh.py18
-rw-r--r--test/units/plugins/connection/test_winrm.py104
-rw-r--r--test/units/plugins/filter/test_core.py4
-rw-r--r--test/units/plugins/filter/test_mathstuff.py85
-rw-r--r--test/units/plugins/inventory/test_constructed.py10
-rw-r--r--test/units/plugins/inventory/test_inventory.py2
-rw-r--r--test/units/plugins/inventory/test_script.py10
-rw-r--r--test/units/plugins/lookup/test_password.py30
-rw-r--r--test/units/plugins/strategy/test_strategy.py492
-rw-r--r--test/units/plugins/test_plugins.py10
-rw-r--r--test/units/requirements.txt8
-rw-r--r--test/units/template/test_templar.py14
-rw-r--r--test/units/template/test_vars.py23
-rw-r--r--test/units/test_constants.py94
-rw-r--r--test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py2
-rw-r--r--test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py2
-rw-r--r--test/units/utils/collection_loader/test_collection_loader.py71
-rw-r--r--test/units/utils/display/test_broken_cowsay.py7
-rw-r--r--test/units/utils/test_cleanup_tmp_file.py25
-rw-r--r--test/units/utils/test_display.py35
-rw-r--r--test/units/utils/test_encrypt.py13
-rw-r--r--test/units/utils/test_unsafe_proxy.py28
-rw-r--r--test/units/vars/test_module_response_deepcopy.py11
-rw-r--r--test/units/vars/test_variable_manager.py6
827 files changed, 11004 insertions, 12035 deletions
diff --git a/test/ansible_test/Makefile b/test/ansible_test/Makefile
new file mode 100644
index 00000000..2d85e3da
--- /dev/null
+++ b/test/ansible_test/Makefile
@@ -0,0 +1,13 @@
+all: sanity unit validate-modules-unit
+
+.PHONY: sanity
+sanity:
+ $(abspath ${CURDIR}/../../bin/ansible-test) sanity test/lib/ ${FLAGS}
+
+.PHONY: unit
+unit:
+ PYTHONPATH=$(abspath ${CURDIR}/../lib) pytest unit ${FLAGS}
+
+.PHONY: validate-modules-unit
+validate-modules-unit:
+ PYTHONPATH=$(abspath ${CURDIR}/../lib/ansible_test/_util/controller/sanity/validate-modules):$(abspath ${CURDIR}/../../lib) pytest validate-modules-unit ${FLAGS}
diff --git a/test/ansible_test/unit/test_diff.py b/test/ansible_test/unit/test_diff.py
new file mode 100644
index 00000000..1f2559d2
--- /dev/null
+++ b/test/ansible_test/unit/test_diff.py
@@ -0,0 +1,105 @@
+"""Tests for diff module."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import subprocess
+import pytest
+
+from ansible_test._internal.util import (
+ to_text,
+ to_bytes,
+)
+
+from ansible_test._internal.diff import (
+ parse_diff,
+ FileDiff,
+)
+
+
+def get_diff(base, head=None):
+ """Return a git diff between the base and head revision.
+ :type base: str
+ :type head: str | None
+ :rtype: list[str]
+ """
+ if not head or head == 'HEAD':
+ head = to_text(subprocess.check_output(['git', 'rev-parse', 'HEAD'])).strip()
+
+ cache = '/tmp/git-diff-cache-%s-%s.log' % (base, head)
+
+ if os.path.exists(cache):
+ with open(cache, 'rb') as cache_fd:
+ lines = to_text(cache_fd.read()).splitlines()
+ else:
+ lines = to_text(subprocess.check_output(['git', 'diff', base, head]), errors='replace').splitlines()
+
+ with open(cache, 'wb') as cache_fd:
+ cache_fd.write(to_bytes('\n'.join(lines)))
+
+ assert lines
+
+ return lines
+
+
+def get_parsed_diff(base, head=None):
+ """Return a parsed git diff between the base and head revision.
+ :type base: str
+ :type head: str | None
+ :rtype: list[FileDiff]
+ """
+ lines = get_diff(base, head)
+ items = parse_diff(lines)
+
+ assert items
+
+ for item in items:
+ assert item.headers
+ assert item.is_complete
+
+ item.old.format_lines()
+ item.new.format_lines()
+
+ for line_range in item.old.ranges:
+ assert line_range[1] >= line_range[0] > 0
+
+ for line_range in item.new.ranges:
+ assert line_range[1] >= line_range[0] > 0
+
+ return items
+
+
+RANGES_TO_TEST = (
+ ('f31421576b00f0b167cdbe61217c31c21a41ac02', 'HEAD'),
+ ('b8125ac1a61f2c7d1de821c78c884560071895f1', '32146acf4e43e6f95f54d9179bf01f0df9814217')
+)
+
+
+@pytest.mark.parametrize("base, head", RANGES_TO_TEST)
+def test_parse_diff(base, head):
+ """Integration test to verify parsing of ansible/ansible history."""
+ get_parsed_diff(base, head)
+
+
+def test_parse_delete():
+ """Integration test to verify parsing of a deleted file."""
+ commit = 'ee17b914554861470b382e9e80a8e934063e0860'
+ items = get_parsed_diff(commit + '~', commit)
+ deletes = [item for item in items if not item.new.exists]
+
+ assert len(deletes) == 1
+ assert deletes[0].old.path == 'lib/ansible/plugins/connection/nspawn.py'
+ assert deletes[0].new.path == 'lib/ansible/plugins/connection/nspawn.py'
+
+
+def test_parse_rename():
+ """Integration test to verify parsing of renamed files."""
+ commit = '16a39639f568f4dd5cb233df2d0631bdab3a05e9'
+ items = get_parsed_diff(commit + '~', commit)
+ renames = [item for item in items if item.old.path != item.new.path and item.old.exists and item.new.exists]
+
+ assert len(renames) == 2
+ assert renames[0].old.path == 'test/integration/targets/eos_eapi/tests/cli/badtransport.yaml'
+ assert renames[0].new.path == 'test/integration/targets/eos_eapi/tests/cli/badtransport.1'
+ assert renames[1].old.path == 'test/integration/targets/eos_eapi/tests/cli/zzz_reset.yaml'
+ assert renames[1].new.path == 'test/integration/targets/eos_eapi/tests/cli/zzz_reset.1'
diff --git a/test/units/ansible_test/test_validate_modules.py b/test/ansible_test/validate-modules-unit/test_validate_modules_regex.py
index 1b801a59..8c0b45ca 100644
--- a/test/units/ansible_test/test_validate_modules.py
+++ b/test/ansible_test/validate-modules-unit/test_validate_modules_regex.py
@@ -1,27 +1,10 @@
"""Tests for validate-modules regexes."""
-from __future__ import annotations
-
-import pathlib
-import sys
-from unittest import mock
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
import pytest
-
-@pytest.fixture(autouse=True, scope='session')
-def validate_modules() -> None:
- """Make validate_modules available on sys.path for unit testing."""
- sys.path.insert(0, str(pathlib.Path(__file__).parent.parent.parent / 'lib/ansible_test/_util/controller/sanity/validate-modules'))
-
- # Mock out voluptuous to facilitate testing without it, since tests aren't covering anything that uses it.
-
- sys.modules['voluptuous'] = voluptuous = mock.MagicMock()
- sys.modules['voluptuous.humanize'] = voluptuous.humanize = mock.MagicMock()
-
- # Mock out antsibull_docs_parser to facilitate testing without it, since tests aren't covering anything that uses it.
-
- sys.modules['antsibull_docs_parser'] = antsibull_docs_parser = mock.MagicMock()
- sys.modules['antsibull_docs_parser.parser'] = antsibull_docs_parser.parser = mock.MagicMock()
+from validate_modules.main import TYPE_REGEX
@pytest.mark.parametrize('cstring,cexpected', [
@@ -53,11 +36,8 @@ def validate_modules() -> None:
])
def test_type_regex(cstring, cexpected): # type: (str, str) -> None
"""Check TYPE_REGEX against various examples to verify it correctly matches or does not match."""
- from validate_modules.main import TYPE_REGEX
-
match = TYPE_REGEX.match(cstring)
-
- if cexpected:
- assert match, f"should have matched: {cstring}"
- else:
- assert not match, f"should not have matched: {cstring}"
+ if cexpected and not match:
+ assert False, "%s should have matched" % cstring
+ elif not cexpected and match:
+ assert False, "%s should not have matched" % cstring
diff --git a/test/integration/targets/ansible-config/aliases b/test/integration/targets/ansible-config/aliases
deleted file mode 100644
index 1d28bdb2..00000000
--- a/test/integration/targets/ansible-config/aliases
+++ /dev/null
@@ -1,2 +0,0 @@
-shippable/posix/group5
-context/controller
diff --git a/test/integration/targets/ansible-config/files/ini_dupes.py b/test/integration/targets/ansible-config/files/ini_dupes.py
deleted file mode 100755
index ed42e6ac..00000000
--- a/test/integration/targets/ansible-config/files/ini_dupes.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import annotations
-
-import configparser
-import sys
-
-
-ini_file = sys.argv[1]
-c = configparser.ConfigParser(strict=True, inline_comment_prefixes=(';',))
-c.read_file(open(ini_file))
diff --git a/test/integration/targets/ansible-config/tasks/main.yml b/test/integration/targets/ansible-config/tasks/main.yml
deleted file mode 100644
index a894dd45..00000000
--- a/test/integration/targets/ansible-config/tasks/main.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-- name: test ansible-config for valid output and no dupes
- block:
- - name: Create temporary file
- tempfile:
- path: '{{output_dir}}'
- state: file
- suffix: temp.ini
- register: ini_tempfile
-
- - name: run config full dump
- shell: ansible-config init -t all > {{ini_tempfile.path}}
-
- - name: run ini tester, for correctness and dupes
- shell: "{{ansible_playbook_python}} '{{role_path}}/files/ini_dupes.py' '{{ini_tempfile.path}}'"
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json
index 36f402fc..243a5e43 100644
--- a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json
@@ -17,7 +17,7 @@
"version": "0.1.1231",
"readme": "README.md",
"license_file": "COPYING",
- "homepage": ""
+ "homepage": "",
},
"file_manifest_file": {
"format": 1,
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py
index dfc12710..caec2ed6 100644
--- a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py
@@ -20,6 +20,7 @@ DOCUMENTATION = '''
required: True
'''
+from ansible.errors import AnsibleParserError
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py
index 639d3c6b..d4569869 100644
--- a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py
@@ -32,8 +32,7 @@ RETURN = """
version_added: 1.0.0
"""
-from collections.abc import Sequence
-
+from ansible.module_utils.common._collections_compat import Sequence
from ansible.plugins.lookup import LookupBase
from ansible.errors import AnsibleError
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/MANIFEST.json b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/MANIFEST.json
index 36f402fc..243a5e43 100644
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/MANIFEST.json
+++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/MANIFEST.json
@@ -17,7 +17,7 @@
"version": "0.1.1231",
"readme": "README.md",
"license_file": "COPYING",
- "homepage": ""
+ "homepage": "",
},
"file_manifest_file": {
"format": 1,
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py
index 1870b8ea..cbb8f0fb 100644
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py
+++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py
@@ -19,6 +19,7 @@ DOCUMENTATION = '''
required: True
'''
+from ansible.errors import AnsibleParserError
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
index aaaecb80..79b7a704 100644
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
+++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
@@ -3,17 +3,12 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-DOCUMENTATION = r'''
+DOCUMENTATION = '''
---
module: randommodule
short_description: A random module
description:
- A random module.
- - See O(foo.bar.baz#role:main:foo=bar) for how this is used in the P(foo.bar.baz#role)'s C(main) entrypoint.
- - See L(the docsite,https://docs.ansible.com/ansible-core/devel/) for more information on ansible-core.
- - This module is not related to the M(ansible.builtin.copy) module. HORIZONTALLINE You might also be interested
- in R(ansible_python_interpreter, ansible_python_interpreter).
- - Sometimes you have M(broken markup) that will result in error messages.
author:
- Ansible Core Team
version_added: 1.0.0
@@ -23,22 +18,22 @@ deprecated:
removed_in: '3.0.0'
options:
test:
- description: Some text. Consider not using O(ignore:foo=bar).
+ description: Some text.
type: str
version_added: 1.2.0
sub:
- description: Suboptions. Contains O(sub.subtest), which can be set to V(123). You can use E(TEST_ENV) to set this.
+ description: Suboptions.
type: dict
suboptions:
subtest:
- description: A suboption. Not compatible to O(ansible.builtin.copy#module:path=c:\\foo\(1\).txt).
+ description: A suboption.
type: int
version_added: 1.1.0
# The following is the wrong syntax, and should not get processed
# by add_collection_to_versions_and_dates()
options:
subtest2:
- description: Another suboption. Useful when P(ansible.builtin.shuffle#filter) is used with value V([a,b,\),d\\]).
+ description: Another suboption.
type: float
version_added: 1.1.0
# The following is not supported in modules, and should not get processed
@@ -70,7 +65,7 @@ seealso:
EXAMPLES = '''
'''
-RETURN = r'''
+RETURN = '''
z_last:
description: A last result.
type: str
@@ -80,8 +75,7 @@ z_last:
m_middle:
description:
- This should be in the middle.
- - Has some more data.
- - Check out RV(m_middle.suboption) and compare it to RV(a_first=foo) and RV(community.general.foo#lookup:value).
+ - Has some more data
type: dict
returned: success and 1st of month
contains:
@@ -92,7 +86,7 @@ m_middle:
version_added: 1.4.0
a_first:
- description: A first result. Use RV(a_first=foo\(bar\\baz\)bam).
+ description: A first result.
type: str
returned: success
'''
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/test/yolo.yml b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/test/yolo.yml
index ebfea2af..cc60945e 100644
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/test/yolo.yml
+++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/test/yolo.yml
@@ -8,25 +8,6 @@ DOCUMENTATION:
description: does not matter
type: raw
required: true
- seealso:
- - module: ansible.builtin.test
- - module: testns.testcol.fakemodule
- description: A fake module
- - plugin: testns.testcol.noop
- plugin_type: lookup
- - plugin: testns.testcol.grouped
- plugin_type: filter
- description: A grouped filter.
- - plugin: ansible.builtin.combine
- plugin_type: filter
- - plugin: ansible.builtin.file
- plugin_type: lookup
- description: Read a file on the controller.
- - link: https://docs.ansible.com
- name: Ansible docsite
- description: See also the Ansible docsite.
- - ref: foo_bar
- description: Some foo bar.
EXAMPLES: |
{{ 'anything' is yolo }}
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol2/MANIFEST.json b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol2/MANIFEST.json
index e930d7d8..02ec289f 100644
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol2/MANIFEST.json
+++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol2/MANIFEST.json
@@ -17,7 +17,7 @@
"version": "1.2.0",
"readme": "README.md",
"license_file": "COPYING",
- "homepage": ""
+ "homepage": "",
},
"file_manifest_file": {
"format": 1,
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol3/galaxy.yml b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol3/galaxy.yml
deleted file mode 100644
index bd6c15a4..00000000
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol3/galaxy.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace: testns
-name: testcol3
-version: 0.1.0
-readme: README.md
-authors:
- - me
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol3/plugins/modules/test1.py b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol3/plugins/modules/test1.py
deleted file mode 100644
index 02dfb89d..00000000
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol3/plugins/modules/test1.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/python
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-
-DOCUMENTATION = """
-module: test1
-short_description: Foo module in testcol3
-description:
- - This is a foo module.
-author:
- - me
-"""
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(),
- )
-
- module.exit_json()
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol4/galaxy.yml b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol4/galaxy.yml
deleted file mode 100644
index 7894d393..00000000
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol4/galaxy.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace: testns
-name: testcol4
-version: 1.0.0
-readme: README.md
-authors:
- - me
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol4/plugins/modules/test2.py b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol4/plugins/modules/test2.py
deleted file mode 100644
index ddb0c114..00000000
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol4/plugins/modules/test2.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/python
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-
-DOCUMENTATION = """
-module: test2
-short_description: Foo module in testcol4
-description:
- - This is a foo module.
-author:
- - me
-"""
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(),
- )
-
- module.exit_json()
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/integration/targets/ansible-doc/randommodule-text.output b/test/integration/targets/ansible-doc/randommodule-text.output
index ca361346..602d66ec 100644
--- a/test/integration/targets/ansible-doc/randommodule-text.output
+++ b/test/integration/targets/ansible-doc/randommodule-text.output
@@ -1,13 +1,6 @@
> TESTNS.TESTCOL.RANDOMMODULE (./collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py)
- A random module. See `foo=bar' (of role foo.bar.baz, main
- entrypoint) for how this is used in the [foo.bar.baz]'s `main'
- entrypoint. See the docsite <https://docs.ansible.com/ansible-
- core/devel/> for more information on ansible-core. This module
- is not related to the [ansible.builtin.copy] module.
- ------------- You might also be interested in
- ansible_python_interpreter. Sometimes you have [broken markup]
- that will result in error messages.
+ A random module.
ADDED IN: version 1.0.0 of testns.testcol
@@ -21,8 +14,7 @@ DEPRECATED:
OPTIONS (= is mandatory):
- sub
- Suboptions. Contains `sub.subtest', which can be set to `123'.
- You can use `TEST_ENV' to set this.
+ Suboptions.
set_via:
env:
- deprecated:
@@ -37,8 +29,7 @@ OPTIONS (= is mandatory):
OPTIONS:
- subtest2
- Another suboption. Useful when [ansible.builtin.shuffle]
- is used with value `[a,b,),d\]'.
+ Another suboption.
default: null
type: float
added in: version 1.1.0
@@ -48,15 +39,14 @@ OPTIONS (= is mandatory):
SUBOPTIONS:
- subtest
- A suboption. Not compatible to `path=c:\foo(1).txt' (of
- module ansible.builtin.copy).
+ A suboption.
default: null
type: int
added in: version 1.1.0 of testns.testcol
- test
- Some text. Consider not using `foo=bar'.
+ Some text.
default: null
type: str
added in: version 1.2.0 of testns.testcol
@@ -103,15 +93,13 @@ EXAMPLES:
RETURN VALUES:
- a_first
- A first result. Use `a_first=foo(bar\baz)bam'.
+ A first result.
returned: success
type: str
- m_middle
This should be in the middle.
- Has some more data.
- Check out `m_middle.suboption' and compare it to `a_first=foo'
- and `value' (of lookup plugin community.general.foo).
+ Has some more data
returned: success and 1st of month
type: dict
diff --git a/test/integration/targets/ansible-doc/randommodule.output b/test/integration/targets/ansible-doc/randommodule.output
index f40202a8..cf036000 100644
--- a/test/integration/targets/ansible-doc/randommodule.output
+++ b/test/integration/targets/ansible-doc/randommodule.output
@@ -12,18 +12,14 @@
"why": "Test deprecation"
},
"description": [
- "A random module.",
- "See O(foo.bar.baz#role:main:foo=bar) for how this is used in the P(foo.bar.baz#role)'s C(main) entrypoint.",
- "See L(the docsite,https://docs.ansible.com/ansible-core/devel/) for more information on ansible-core.",
- "This module is not related to the M(ansible.builtin.copy) module. HORIZONTALLINE You might also be interested in R(ansible_python_interpreter, ansible_python_interpreter).",
- "Sometimes you have M(broken markup) that will result in error messages."
+ "A random module."
],
"filename": "./collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py",
"has_action": false,
"module": "randommodule",
"options": {
"sub": {
- "description": "Suboptions. Contains O(sub.subtest), which can be set to V(123). You can use E(TEST_ENV) to set this.",
+ "description": "Suboptions.",
"env": [
{
"deprecated": {
@@ -38,14 +34,14 @@
],
"options": {
"subtest2": {
- "description": "Another suboption. Useful when P(ansible.builtin.shuffle#filter) is used with value V([a,b,\\),d\\\\]).",
+ "description": "Another suboption.",
"type": "float",
"version_added": "1.1.0"
}
},
"suboptions": {
"subtest": {
- "description": "A suboption. Not compatible to O(ansible.builtin.copy#module:path=c:\\\\foo\\(1\\).txt).",
+ "description": "A suboption.",
"type": "int",
"version_added": "1.1.0",
"version_added_collection": "testns.testcol"
@@ -54,7 +50,7 @@
"type": "dict"
},
"test": {
- "description": "Some text. Consider not using O(ignore:foo=bar).",
+ "description": "Some text.",
"type": "str",
"version_added": "1.2.0",
"version_added_collection": "testns.testcol"
@@ -107,7 +103,7 @@
"metadata": null,
"return": {
"a_first": {
- "description": "A first result. Use RV(a_first=foo\\(bar\\\\baz\\)bam).",
+ "description": "A first result.",
"returned": "success",
"type": "str"
},
@@ -127,8 +123,7 @@
},
"description": [
"This should be in the middle.",
- "Has some more data.",
- "Check out RV(m_middle.suboption) and compare it to RV(a_first=foo) and RV(community.general.foo#lookup:value)."
+ "Has some more data"
],
"returned": "success and 1st of month",
"type": "dict"
diff --git a/test/integration/targets/ansible-doc/runme.sh b/test/integration/targets/ansible-doc/runme.sh
index b525766c..f51fa8a4 100755
--- a/test/integration/targets/ansible-doc/runme.sh
+++ b/test/integration/targets/ansible-doc/runme.sh
@@ -1,74 +1,36 @@
#!/usr/bin/env bash
-# always set sane error behaviors, enable execution tracing later if sufficient verbosity requested
-set -eu
-
-verbosity=0
-
-# default to silent output for naked grep; -vvv+ will adjust this
-export GREP_OPTS=-q
-
-# shell tracing output is very large from this script; only enable if >= -vvv was passed
-while getopts :v opt
-do case "$opt" in
- v) ((verbosity+=1)) ;;
- *) ;;
- esac
-done
-
-if (( verbosity >= 3 ));
-then
- set -x;
- export GREP_OPTS= ;
-fi
-
-echo "running playbook-backed docs tests"
+set -eux
ansible-playbook test.yml -i inventory "$@"
# test keyword docs
-ansible-doc -t keyword -l | grep $GREP_OPTS 'vars_prompt: list of variables to prompt for.'
-ansible-doc -t keyword vars_prompt | grep $GREP_OPTS 'description: list of variables to prompt for.'
-ansible-doc -t keyword asldkfjaslidfhals 2>&1 | grep $GREP_OPTS 'Skipping Invalid keyword'
+ansible-doc -t keyword -l | grep 'vars_prompt: list of variables to prompt for.'
+ansible-doc -t keyword vars_prompt | grep 'description: list of variables to prompt for.'
+ansible-doc -t keyword asldkfjaslidfhals 2>&1 | grep 'Skipping Invalid keyword'
# collections testing
(
unset ANSIBLE_PLAYBOOK_DIR
cd "$(dirname "$0")"
-
-echo "test fakemodule docs from collection"
+# test module docs from collection
# we use sed to strip the module path from the first line
current_out="$(ansible-doc --playbook-dir ./ testns.testcol.fakemodule | sed '1 s/\(^> TESTNS\.TESTCOL\.FAKEMODULE\).*(.*)$/\1/')"
expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.FAKEMODULE\).*(.*)$/\1/' fakemodule.output)"
test "$current_out" == "$expected_out"
-echo "test randommodule docs from collection"
# we use sed to strip the plugin path from the first line, and fix-urls.py to unbreak and replace URLs from stable-X branches
current_out="$(ansible-doc --playbook-dir ./ testns.testcol.randommodule | sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' | python fix-urls.py)"
expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' randommodule-text.output)"
test "$current_out" == "$expected_out"
-echo "test yolo filter docs from collection"
-# we use sed to strip the plugin path from the first line, and fix-urls.py to unbreak and replace URLs from stable-X branches
-current_out="$(ansible-doc --playbook-dir ./ testns.testcol.yolo --type test | sed '1 s/\(^> TESTNS\.TESTCOL\.YOLO\).*(.*)$/\1/' | python fix-urls.py)"
-expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.YOLO\).*(.*)$/\1/' yolo-text.output)"
-test "$current_out" == "$expected_out"
-
-echo "ensure we do work with valid collection name for list"
-ansible-doc --list testns.testcol --playbook-dir ./ 2>&1 | grep $GREP_OPTS -v "Invalid collection name"
+# ensure we do work with valid collection name for list
+ansible-doc --list testns.testcol --playbook-dir ./ 2>&1 | grep -v "Invalid collection name"
-echo "ensure we dont break on invalid collection name for list"
-ansible-doc --list testns.testcol.fakemodule --playbook-dir ./ 2>&1 | grep $GREP_OPTS "Invalid collection name"
+# ensure we dont break on invalid collection name for list
+ansible-doc --list testns.testcol.fakemodule --playbook-dir ./ 2>&1 | grep "Invalid collection name"
-echo "filter list with more than one collection (1/2)"
-output=$(ansible-doc --list testns.testcol3 testns.testcol4 --playbook-dir ./ 2>&1 | wc -l)
-test "$output" -eq 2
-
-echo "filter list with more than one collection (2/2)"
-output=$(ansible-doc --list testns.testcol testns.testcol4 --playbook-dir ./ 2>&1 | wc -l)
-test "$output" -eq 5
-
-echo "testing ansible-doc output for various plugin types"
+# test listing diff plugin types from collection
for ptype in cache inventory lookup vars filter module
do
# each plugin type adds 1 from collection
@@ -88,20 +50,20 @@ do
elif [ "${ptype}" == "lookup" ]; then expected_names=("noop");
elif [ "${ptype}" == "vars" ]; then expected_names=("noop_vars_plugin"); fi
fi
- echo "testing collection-filtered list for plugin ${ptype}"
+ # ensure we ONLY list from the collection
justcol=$(ansible-doc -l -t ${ptype} --playbook-dir ./ testns.testcol|wc -l)
test "$justcol" -eq "$expected"
- echo "validate collection plugin name display for plugin ${ptype}"
+ # ensure the right names are displayed
list_result=$(ansible-doc -l -t ${ptype} --playbook-dir ./ testns.testcol)
metadata_result=$(ansible-doc --metadata-dump --no-fail-on-errors -t ${ptype} --playbook-dir ./ testns.testcol)
for name in "${expected_names[@]}"; do
- echo "${list_result}" | grep $GREP_OPTS "testns.testcol.${name}"
- echo "${metadata_result}" | grep $GREP_OPTS "testns.testcol.${name}"
+ echo "${list_result}" | grep "testns.testcol.${name}"
+ echo "${metadata_result}" | grep "testns.testcol.${name}"
done
- # ensure we get error if passing invalid collection, much less any plugins
- ansible-doc -l -t ${ptype} bogus.boguscoll 2>&1 | grep $GREP_OPTS "unable to locate collection"
+ # ensure we get error if passing invalid collection, much less any plugins
+ ansible-doc -l -t ${ptype} testns.testcol 2>&1 | grep "unable to locate collection"
# TODO: do we want per namespace?
# ensure we get 1 plugins when restricting namespace
@@ -111,28 +73,20 @@ done
#### test role functionality
-echo "testing role text output"
+# Test role text output
# we use sed to strip the role path from the first line
current_role_out="$(ansible-doc -t role -r ./roles test_role1 | sed '1 s/\(^> TEST_ROLE1\).*(.*)$/\1/')"
expected_role_out="$(sed '1 s/\(^> TEST_ROLE1\).*(.*)$/\1/' fakerole.output)"
test "$current_role_out" == "$expected_role_out"
-echo "testing multiple role entrypoints"
# Two collection roles are defined, but only 1 has a role arg spec with 2 entry points
output=$(ansible-doc -t role -l --playbook-dir . testns.testcol | wc -l)
test "$output" -eq 2
-echo "test listing roles with multiple collection filters"
-# Two collection roles are defined, but only 1 has a role arg spec with 2 entry points
-output=$(ansible-doc -t role -l --playbook-dir . testns.testcol2 testns.testcol | wc -l)
-test "$output" -eq 2
-
-echo "testing standalone roles"
# Include normal roles (no collection filter)
output=$(ansible-doc -t role -l --playbook-dir . | wc -l)
test "$output" -eq 3
-echo "testing role precedence"
# Test that a role in the playbook dir with the same name as a role in the
# 'roles' subdir of the playbook dir does not appear (lower precedence).
output=$(ansible-doc -t role -l --playbook-dir . | grep -c "test_role1 from roles subdir")
@@ -140,7 +94,7 @@ test "$output" -eq 1
output=$(ansible-doc -t role -l --playbook-dir . | grep -c "test_role1 from playbook dir" || true)
test "$output" -eq 0
-echo "testing role entrypoint filter"
+# Test entry point filter
current_role_out="$(ansible-doc -t role --playbook-dir . testns.testcol.testrole -e alternate| sed '1 s/\(^> TESTNS\.TESTCOL\.TESTROLE\).*(.*)$/\1/')"
expected_role_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.TESTROLE\).*(.*)$/\1/' fakecollrole.output)"
test "$current_role_out" == "$expected_role_out"
@@ -149,16 +103,10 @@ test "$current_role_out" == "$expected_role_out"
#### test add_collection_to_versions_and_dates()
-echo "testing json output"
current_out="$(ansible-doc --json --playbook-dir ./ testns.testcol.randommodule | sed 's/ *$//' | sed 's/ *"filename": "[^"]*",$//')"
expected_out="$(sed 's/ *"filename": "[^"]*",$//' randommodule.output)"
test "$current_out" == "$expected_out"
-echo "testing json output 2"
-current_out="$(ansible-doc --json --playbook-dir ./ testns.testcol.yolo --type test | sed 's/ *$//' | sed 's/ *"filename": "[^"]*",$//')"
-expected_out="$(sed 's/ *"filename": "[^"]*",$//' yolo.output)"
-test "$current_out" == "$expected_out"
-
current_out="$(ansible-doc --json --playbook-dir ./ -t cache testns.testcol.notjsonfile | sed 's/ *$//' | sed 's/ *"filename": "[^"]*",$//')"
expected_out="$(sed 's/ *"filename": "[^"]*",$//' notjsonfile.output)"
test "$current_out" == "$expected_out"
@@ -171,9 +119,8 @@ current_out="$(ansible-doc --json --playbook-dir ./ -t vars testns.testcol.noop_
expected_out="$(sed 's/ *"filename": "[^"]*",$//' noop_vars_plugin.output)"
test "$current_out" == "$expected_out"
-echo "testing metadata dump"
# just ensure it runs
-ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --playbook-dir /dev/null 1>/dev/null 2>&1
+ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --playbook-dir /dev/null >/dev/null
# create broken role argument spec
mkdir -p broken-docs/collections/ansible_collections/testns/testcol/roles/testrole/meta
@@ -197,72 +144,71 @@ argument_specs:
EOF
# ensure that --metadata-dump does not fail when --no-fail-on-errors is supplied
-ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --no-fail-on-errors --playbook-dir broken-docs testns.testcol 1>/dev/null 2>&1
+ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --no-fail-on-errors --playbook-dir broken-docs testns.testcol >/dev/null
# ensure that --metadata-dump does fail when --no-fail-on-errors is not supplied
output=$(ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --playbook-dir broken-docs testns.testcol 2>&1 | grep -c 'ERROR!' || true)
test "${output}" -eq 1
-
-echo "testing legacy plugin listing"
+# ensure we list the 'legacy plugins'
[ "$(ansible-doc -M ./library -l ansible.legacy |wc -l)" -gt "0" ]
-echo "testing legacy plugin list via --playbook-dir"
+# playbook dir should work the same
[ "$(ansible-doc -l ansible.legacy --playbook-dir ./|wc -l)" -gt "0" ]
-echo "testing undocumented plugin output"
+# see that we show undocumented when missing docs
[ "$(ansible-doc -M ./library -l ansible.legacy |grep -c UNDOCUMENTED)" == "6" ]
-echo "testing filtering does not include any 'test_' modules"
+# ensure filtering works and does not include any 'test_' modules
[ "$(ansible-doc -M ./library -l ansible.builtin |grep -c test_)" == 0 ]
[ "$(ansible-doc --playbook-dir ./ -l ansible.builtin |grep -c test_)" == 0 ]
-echo "testing filtering still shows modules"
+# ensure filtering still shows modules
count=$(ANSIBLE_LIBRARY='./nolibrary' ansible-doc -l ansible.builtin |wc -l)
[ "${count}" -gt "0" ]
[ "$(ansible-doc -M ./library -l ansible.builtin |wc -l)" == "${count}" ]
[ "$(ansible-doc --playbook-dir ./ -l ansible.builtin |wc -l)" == "${count}" ]
-echo "testing sidecar docs for jinja plugins"
+# produce 'sidecar' docs for test
[ "$(ansible-doc -t test --playbook-dir ./ testns.testcol.yolo| wc -l)" -gt "0" ]
[ "$(ansible-doc -t filter --playbook-dir ./ donothing| wc -l)" -gt "0" ]
[ "$(ansible-doc -t filter --playbook-dir ./ ansible.legacy.donothing| wc -l)" -gt "0" ]
-echo "testing no docs and no sidecar"
-ansible-doc -t filter --playbook-dir ./ nodocs 2>&1| grep $GREP_OPTS -c 'missing documentation' || true
+# no docs and no sidecar
+ansible-doc -t filter --playbook-dir ./ nodocs 2>&1| grep -c 'missing documentation' || true
-echo "testing sidecar docs for module"
+# produce 'sidecar' docs for module
[ "$(ansible-doc -M ./library test_win_module| wc -l)" -gt "0" ]
[ "$(ansible-doc --playbook-dir ./ test_win_module| wc -l)" -gt "0" ]
-echo "testing duplicate DOCUMENTATION"
+# test 'double DOCUMENTATION' use
[ "$(ansible-doc --playbook-dir ./ double_doc| wc -l)" -gt "0" ]
-echo "testing don't break on module dir"
+# don't break on module dir
ansible-doc --list --module-path ./modules > /dev/null
-echo "testing dedupe by fqcn and not base name"
+# ensure we dedupe by fqcn and not base name
[ "$(ansible-doc -l -t filter --playbook-dir ./ |grep -c 'b64decode')" -eq "3" ]
-echo "testing no duplicates for plugins that only exist in ansible.builtin when listing ansible.legacy plugins"
+# ensure we don't show duplicates for plugins that only exist in ansible.builtin when listing ansible.legacy plugins
[ "$(ansible-doc -l -t filter --playbook-dir ./ |grep -c 'b64encode')" -eq "1" ]
-echo "testing with playbook dir, legacy should override"
-ansible-doc -t filter split --playbook-dir ./ |grep $GREP_OPTS histerical
+# with playbook dir, legacy should override
+ansible-doc -t filter split --playbook-dir ./ |grep histerical
pyc_src="$(pwd)/filter_plugins/other.py"
pyc_1="$(pwd)/filter_plugins/split.pyc"
pyc_2="$(pwd)/library/notaplugin.pyc"
trap 'rm -rf "$pyc_1" "$pyc_2"' EXIT
-echo "testing pyc files are not used as adjacent documentation"
+# test pyc files are not used as adjacent documentation
python -c "import py_compile; py_compile.compile('$pyc_src', cfile='$pyc_1')"
-ansible-doc -t filter split --playbook-dir ./ |grep $GREP_OPTS histerical
+ansible-doc -t filter split --playbook-dir ./ |grep histerical
-echo "testing pyc files are not listed as plugins"
+# test pyc files are not listed as plugins
python -c "import py_compile; py_compile.compile('$pyc_src', cfile='$pyc_2')"
test "$(ansible-doc -l -t module --playbook-dir ./ 2>&1 1>/dev/null |grep -c "notaplugin")" == 0
-echo "testing without playbook dir, builtin should return"
-ansible-doc -t filter split 2>&1 |grep $GREP_OPTS -v histerical
+# without playbook dir, builtin should return
+ansible-doc -t filter split |grep -v histerical
diff --git a/test/integration/targets/ansible-doc/yolo-text.output b/test/integration/targets/ansible-doc/yolo-text.output
deleted file mode 100644
index 647a4f6a..00000000
--- a/test/integration/targets/ansible-doc/yolo-text.output
+++ /dev/null
@@ -1,47 +0,0 @@
-> TESTNS.TESTCOL.YOLO (./collections/ansible_collections/testns/testcol/plugins/test/yolo.yml)
-
- This is always true
-
-OPTIONS (= is mandatory):
-
-= _input
- does not matter
- type: raw
-
-
-SEE ALSO:
- * Module ansible.builtin.test
- The official documentation on the
- ansible.builtin.test module.
- https://docs.ansible.com/ansible-core/devel/collections/ansible/builtin/test_module.html
- * Module testns.testcol.fakemodule
- A fake module
- * Lookup plugin testns.testcol.noop
- * Filter plugin testns.testcol.grouped
- A grouped filter.
- * Filter plugin ansible.builtin.combine
- The official documentation on the
- ansible.builtin.combine filter plugin.
- https://docs.ansible.com/ansible-core/devel/collections/ansible/builtin/combine_filter.html
- * Lookup plugin ansible.builtin.file
- Read a file on the controller.
- https://docs.ansible.com/ansible-core/devel/collections/ansible/builtin/file_lookup.html
- * Ansible docsite
- See also the Ansible docsite.
- https://docs.ansible.com
- * Ansible documentation [foo_bar]
- Some foo bar.
- https://docs.ansible.com/ansible-core/devel/#stq=foo_bar&stp=1
-
-
-NAME: yolo
-
-EXAMPLES:
-
-{{ 'anything' is yolo }}
-
-
-RETURN VALUES:
-- output
- always true
- type: boolean
diff --git a/test/integration/targets/ansible-doc/yolo.output b/test/integration/targets/ansible-doc/yolo.output
deleted file mode 100644
index b54cc2de..00000000
--- a/test/integration/targets/ansible-doc/yolo.output
+++ /dev/null
@@ -1,64 +0,0 @@
-{
- "testns.testcol.yolo": {
- "doc": {
- "collection": "testns.testcol",
- "description": [
- "This is always true"
- ],
- "filename": "./collections/ansible_collections/testns/testcol/plugins/test/yolo.yml",
- "name": "yolo",
- "options": {
- "_input": {
- "description": "does not matter",
- "required": true,
- "type": "raw"
- }
- },
- "seealso": [
- {
- "module": "ansible.builtin.test"
- },
- {
- "description": "A fake module",
- "module": "testns.testcol.fakemodule"
- },
- {
- "plugin": "testns.testcol.noop",
- "plugin_type": "lookup"
- },
- {
- "description": "A grouped filter.",
- "plugin": "testns.testcol.grouped",
- "plugin_type": "filter"
- },
- {
- "plugin": "ansible.builtin.combine",
- "plugin_type": "filter"
- },
- {
- "description": "Read a file on the controller.",
- "plugin": "ansible.builtin.file",
- "plugin_type": "lookup"
- },
- {
- "description": "See also the Ansible docsite.",
- "link": "https://docs.ansible.com",
- "name": "Ansible docsite"
- },
- {
- "description": "Some foo bar.",
- "ref": "foo_bar"
- }
- ],
- "short_description": "you only live once"
- },
- "examples": "{{ 'anything' is yolo }}\n",
- "metadata": null,
- "return": {
- "output": {
- "description": "always true",
- "type": "boolean"
- }
- }
- }
-}
diff --git a/test/integration/targets/ansible-galaxy-collection-cli/files/expected.txt b/test/integration/targets/ansible-galaxy-collection-cli/files/expected.txt
index 69218290..110009e3 100644
--- a/test/integration/targets/ansible-galaxy-collection-cli/files/expected.txt
+++ b/test/integration/targets/ansible-galaxy-collection-cli/files/expected.txt
@@ -1,11 +1,6 @@
MANIFEST.json
FILES.json
README.rst
-GPL
-LICENSES/
-LICENSES/MIT.txt
-.reuse/
-.reuse/dep5
changelogs/
docs/
playbooks/
@@ -93,7 +88,6 @@ plugins/test/bar.yml
plugins/test/baz.yaml
plugins/test/test.py
plugins/vars/bar.yml
-plugins/vars/bar.yml.license
plugins/vars/baz.yaml
plugins/vars/test.py
roles/foo/
diff --git a/test/integration/targets/ansible-galaxy-collection-cli/files/galaxy.yml b/test/integration/targets/ansible-galaxy-collection-cli/files/galaxy.yml
index 140bf2a7..8f0ada0b 100644
--- a/test/integration/targets/ansible-galaxy-collection-cli/files/galaxy.yml
+++ b/test/integration/targets/ansible-galaxy-collection-cli/files/galaxy.yml
@@ -2,7 +2,6 @@ namespace: ns
name: col
version: 1.0.0
readme: README.rst
-license_file: GPL
authors:
- Ansible
manifest:
diff --git a/test/integration/targets/ansible-galaxy-collection-cli/files/make_collection_dir.py b/test/integration/targets/ansible-galaxy-collection-cli/files/make_collection_dir.py
index 60c43cc7..913a6f79 100644
--- a/test/integration/targets/ansible-galaxy-collection-cli/files/make_collection_dir.py
+++ b/test/integration/targets/ansible-galaxy-collection-cli/files/make_collection_dir.py
@@ -5,12 +5,8 @@ paths = [
'ns-col-1.0.0.tar.gz',
'foo.txt',
'README.rst',
- 'GPL',
- 'LICENSES/MIT.txt',
- '.reuse/dep5',
'artifacts/.gitkeep',
'plugins/vars/bar.yml',
- 'plugins/vars/bar.yml.license',
'plugins/vars/baz.yaml',
'plugins/vars/test.py',
'plugins/vars/docs.md',
diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml
index f0e78ca0..dab599b1 100644
--- a/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml
+++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml
@@ -5,7 +5,7 @@
- name: Test installing collections from git repositories
environment:
- ANSIBLE_COLLECTIONS_PATH: "{{ galaxy_dir }}/collections"
+ ANSIBLE_COLLECTIONS_PATHS: "{{ galaxy_dir }}/collections"
vars:
cleanup: True
galaxy_dir: "{{ galaxy_dir }}"
diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/multi_collection_repo_all.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/multi_collection_repo_all.yml
index 91ed9124..f22f9844 100644
--- a/test/integration/targets/ansible-galaxy-collection-scm/tasks/multi_collection_repo_all.yml
+++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/multi_collection_repo_all.yml
@@ -14,8 +14,6 @@
command: 'ansible-galaxy collection install {{ artifact_path }} -p {{ alt_install_path }} --no-deps'
vars:
artifact_path: "{{ galaxy_dir }}/ansible_test-collection_1-1.0.0.tar.gz"
- environment:
- ANSIBLE_COLLECTIONS_PATH: ""
- name: check if the files and folders in build_ignore were respected
stat:
diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_recursive_scm_dependency.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_recursive_scm_dependency.yml
index 520dbe5c..dd307d72 100644
--- a/test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_recursive_scm_dependency.yml
+++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_recursive_scm_dependency.yml
@@ -22,12 +22,7 @@
lineinfile:
path: '{{ scm_path }}/namespace_2/collection_2/galaxy.yml'
regexp: '^dependencies'
- # NOTE: The committish is set to `HEAD` here because Git's default has
- # NOTE: changed to `main` and it behaves differently in
- # NOTE: different envs with different Git versions.
- line: >-
- dependencies:
- {'git+file://{{ scm_path }}/namespace_1/.git#collection_1/': 'HEAD'}
+ line: "dependencies: {'git+file://{{ scm_path }}/namespace_1/.git#collection_1/': 'master'}"
- name: Commit the changes
shell: git add ./; git commit -m 'add collection'
diff --git a/test/integration/targets/ansible-galaxy-collection/library/reset_pulp.py b/test/integration/targets/ansible-galaxy-collection/library/reset_pulp.py
index c1f5e1d7..53c29f77 100644
--- a/test/integration/targets/ansible-galaxy-collection/library/reset_pulp.py
+++ b/test/integration/targets/ansible-galaxy-collection/library/reset_pulp.py
@@ -84,8 +84,7 @@ def invoke_api(module, url, method='GET', data=None, status_codes=None):
resp, info = fetch_url(module, url, method=method, data=data, headers=headers)
if info['status'] not in status_codes:
- info['url'] = url
- module.fail_json(**info)
+ module.fail_json(url=url, **info)
data = to_text(resp.read())
if data:
@@ -106,7 +105,7 @@ def delete_pulp_distribution(distribution, module):
def delete_pulp_orphans(module):
""" Deletes any orphaned pulp objects. """
- orphan_uri = module.params['galaxy_ng_server'] + 'pulp/api/v3/orphans/'
+ orphan_uri = module.params['pulp_api'] + '/pulp/api/v3/orphans/'
task_info = invoke_api(module, orphan_uri, method='DELETE', status_codes=[202])
wait_pulp_task(task_info['task'], module)
@@ -126,39 +125,25 @@ def get_galaxy_namespaces(module):
return [n['name'] for n in ns_info['data']]
-def get_pulp_distributions(module, distribution):
+def get_pulp_distributions(module):
""" Gets a list of all the pulp distributions. """
- distro_uri = module.params['galaxy_ng_server'] + 'pulp/api/v3/distributions/ansible/ansible/'
- distro_info = invoke_api(module, distro_uri + '?name=' + distribution)
+ distro_uri = module.params['pulp_api'] + '/pulp/api/v3/distributions/ansible/ansible/'
+ distro_info = invoke_api(module, distro_uri)
return [module.params['pulp_api'] + r['pulp_href'] for r in distro_info['results']]
-def get_pulp_repositories(module, repository):
+def get_pulp_repositories(module):
""" Gets a list of all the pulp repositories. """
- repo_uri = module.params['galaxy_ng_server'] + 'pulp/api/v3/repositories/ansible/ansible/'
- repo_info = invoke_api(module, repo_uri + '?name=' + repository)
+ repo_uri = module.params['pulp_api'] + '/pulp/api/v3/repositories/ansible/ansible/'
+ repo_info = invoke_api(module, repo_uri)
return [module.params['pulp_api'] + r['pulp_href'] for r in repo_info['results']]
-def get_repo_collections(repository, module):
- collections_uri = module.params['galaxy_ng_server'] + 'v3/plugin/ansible/content/' + repository + '/collections/index/'
- # status code 500 isn't really expected, an unhandled exception is causing this instead of a 404
- # See https://issues.redhat.com/browse/AAH-2329
- info = invoke_api(module, collections_uri + '?limit=100&offset=0', status_codes=[200, 500])
- if not info:
- return []
- return [module.params['pulp_api'] + c['href'] for c in info['data']]
-
-
-def delete_repo_collection(collection, module):
- task_info = invoke_api(module, collection, method='DELETE', status_codes=[202])
- wait_pulp_task(task_info['task'], module)
-
-
def new_galaxy_namespace(name, module):
""" Creates a new namespace in Galaxy NG. """
- ns_uri = module.params['galaxy_ng_server'] + 'v3/namespaces/ '
- data = {'name': name, 'groups': []}
+ ns_uri = module.params['galaxy_ng_server'] + 'v3/_ui/namespaces/'
+ data = {'name': name, 'groups': [{'name': 'system:partner-engineers', 'object_permissions':
+ ['add_namespace', 'change_namespace', 'upload_to_namespace']}]}
ns_info = invoke_api(module, ns_uri, method='POST', data=data, status_codes=[201])
return ns_info['id']
@@ -166,17 +151,16 @@ def new_galaxy_namespace(name, module):
def new_pulp_repository(name, module):
""" Creates a new pulp repository. """
- repo_uri = module.params['galaxy_ng_server'] + 'pulp/api/v3/repositories/ansible/ansible/'
- # retain_repo_versions to work around https://issues.redhat.com/browse/AAH-2332
- data = {'name': name, 'retain_repo_versions': '1024'}
+ repo_uri = module.params['pulp_api'] + '/pulp/api/v3/repositories/ansible/ansible/'
+ data = {'name': name}
repo_info = invoke_api(module, repo_uri, method='POST', data=data, status_codes=[201])
- return repo_info['pulp_href']
+ return module.params['pulp_api'] + repo_info['pulp_href']
def new_pulp_distribution(name, base_path, repository, module):
""" Creates a new pulp distribution for a repository. """
- distro_uri = module.params['galaxy_ng_server'] + 'pulp/api/v3/distributions/ansible/ansible/'
+ distro_uri = module.params['pulp_api'] + '/pulp/api/v3/distributions/ansible/ansible/'
data = {'name': name, 'base_path': base_path, 'repository': repository}
task_info = invoke_api(module, distro_uri, method='POST', data=data, status_codes=[202])
task_info = wait_pulp_task(task_info['task'], module)
@@ -210,15 +194,8 @@ def main():
)
module.params['force_basic_auth'] = True
- # It may be due to the process of cleaning up orphans, but we cannot delete the namespace
- # while a collection still exists, so this is just a new safety to nuke all collections
- # first
- for repository in module.params['repositories']:
- [delete_repo_collection(c, module) for c in get_repo_collections(repository, module)]
-
- for repository in module.params['repositories']:
- [delete_pulp_distribution(d, module) for d in get_pulp_distributions(module, repository)]
- [delete_pulp_repository(r, module) for r in get_pulp_repositories(module, repository)]
+ [delete_pulp_distribution(d, module) for d in get_pulp_distributions(module)]
+ [delete_pulp_repository(r, module) for r in get_pulp_repositories(module)]
delete_pulp_orphans(module)
[delete_galaxy_namespace(n, module) for n in get_galaxy_namespaces(module)]
diff --git a/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py b/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py
index 423edd9e..f4a51c4b 100644
--- a/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py
+++ b/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py
@@ -77,7 +77,6 @@ RETURN = '''
#
'''
-import datetime
import os
import subprocess
import tarfile
@@ -85,13 +84,13 @@ import tempfile
import yaml
from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
from functools import partial
from multiprocessing import dummy as threading
from multiprocessing import TimeoutError
-COLLECTIONS_BUILD_AND_PUBLISH_TIMEOUT = 180
+COLLECTIONS_BUILD_AND_PUBLISH_TIMEOUT = 120
def publish_collection(module, collection):
@@ -105,7 +104,6 @@ def publish_collection(module, collection):
collection_dir = os.path.join(module.tmpdir, "%s-%s-%s" % (namespace, name, version))
b_collection_dir = to_bytes(collection_dir, errors='surrogate_or_strict')
os.mkdir(b_collection_dir)
- os.mkdir(os.path.join(b_collection_dir, b'meta'))
with open(os.path.join(b_collection_dir, b'README.md'), mode='wb') as fd:
fd.write(b"Collection readme")
@@ -122,8 +120,6 @@ def publish_collection(module, collection):
}
with open(os.path.join(b_collection_dir, b'galaxy.yml'), mode='wb') as fd:
fd.write(to_bytes(yaml.safe_dump(galaxy_meta), errors='surrogate_or_strict'))
- with open(os.path.join(b_collection_dir, b'meta/runtime.yml'), mode='wb') as fd:
- fd.write(b'requires_ansible: ">=1.0.0"')
with tempfile.NamedTemporaryFile(mode='wb') as temp_fd:
temp_fd.write(b"data")
@@ -250,8 +246,7 @@ def run_module():
supports_check_mode=False
)
- start = datetime.datetime.now()
- result = dict(changed=True, results=[], start=str(start))
+ result = dict(changed=True, results=[])
pool = threading.Pool(4)
publish_func = partial(publish_collection, module)
@@ -268,9 +263,7 @@ def run_module():
r['build']['rc'] + r['publish']['rc'] for r in result['results']
))
- end = datetime.datetime.now()
- delta = end - start
- module.exit_json(failed=failed, end=str(end), delta=str(delta), **result)
+ module.exit_json(failed=failed, **result)
def main():
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/build.yml b/test/integration/targets/ansible-galaxy-collection/tasks/build.yml
index 83e9acc9..8140d468 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/build.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/build.yml
@@ -1,29 +1,4 @@
---
-- name: create a dangling symbolic link inside collection directory
- ansible.builtin.file:
- src: '/non-existent-path/README.md'
- dest: '{{ galaxy_dir }}/scratch/ansible_test/my_collection/docs/README.md'
- state: link
- force: yes
-
-- name: build basic collection based on current directory with dangling symlink
- command: ansible-galaxy collection build {{ galaxy_verbosity }}
- args:
- chdir: '{{ galaxy_dir }}/scratch/ansible_test/my_collection'
- register: fail_symlink_build
- ignore_errors: yes
-
-- name: assert that build fails due to dangling symlink
- assert:
- that:
- - fail_symlink_build.failed
- - '"Failed to find the target path" in fail_symlink_build.stderr'
-
-- name: remove dangling symbolic link
- ansible.builtin.file:
- path: '{{ galaxy_dir }}/scratch/ansible_test/my_collection/docs/README.md'
- state: absent
-
- name: build basic collection based on current directory
command: ansible-galaxy collection build {{ galaxy_verbosity }}
args:
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/download.yml b/test/integration/targets/ansible-galaxy-collection/tasks/download.yml
index a554c277..b651a73e 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/download.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/download.yml
@@ -5,7 +5,7 @@
state: directory
- name: download collection with multiple dependencies with --no-deps
- command: ansible-galaxy collection download parent_dep.parent_collection:1.0.0 --no-deps -s galaxy_ng {{ galaxy_verbosity }}
+ command: ansible-galaxy collection download parent_dep.parent_collection:1.0.0 --no-deps -s pulp_v2 {{ galaxy_verbosity }}
register: download_collection
args:
chdir: '{{ galaxy_dir }}/download'
@@ -34,7 +34,7 @@
- (download_collection_actual.files[1].path | basename) in ['requirements.yml', 'parent_dep-parent_collection-1.0.0.tar.gz']
- name: download collection with multiple dependencies
- command: ansible-galaxy collection download parent_dep.parent_collection:1.0.0 -s galaxy_ng {{ galaxy_verbosity }}
+ command: ansible-galaxy collection download parent_dep.parent_collection:1.0.0 -s pulp_v2 {{ galaxy_verbosity }}
register: download_collection
args:
chdir: '{{ galaxy_dir }}/download'
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/fail_fast_resolvelib.yml b/test/integration/targets/ansible-galaxy-collection/tasks/fail_fast_resolvelib.yml
index d861cb4d..eb471f8e 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/fail_fast_resolvelib.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/fail_fast_resolvelib.yml
@@ -1,5 +1,5 @@
# resolvelib>=0.6.0 added an 'incompatibilities' parameter to find_matches
-# If incompatibilities aren't removed from the viable candidates, this example causes infinite recursion
+# If incompatibilities aren't removed from the viable candidates, this example causes infinite resursion
- name: test resolvelib removes incompatibilites in find_matches and errors quickly (prevent infinite recursion)
block:
- name: create collection dir
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/init.yml b/test/integration/targets/ansible-galaxy-collection/tasks/init.yml
index 46198fef..17a000db 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/init.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/init.yml
@@ -5,12 +5,6 @@
chdir: '{{ galaxy_dir }}/scratch'
register: init_relative
-- name: create required runtime.yml
- copy:
- content: |
- requires_ansible: '>=1.0.0'
- dest: '{{ galaxy_dir }}/scratch/ansible_test/my_collection/meta/runtime.yml'
-
- name: get result of create default skeleton
find:
path: '{{ galaxy_dir }}/scratch/ansible_test/my_collection'
@@ -98,65 +92,6 @@
- (init_custom_path_actual.files | map(attribute='path') | list)[2] | basename in ['docs', 'plugins', 'roles', 'meta']
- (init_custom_path_actual.files | map(attribute='path') | list)[3] | basename in ['docs', 'plugins', 'roles', 'meta']
-- name: test using a custom skeleton for collection init
- block:
- - name: create skeleton directories
- file:
- path: "{{ galaxy_dir }}/scratch/skeleton/{{ item }}"
- state: directory
- loop:
- - custom_skeleton
- - custom_skeleton/plugins
- - inventory
-
- - name: create files
- file:
- path: "{{ galaxy_dir }}/scratch/skeleton/{{ item }}"
- state: touch
- loop:
- - inventory/foo.py
- - galaxy.yml
-
- - name: create symlinks
- file:
- path: "{{ galaxy_dir }}/scratch/skeleton/{{ item.link }}"
- src: "{{ galaxy_dir }}/scratch/skeleton/{{ item.source }}"
- state: link
- loop:
- - link: custom_skeleton/plugins/inventory
- source: inventory
- - link: custom_skeleton/galaxy.yml
- source: galaxy.yml
-
- - name: initialize a collection using the skeleton
- command: ansible-galaxy collection init ansible_test.my_collection {{ init_path }} {{ skeleton }}
- vars:
- init_path: '--init-path {{ galaxy_dir }}/scratch/skeleton'
- skeleton: '--collection-skeleton {{ galaxy_dir }}/scratch/skeleton/custom_skeleton'
-
- - name: stat expected collection contents
- stat:
- path: "{{ galaxy_dir }}/scratch/skeleton/ansible_test/my_collection/{{ item }}"
- register: stat_result
- loop:
- - plugins
- - plugins/inventory
- - galaxy.yml
- - plugins/inventory/foo.py
-
- - assert:
- that:
- - stat_result.results[0].stat.isdir
- - stat_result.results[1].stat.islnk
- - stat_result.results[2].stat.islnk
- - stat_result.results[3].stat.isreg
-
- always:
- - name: cleanup
- file:
- path: "{{ galaxy_dir }}/scratch/skeleton"
- state: absent
-
- name: create collection for ignored files and folders
command: ansible-galaxy collection init ansible_test.ignore
args:
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
index 92378266..cca83c7b 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
@@ -165,13 +165,10 @@
failed_when:
- '"Could not satisfy the following requirements" not in fail_dep_mismatch.stderr'
- '" fail_dep2.name:<0.0.5 (dependency of fail_namespace.fail_collection:2.1.2)" not in fail_dep_mismatch.stderr'
- - 'pre_release_hint not in fail_dep_mismatch.stderr'
- vars:
- pre_release_hint: 'Hint: Pre-releases are not installed by default unless the specific version is given. To enable pre-releases, use --pre.'
- name: Find artifact url for namespace3.name
uri:
- url: '{{ test_api_server }}v3/plugin/ansible/content/primary/collections/index/namespace3/name/versions/1.0.0/'
+ url: '{{ test_server }}{{ vX }}collections/namespace3/name/versions/1.0.0/'
user: '{{ pulp_user }}'
password: '{{ pulp_password }}'
force_basic_auth: true
@@ -221,7 +218,7 @@
state: absent
- assert:
- that: expected_error in error
+ that: error == expected_error
vars:
error: "{{ result.stderr | regex_replace('\\n', ' ') }}"
expected_error: >-
@@ -261,14 +258,12 @@
ignore_errors: yes
register: result
- - debug:
- msg: "Actual - {{ error }}"
+ - debug: msg="Actual - {{ error }}"
- - debug:
- msg: "Expected - {{ expected_error }}"
+ - debug: msg="Expected - {{ expected_error }}"
- assert:
- that: expected_error in error
+ that: error == expected_error
always:
- name: clean up collection skeleton and artifact
file:
@@ -300,7 +295,7 @@
- name: Find artifact url for namespace4.name
uri:
- url: '{{ test_api_server }}v3/plugin/ansible/content/primary/collections/index/namespace4/name/versions/1.0.0/'
+ url: '{{ test_server }}{{ vX }}collections/namespace4/name/versions/1.0.0/'
user: '{{ pulp_user }}'
password: '{{ pulp_password }}'
force_basic_auth: true
@@ -330,11 +325,10 @@
environment:
ANSIBLE_GALAXY_SERVER_LIST: undefined
-# pulp_v2 doesn't require auth
-- when: v2|default(false)
+- when: not requires_auth
block:
- name: install a collection with an empty server list - {{ test_id }}
- command: ansible-galaxy collection install namespace5.name -s '{{ test_server }}' --api-version 2 {{ galaxy_verbosity }}
+ command: ansible-galaxy collection install namespace5.name -s '{{ test_server }}' {{ galaxy_verbosity }}
register: install_empty_server_list
environment:
ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
@@ -577,6 +571,7 @@
- namespace8
- namespace9
+# SIVEL
- name: assert invalid signature is not fatal with ansible-galaxy install --ignore-errors - {{ test_id }}
assert:
that:
@@ -651,7 +646,6 @@
- namespace8
- namespace9
-# test --ignore-signature-status-code extends ANSIBLE_GALAXY_IGNORE_SIGNATURE_STATUS_CODES env var
- name: install collections with only one valid signature by ignoring the other errors
command: ansible-galaxy install -r {{ req_file }} {{ cli_opts }} {{ galaxy_verbosity }} --ignore-signature-status-code FAILURE
register: install_req
@@ -692,60 +686,6 @@
vars:
install_stderr: "{{ install_req.stderr | regex_replace('\\n', ' ') }}"
-# test --ignore-signature-status-code passed multiple times
-- name: reinstall collections with only one valid signature by ignoring the other errors
- command: ansible-galaxy install -r {{ req_file }} {{ cli_opts }} {{ galaxy_verbosity }} {{ ignore_errors }}
- register: install_req
- vars:
- req_file: "{{ galaxy_dir }}/ansible_collections/requirements.yaml"
- cli_opts: "-s {{ test_name }} --keyring {{ keyring }} --force"
- keyring: "{{ gpg_homedir }}/pubring.kbx"
- ignore_errors: "--ignore-signature-status-code BADSIG --ignore-signature-status-code FAILURE"
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
- ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: all
- ANSIBLE_NOCOLOR: True
- ANSIBLE_FORCE_COLOR: False
-
-- name: assert invalid signature is not fatal with ansible-galaxy install - {{ test_name }}
- assert:
- that:
- - install_req is success
- - '"Installing ''namespace7.name:1.0.0'' to" in install_req.stdout'
- - '"Signature verification failed for ''namespace7.name'' (return code 1)" not in install_req.stdout'
- - '"Not installing namespace7.name because GnuPG signature verification failed." not in install_stderr'
- - '"Installing ''namespace8.name:1.0.0'' to" in install_req.stdout'
- - '"Installing ''namespace9.name:1.0.0'' to" in install_req.stdout'
- vars:
- install_stderr: "{{ install_req.stderr | regex_replace('\\n', ' ') }}"
-
-# test --ignore-signature-status-code passed once with a list
-- name: reinstall collections with only one valid signature by ignoring the other errors
- command: ansible-galaxy install -r {{ req_file }} {{ cli_opts }} {{ galaxy_verbosity }} {{ ignore_errors }}
- register: install_req
- vars:
- req_file: "{{ galaxy_dir }}/ansible_collections/requirements.yaml"
- cli_opts: "-s {{ test_name }} --keyring {{ keyring }} --force"
- keyring: "{{ gpg_homedir }}/pubring.kbx"
- ignore_errors: "--ignore-signature-status-codes BADSIG FAILURE"
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
- ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: all
- ANSIBLE_NOCOLOR: True
- ANSIBLE_FORCE_COLOR: False
-
-- name: assert invalid signature is not fatal with ansible-galaxy install - {{ test_name }}
- assert:
- that:
- - install_req is success
- - '"Installing ''namespace7.name:1.0.0'' to" in install_req.stdout'
- - '"Signature verification failed for ''namespace7.name'' (return code 1)" not in install_req.stdout'
- - '"Not installing namespace7.name because GnuPG signature verification failed." not in install_stderr'
- - '"Installing ''namespace8.name:1.0.0'' to" in install_req.stdout'
- - '"Installing ''namespace9.name:1.0.0'' to" in install_req.stdout'
- vars:
- install_stderr: "{{ install_req.stderr | regex_replace('\\n', ' ') }}"
-
- name: clean up collections from last test
file:
path: '{{ galaxy_dir }}/ansible_collections/{{ collection }}/name'
@@ -757,45 +697,44 @@
- namespace8
- namespace9
-- when: not v2|default(false)
- block:
- - name: install cache.cache at the current latest version
- command: ansible-galaxy collection install cache.cache -s '{{ test_name }}' -vvv
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
-
- - set_fact:
- cache_version_build: '{{ (cache_version_build | int) + 1 }}'
-
- - name: publish update for cache.cache test
- setup_collections:
- server: galaxy_ng
- collections:
- - namespace: cache
- name: cache
- version: 1.0.{{ cache_version_build }}
-
- - name: make sure the cache version list is ignored on a collection version change - {{ test_id }}
- command: ansible-galaxy collection install cache.cache -s '{{ test_name }}' --force -vvv
- register: install_cached_update
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
-
- - name: get result of cache version list is ignored on a collection version change - {{ test_id }}
- slurp:
- path: '{{ galaxy_dir }}/ansible_collections/cache/cache/MANIFEST.json'
- register: install_cached_update_actual
-
- - name: assert cache version list is ignored on a collection version change - {{ test_id }}
- assert:
- that:
- - '"Installing ''cache.cache:1.0.{{ cache_version_build }}'' to" in install_cached_update.stdout'
- - (install_cached_update_actual.content | b64decode | from_json).collection_info.version == '1.0.' ~ cache_version_build
+# Uncomment once pulp container is at pulp>=0.5.0
+#- name: install cache.cache at the current latest version
+# command: ansible-galaxy collection install cache.cache -s '{{ test_name }}' -vvv
+# environment:
+# ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+#
+#- set_fact:
+# cache_version_build: '{{ (cache_version_build | int) + 1 }}'
+#
+#- name: publish update for cache.cache test
+# setup_collections:
+# server: galaxy_ng
+# collections:
+# - namespace: cache
+# name: cache
+# version: 1.0.{{ cache_version_build }}
+#
+#- name: make sure the cache version list is ignored on a collection version change - {{ test_id }}
+# command: ansible-galaxy collection install cache.cache -s '{{ test_name }}' --force -vvv
+# register: install_cached_update
+# environment:
+# ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+#
+#- name: get result of cache version list is ignored on a collection version change - {{ test_id }}
+# slurp:
+# path: '{{ galaxy_dir }}/ansible_collections/cache/cache/MANIFEST.json'
+# register: install_cached_update_actual
+#
+#- name: assert cache version list is ignored on a collection version change - {{ test_id }}
+# assert:
+# that:
+# - '"Installing ''cache.cache:1.0.{{ cache_version_build }}'' to" in install_cached_update.stdout'
+# - (install_cached_update_actual.content | b64decode | from_json).collection_info.version == '1.0.' ~ cache_version_build
- name: install collection with symlink - {{ test_id }}
command: ansible-galaxy collection install symlink.symlink -s '{{ test_name }}' {{ galaxy_verbosity }}
environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ ANSIBLE_COLLECTIONS_PATHS: '{{ galaxy_dir }}/ansible_collections'
register: install_symlink
- find:
@@ -833,56 +772,6 @@
- install_symlink_actual.results[5].stat.islnk
- install_symlink_actual.results[5].stat.lnk_target == '../REÅDMÈ.md'
-
-# Testing an install from source to check that symlinks to directories
-# are preserved (see issue https://github.com/ansible/ansible/issues/78442)
-- name: symlink_dirs collection install from source test
- block:
-
- - name: create symlink_dirs collection
- command: ansible-galaxy collection init symlink_dirs.symlink_dirs --init-path "{{ galaxy_dir }}/scratch"
-
- - name: create directory in collection
- file:
- path: "{{ galaxy_dir }}/scratch/symlink_dirs/symlink_dirs/folderA"
- state: directory
-
- - name: create symlink to folderA
- file:
- dest: "{{ galaxy_dir }}/scratch/symlink_dirs/symlink_dirs/folderB"
- src: ./folderA
- state: link
- force: yes
-
- - name: install symlink_dirs collection from source
- command: ansible-galaxy collection install {{ galaxy_dir }}/scratch/symlink_dirs/symlink_dirs/
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
- register: install_symlink_dirs
-
- - name: get result of install collection with symlink_dirs - {{ test_id }}
- stat:
- path: '{{ galaxy_dir }}/ansible_collections/symlink_dirs/symlink_dirs/{{ path }}'
- register: install_symlink_dirs_actual
- loop_control:
- loop_var: path
- loop:
- - folderA
- - folderB
-
- - name: assert install collection with symlink_dirs - {{ test_id }}
- assert:
- that:
- - '"Installing ''symlink_dirs.symlink_dirs:1.0.0'' to" in install_symlink_dirs.stdout'
- - install_symlink_dirs_actual.results[0].stat.isdir
- - install_symlink_dirs_actual.results[1].stat.islnk
- - install_symlink_dirs_actual.results[1].stat.lnk_target == './folderA'
- always:
- - name: clean up symlink_dirs collection directory
- file:
- path: "{{ galaxy_dir }}/scratch/symlink_dirs"
- state: absent
-
- name: remove install directory for the next test because parent_dep.parent_collection was installed - {{ test_id }}
file:
path: '{{ galaxy_dir }}/ansible_collections'
@@ -891,7 +780,7 @@
- name: install collection and dep compatible with multiple requirements - {{ test_id }}
command: ansible-galaxy collection install parent_dep.parent_collection parent_dep2.parent_collection
environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ ANSIBLE_COLLECTIONS_PATHS: '{{ galaxy_dir }}/ansible_collections'
register: install_req
- name: assert install collections with ansible-galaxy install - {{ test_id }}
@@ -913,7 +802,7 @@
- name: install a collection to the same installation directory - {{ test_id }}
command: ansible-galaxy collection install namespace1.name1
environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ ANSIBLE_COLLECTIONS_PATHS: '{{ galaxy_dir }}/ansible_collections'
register: install_req
- name: assert installed collections with ansible-galaxy install - {{ test_id }}
@@ -1120,7 +1009,7 @@
args:
chdir: '{{ galaxy_dir }}/scratch'
environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ ANSIBLE_COLLECTIONS_PATHS: '{{ galaxy_dir }}/ansible_collections'
register: install_concrete_pre
- name: get result of install collections with concrete pre-release dep - {{ test_id }}
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/install_offline.yml b/test/integration/targets/ansible-galaxy-collection/tasks/install_offline.yml
index f3b9777c..74c99838 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/install_offline.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/install_offline.yml
@@ -25,14 +25,6 @@
regexp: "^dependencies:*"
line: "dependencies: {'ns.coll2': '>=1.0.0'}"
- - name: create required runtime.yml
- copy:
- dest: "{{ galaxy_dir }}/offline/setup/ns/{{ item }}/meta/runtime.yml"
- content: "requires_ansible: '>=1.0.0'"
- loop:
- - coll1
- - coll2
-
- name: build both collections
command: ansible-galaxy collection build {{ init_dir }}/ns/{{ item }}
args:
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/list.yml b/test/integration/targets/ansible-galaxy-collection/tasks/list.yml
index 1c93d54b..b8d63492 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/list.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/list.yml
@@ -1,4 +1,4 @@
-- name: initialize dev collection structure
+- name: initialize collection structure
command: ansible-galaxy collection init {{ item }} --init-path "{{ galaxy_dir }}/dev/ansible_collections" {{ galaxy_verbosity }}
loop:
- 'dev.collection1'
@@ -8,13 +8,6 @@
- 'dev.collection5'
- 'dev.collection6'
-- name: initialize prod collection structure
- command: ansible-galaxy collection init {{ item }} --init-path "{{ galaxy_dir }}/prod/ansible_collections" {{ galaxy_verbosity }}
- loop:
- - 'prod.collection1'
- - 'prod.collection2'
- - 'prod.collection3'
-
- name: replace the default version of the collections
lineinfile:
path: "{{ galaxy_dir }}/dev/ansible_collections/dev/{{ item.name }}/galaxy.yml"
@@ -60,13 +53,13 @@
- assert:
that:
- - "'dev.collection1 *' in list_result.stdout"
+ - "'dev.collection1 *' in list_result.stdout"
# Note the version displayed is the 'placeholder' string rather than "*" since it is not falsey
- - "'dev.collection2 placeholder' in list_result.stdout"
- - "'dev.collection3 *' in list_result.stdout"
- - "'dev.collection4 *' in list_result.stdout"
- - "'dev.collection5 *' in list_result.stdout"
- - "'dev.collection6 *' in list_result.stdout"
+ - "'dev.collection2 placeholder' in list_result.stdout"
+ - "'dev.collection3 *' in list_result.stdout"
+ - "'dev.collection4 *' in list_result.stdout"
+ - "'dev.collection5 *' in list_result.stdout"
+ - "'dev.collection6 *' in list_result.stdout"
- name: list collections in human format
command: ansible-galaxy collection list --format human
@@ -76,12 +69,12 @@
- assert:
that:
- - "'dev.collection1 *' in list_result_human.stdout"
+ - "'dev.collection1 *' in list_result_human.stdout"
# Note the version displayed is the 'placeholder' string rather than "*" since it is not falsey
- - "'dev.collection2 placeholder' in list_result_human.stdout"
- - "'dev.collection3 *' in list_result_human.stdout"
- - "'dev.collection5 *' in list_result.stdout"
- - "'dev.collection6 *' in list_result.stdout"
+ - "'dev.collection2 placeholder' in list_result_human.stdout"
+ - "'dev.collection3 *' in list_result_human.stdout"
+ - "'dev.collection5 *' in list_result.stdout"
+ - "'dev.collection6 *' in list_result.stdout"
- name: list collections in yaml format
command: ansible-galaxy collection list --format yaml
@@ -91,12 +84,6 @@
- assert:
that:
- - yaml_result[galaxy_dir ~ '/dev/ansible_collections'] != yaml_result[galaxy_dir ~ '/prod/ansible_collections']
- vars:
- yaml_result: '{{ list_result_yaml.stdout | from_yaml }}'
-
-- assert:
- that:
- "item.value | length == 6"
- "item.value['dev.collection1'].version == '*'"
- "item.value['dev.collection2'].version == 'placeholder'"
@@ -104,7 +91,6 @@
- "item.value['dev.collection5'].version == '*'"
- "item.value['dev.collection6'].version == '*'"
with_dict: "{{ list_result_yaml.stdout | from_yaml }}"
- when: "'dev' in item.key"
- name: list collections in json format
command: ansible-galaxy collection list --format json
@@ -121,7 +107,6 @@
- "item.value['dev.collection5'].version == '*'"
- "item.value['dev.collection6'].version == '*'"
with_dict: "{{ list_result_json.stdout | from_json }}"
- when: "'dev' in item.key"
- name: list single collection in json format
command: "ansible-galaxy collection list {{ item.key }} --format json"
@@ -152,7 +137,7 @@
register: list_result_error
ignore_errors: True
environment:
- ANSIBLE_COLLECTIONS_PATH: "i_dont_exist"
+ ANSIBLE_COLLECTIONS_PATH: ""
- assert:
that:
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/main.yml b/test/integration/targets/ansible-galaxy-collection/tasks/main.yml
index e17d6aa1..724c861e 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/main.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/main.yml
@@ -72,12 +72,13 @@
vars:
test_name: '{{ item.name }}'
test_server: '{{ item.server }}'
- test_api_server: '{{ item.api_server|default(item.server) }}'
+ vX: '{{ "v3/" if item.v3|default(false) else "v2/" }}'
loop:
- name: pulp_v2
- api_server: '{{ galaxy_ng_server }}'
- server: '{{ pulp_server }}primary/api/'
- v2: true
+ server: '{{ pulp_server }}published/api/'
+ - name: pulp_v3
+ server: '{{ pulp_server }}published/api/'
+ v3: true
- name: galaxy_ng
server: '{{ galaxy_ng_server }}'
v3: true
@@ -107,9 +108,8 @@
test_id: '{{ item.name }}'
test_name: '{{ item.name }}'
test_server: '{{ item.server }}'
- test_api_server: '{{ item.api_server|default(item.server) }}'
+ vX: '{{ "v3/" if item.v3|default(false) else "v2/" }}'
requires_auth: '{{ item.requires_auth|default(false) }}'
- v2: '{{ item.v2|default(false) }}'
args:
apply:
environment:
@@ -120,9 +120,10 @@
v3: true
requires_auth: true
- name: pulp_v2
- server: '{{ pulp_server }}primary/api/'
- api_server: '{{ galaxy_ng_server }}'
- v2: true
+ server: '{{ pulp_server }}published/api/'
+ - name: pulp_v3
+ server: '{{ pulp_server }}published/api/'
+ v3: true
- name: test installing and downloading collections with the range of supported resolvelib versions
include_tasks: supported_resolvelib.yml
@@ -134,17 +135,6 @@
loop_control:
loop_var: resolvelib_version
-- name: test choosing pinned pre-releases anywhere in the dependency tree
- # This is a regression test for the case when the end-user does not
- # explicitly allow installing pre-release collection versions, but their
- # precise pins are still selected if met among the dependencies, either
- # direct or transitive.
- include_tasks: pinned_pre_releases_in_deptree.yml
-
-- name: test installing prereleases via scm direct requests
- # In this test suite because the bug relies on the dep having versions on a Galaxy server
- include_tasks: virtual_direct_requests.yml
-
- name: publish collection with a dep on another server
setup_collections:
server: secondary
@@ -186,13 +176,13 @@
in install_cross_dep.stdout
# pulp_v2 is highest in the list so it will find it there first
- >-
- "'parent_dep.parent_collection:1.0.0' obtained from server galaxy_ng"
+ "'parent_dep.parent_collection:1.0.0' obtained from server pulp_v2"
in install_cross_dep.stdout
- >-
- "'child_dep.child_collection:0.9.9' obtained from server galaxy_ng"
+ "'child_dep.child_collection:0.9.9' obtained from server pulp_v2"
in install_cross_dep.stdout
- >-
- "'child_dep.child_dep2:1.2.2' obtained from server galaxy_ng"
+ "'child_dep.child_dep2:1.2.2' obtained from server pulp_v2"
in install_cross_dep.stdout
- (install_cross_dep_actual.results[0].content | b64decode | from_json).collection_info.version == '1.0.0'
- (install_cross_dep_actual.results[1].content | b64decode | from_json).collection_info.version == '1.0.0'
@@ -214,9 +204,10 @@
ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
vars:
- test_api_fallback: 'galaxy_ng'
- test_api_fallback_versions: 'v3, pulp-v3, v1'
- test_name: 'pulp_v2'
+ test_api_fallback: 'pulp_v2'
+ test_api_fallback_versions: 'v1, v2'
+ test_name: 'galaxy_ng'
+ test_server: '{{ galaxy_ng_server }}'
- name: run ansible-galaxy collection list tests
include_tasks: list.yml
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/pinned_pre_releases_in_deptree.yml b/test/integration/targets/ansible-galaxy-collection/tasks/pinned_pre_releases_in_deptree.yml
deleted file mode 100644
index 3745fa31..00000000
--- a/test/integration/targets/ansible-galaxy-collection/tasks/pinned_pre_releases_in_deptree.yml
+++ /dev/null
@@ -1,79 +0,0 @@
----
-
-- name: >-
- test that the dependency resolver chooses pre-releases if they are pinned
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
- ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
- block:
- - name: reset installation directory
- file:
- state: "{{ item }}"
- path: "{{ galaxy_dir }}/ansible_collections"
- loop:
- - absent
- - directory
-
- - name: >-
- install collections with pre-release versions in the dependency tree
- command: >-
- ansible-galaxy collection install
- meta_ns_with_transitive_wildcard_dep.meta_name_with_transitive_wildcard_dep
- rc_meta_ns_with_transitive_dev_dep.rc_meta_name_with_transitive_dev_dep:=2.4.5-rc5
- {{ galaxy_verbosity }}
- register: prioritize_direct_req
- - assert:
- that:
- - >-
- "rc_meta_ns_with_transitive_dev_dep.rc_meta_name_with_transitive_dev_dep:2.4.5-rc5 was installed successfully"
- in prioritize_direct_req.stdout
- - >-
- "meta_ns_with_transitive_wildcard_dep.meta_name_with_transitive_wildcard_dep:4.5.6 was installed successfully"
- in prioritize_direct_req.stdout
- - >-
- "ns_with_dev_dep.name_with_dev_dep:6.7.8 was installed successfully"
- in prioritize_direct_req.stdout
- - >-
- "ns_with_wildcard_dep.name_with_wildcard_dep:5.6.7-beta.3 was installed successfully"
- in prioritize_direct_req.stdout
- - >-
- "dev_and_stables_ns.dev_and_stables_name:1.2.3-dev0 was installed successfully"
- in prioritize_direct_req.stdout
-
- - name: cleanup
- file:
- state: "{{ item }}"
- path: "{{ galaxy_dir }}/ansible_collections"
- loop:
- - absent
- - directory
-
- - name: >-
- install collection that only has pre-release versions published
- to the index
- command: >-
- ansible-galaxy collection install
- rc_meta_ns_with_transitive_dev_dep.rc_meta_name_with_transitive_dev_dep:*
- {{ galaxy_verbosity }}
- register: select_pre_release_if_no_stable
- - assert:
- that:
- - >-
- "rc_meta_ns_with_transitive_dev_dep.rc_meta_name_with_transitive_dev_dep:2.4.5-rc5 was installed successfully"
- in select_pre_release_if_no_stable.stdout
- - >-
- "ns_with_dev_dep.name_with_dev_dep:6.7.8 was installed successfully"
- in select_pre_release_if_no_stable.stdout
- - >-
- "dev_and_stables_ns.dev_and_stables_name:1.2.3-dev0 was installed successfully"
- in select_pre_release_if_no_stable.stdout
- always:
- - name: cleanup
- file:
- state: "{{ item }}"
- path: "{{ galaxy_dir }}/ansible_collections"
- loop:
- - absent
- - directory
-
-...
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/publish.yml b/test/integration/targets/ansible-galaxy-collection/tasks/publish.yml
index 1be16ae9..241eae60 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/publish.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/publish.yml
@@ -5,12 +5,9 @@
chdir: '{{ galaxy_dir }}'
register: publish_collection
-- name: ensure we can download the published collection - {{ test_name }}
- command: ansible-galaxy collection install -s {{ test_name }} -p "{{ remote_tmp_dir }}/publish/{{ test_name }}" ansible_test.my_collection==1.0.0 {{ galaxy_verbosity }}
-
- name: get result of publish collection - {{ test_name }}
uri:
- url: '{{ test_api_server }}v3/plugin/ansible/content/primary/collections/index/ansible_test/my_collection/versions/1.0.0/'
+ url: '{{ test_server }}{{ vX }}collections/ansible_test/my_collection/versions/1.0.0/'
return_content: yes
user: '{{ pulp_user }}'
password: '{{ pulp_password }}'
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/supported_resolvelib.yml b/test/integration/targets/ansible-galaxy-collection/tasks/supported_resolvelib.yml
index bff36892..763c5a19 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/supported_resolvelib.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/supported_resolvelib.yml
@@ -20,11 +20,11 @@
- include_tasks: install.yml
vars:
- test_name: galaxy_ng
+ test_name: pulp_v3
test_id: '{{ test_name }} (resolvelib {{ resolvelib_version }})'
- test_server: '{{ galaxy_ng_server }}'
- test_api_server: '{{ galaxy_ng_server }}'
- requires_auth: true
+ test_server: '{{ pulp_server }}published/api/'
+ vX: "v3/"
+ requires_auth: false
args:
apply:
environment:
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml b/test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml
index debd70bc..893ea803 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml
@@ -142,7 +142,7 @@
- directory
- name: install a collection
- command: ansible-galaxy collection install namespace1.name1==0.0.1 {{ galaxy_verbosity }}
+ command: ansible-galaxy collection install namespace1.name1:0.0.1 {{ galaxy_verbosity }}
register: result
failed_when:
- '"namespace1.name1:0.0.1 was installed successfully" not in result.stdout_lines'
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml b/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml
index 0fe2f82d..dfe3d0f7 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml
@@ -3,11 +3,6 @@
args:
chdir: '{{ galaxy_dir }}/scratch'
-- name: created required runtime.yml
- copy:
- content: 'requires_ansible: ">=1.0.0"'
- dest: '{{ galaxy_dir }}/scratch/ansible_test/verify/meta/runtime.yml'
-
- name: build the collection
command: ansible-galaxy collection build scratch/ansible_test/verify
args:
@@ -36,9 +31,6 @@
- name: verify the collection against the first valid server
command: ansible-galaxy collection verify ansible_test.verify:1.0.0 -vvvv {{ galaxy_verbosity }}
register: verify
- vars:
- # This sets a specific precedence that the tests are expecting
- ANSIBLE_GALAXY_SERVER_LIST: offline,secondary,pulp_v2,galaxy_ng
- assert:
that:
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/virtual_direct_requests.yml b/test/integration/targets/ansible-galaxy-collection/tasks/virtual_direct_requests.yml
deleted file mode 100644
index 7b1931f0..00000000
--- a/test/integration/targets/ansible-galaxy-collection/tasks/virtual_direct_requests.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-- environment:
- ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
- vars:
- scm_path: "{{ galaxy_dir }}/scms"
- metadata:
- collection1: |-
- name: collection1
- version: "1.0.0"
- dependencies:
- test_prereleases.collection2: '*'
- collection2: |
- name: collection2
- version: "1.0.0-dev0"
- dependencies: {}
- namespace_boilerplate: |-
- namespace: test_prereleases
- readme: README.md
- authors:
- - "ansible-core"
- description: test prerelease priority with virtual collections
- license:
- - GPL-2.0-or-later
- license_file: ''
- tags: []
- repository: https://github.com/ansible/ansible
- documentation: https://github.com/ansible/ansible
- homepage: https://github.com/ansible/ansible
- issues: https://github.com/ansible/ansible
- build_ignore: []
- block:
- - name: Initialize git repository
- command: 'git init {{ scm_path }}/test_prereleases'
-
- - name: Configure commiter for the repo
- shell: git config user.email ansible-test@ansible.com && git config user.name ansible-test
- args:
- chdir: "{{ scm_path }}/test_prereleases"
-
- - name: Add collections to the repo
- file:
- path: "{{ scm_path }}/test_prereleases/{{ item }}"
- state: directory
- loop:
- - collection1
- - collection2
-
- - name: Add collection metadata
- copy:
- dest: "{{ scm_path }}/test_prereleases/{{ item }}/galaxy.yml"
- content: "{{ metadata[item] + '\n' + metadata['namespace_boilerplate'] }}"
- loop:
- - collection1
- - collection2
-
- - name: Save the changes
- shell: git add . && git commit -m "Add collections to test installing a git repo directly takes priority over indirect Galaxy dep"
- args:
- chdir: '{{ scm_path }}/test_prereleases'
-
- - name: Validate the dependency also exists on Galaxy before test
- command: "ansible-galaxy collection install test_prereleases.collection2"
- register: prereq
- failed_when: '"test_prereleases.collection2:1.0.0 was installed successfully" not in prereq.stdout'
-
- - name: Install collections from source
- command: "ansible-galaxy collection install git+file://{{ scm_path }}/test_prereleases"
- register: prioritize_direct_req
-
- - assert:
- that:
- - '"test_prereleases.collection2:1.0.0-dev0 was installed successfully" in prioritize_direct_req.stdout'
-
- always:
- - name: Clean up test repos
- file:
- path: "{{ scm_path }}"
- state: absent
diff --git a/test/integration/targets/ansible-galaxy-collection/templates/ansible.cfg.j2 b/test/integration/targets/ansible-galaxy-collection/templates/ansible.cfg.j2
index a242979d..9bff527b 100644
--- a/test/integration/targets/ansible-galaxy-collection/templates/ansible.cfg.j2
+++ b/test/integration/targets/ansible-galaxy-collection/templates/ansible.cfg.j2
@@ -1,22 +1,28 @@
[galaxy]
# Ensures subsequent unstable reruns don't use the cached information causing another failure
cache_dir={{ remote_tmp_dir }}/galaxy_cache
-server_list=offline,galaxy_ng,secondary,pulp_v2
+server_list=offline,pulp_v2,pulp_v3,galaxy_ng,secondary
[galaxy_server.offline]
url={{ offline_server }}
[galaxy_server.pulp_v2]
-url={{ pulp_server }}primary/api/
+url={{ pulp_server }}published/api/
+username={{ pulp_user }}
+password={{ pulp_password }}
+
+[galaxy_server.pulp_v3]
+url={{ pulp_server }}published/api/
+v3=true
username={{ pulp_user }}
password={{ pulp_password }}
-api_version=2
[galaxy_server.galaxy_ng]
-url={{ galaxy_ng_server }}content/primary/
+url={{ galaxy_ng_server }}
token={{ galaxy_ng_token.json.token }}
[galaxy_server.secondary]
-url={{ galaxy_ng_server }}content/secondary/
+url={{ pulp_server }}secondary/api/
+v3=true
username={{ pulp_user }}
password={{ pulp_password }}
diff --git a/test/integration/targets/ansible-galaxy-collection/vars/main.yml b/test/integration/targets/ansible-galaxy-collection/vars/main.yml
index 066d2678..175d6696 100644
--- a/test/integration/targets/ansible-galaxy-collection/vars/main.yml
+++ b/test/integration/targets/ansible-galaxy-collection/vars/main.yml
@@ -9,20 +9,17 @@ supported_resolvelib_versions:
- "0.6.0"
- "0.7.0"
- "0.8.0"
- - "0.9.0"
- - "1.0.1"
unsupported_resolvelib_versions:
- "0.2.0" # Fails on import
- "0.5.1"
pulp_repositories:
- - primary
+ - published
- secondary
publish_namespaces:
- ansible_test
- - secondary
collection_list:
# Scenario to test out pre-release being ignored unless explicitly set and version pagination.
@@ -165,41 +162,3 @@ collection_list:
name: parent
dependencies:
namespace1.name1: '*'
-
- # non-prerelease is published to test that installing
- # the pre-release from SCM doesn't accidentally prefer indirect
- # dependencies from Galaxy
- - namespace: test_prereleases
- name: collection2
- version: 1.0.0
-
- - namespace: dev_and_stables_ns
- name: dev_and_stables_name
- version: 1.2.3-dev0
- - namespace: dev_and_stables_ns
- name: dev_and_stables_name
- version: 1.2.4
-
- - namespace: ns_with_wildcard_dep
- name: name_with_wildcard_dep
- version: 5.6.7-beta.3
- dependencies:
- dev_and_stables_ns.dev_and_stables_name: >-
- *
- - namespace: ns_with_dev_dep
- name: name_with_dev_dep
- version: 6.7.8
- dependencies:
- dev_and_stables_ns.dev_and_stables_name: 1.2.3-dev0
-
- - namespace: rc_meta_ns_with_transitive_dev_dep
- name: rc_meta_name_with_transitive_dev_dep
- version: 2.4.5-rc5
- dependencies:
- ns_with_dev_dep.name_with_dev_dep: >-
- *
- - namespace: meta_ns_with_transitive_wildcard_dep
- name: meta_name_with_transitive_wildcard_dep
- version: 4.5.6
- dependencies:
- ns_with_wildcard_dep.name_with_wildcard_dep: 5.6.7-beta.3
diff --git a/test/integration/targets/ansible-galaxy-role/files/create-role-archive.py b/test/integration/targets/ansible-galaxy-role/files/create-role-archive.py
index 48766638..cfd908c1 100755
--- a/test/integration/targets/ansible-galaxy-role/files/create-role-archive.py
+++ b/test/integration/targets/ansible-galaxy-role/files/create-role-archive.py
@@ -2,7 +2,6 @@
"""Create a role archive which overwrites an arbitrary file."""
import argparse
-import os
import pathlib
import tarfile
import tempfile
@@ -19,15 +18,6 @@ def main() -> None:
create_archive(args.archive, args.content, args.target)
-def generate_files_from_path(path):
- if os.path.isdir(path):
- for subpath in os.listdir(path):
- _path = os.path.join(path, subpath)
- yield from generate_files_from_path(_path)
- elif os.path.isfile(path):
- yield pathlib.Path(path)
-
-
def create_archive(archive_path: pathlib.Path, content_path: pathlib.Path, target_path: pathlib.Path) -> None:
with (
tarfile.open(name=archive_path, mode='w') as role_archive,
@@ -45,15 +35,10 @@ def create_archive(archive_path: pathlib.Path, content_path: pathlib.Path, targe
role_archive.add(meta_main_path)
role_archive.add(symlink_path)
- for path in generate_files_from_path(content_path):
- if path == content_path:
- arcname = str(symlink_path)
- else:
- arcname = os.path.join(temp_dir_path, path)
+ content_tarinfo = role_archive.gettarinfo(content_path, str(symlink_path))
- content_tarinfo = role_archive.gettarinfo(path, arcname)
- with path.open('rb') as file_content:
- role_archive.addfile(content_tarinfo, file_content)
+ with content_path.open('rb') as content_file:
+ role_archive.addfile(content_tarinfo, content_file)
if __name__ == '__main__':
diff --git a/test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml b/test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml
index 1c17daf7..c70e8998 100644
--- a/test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml
+++ b/test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml
@@ -23,9 +23,6 @@
command:
cmd: ansible-galaxy role install --roles-path '{{ remote_tmp_dir }}/dir-traversal/roles' dangerous.tar
chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
- environment:
- ANSIBLE_NOCOLOR: True
- ANSIBLE_FORCE_COLOR: False
ignore_errors: true
register: galaxy_install_dangerous
@@ -45,86 +42,3 @@
- dangerous_overwrite_content.content|default('')|b64decode == ''
- not dangerous_overwrite_stat.stat.exists
- galaxy_install_dangerous is failed
- - "'is not a subpath of the role' in (galaxy_install_dangerous.stderr | regex_replace('\n', ' '))"
-
-- name: remove tarfile for next test
- file:
- path: '{{ item }}'
- state: absent
- loop:
- - '{{ remote_tmp_dir }}/dir-traversal/source/dangerous.tar'
- - '{{ remote_tmp_dir }}/dir-traversal/roles/dangerous.tar'
-
-- name: build dangerous dir traversal role that includes .. in the symlink path
- script:
- chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
- cmd: create-role-archive.py dangerous.tar content.txt {{ remote_tmp_dir }}/dir-traversal/source/../target/target-file-to-overwrite.txt
- executable: '{{ ansible_playbook_python }}'
-
-- name: install dangerous role
- command:
- cmd: 'ansible-galaxy role install --roles-path {{ remote_tmp_dir }}/dir-traversal/roles dangerous.tar'
- chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
- environment:
- ANSIBLE_NOCOLOR: True
- ANSIBLE_FORCE_COLOR: False
- ignore_errors: true
- register: galaxy_install_dangerous
-
-- name: check for overwritten file
- stat:
- path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
- register: dangerous_overwrite_stat
-
-- name: get overwritten content
- slurp:
- path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
- register: dangerous_overwrite_content
- when: dangerous_overwrite_stat.stat.exists
-
-- assert:
- that:
- - dangerous_overwrite_content.content|default('')|b64decode == ''
- - not dangerous_overwrite_stat.stat.exists
- - galaxy_install_dangerous is failed
- - "'is not a subpath of the role' in (galaxy_install_dangerous.stderr | regex_replace('\n', ' '))"
-
-- name: remove tarfile for next test
- file:
- path: '{{ remote_tmp_dir }}/dir-traversal/source/dangerous.tar'
- state: absent
-
-- name: build dangerous dir traversal role that includes .. in the relative symlink path
- script:
- chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
- cmd: create-role-archive.py dangerous_rel.tar content.txt ../context.txt
-
-- name: install dangerous role with relative symlink
- command:
- cmd: 'ansible-galaxy role install --roles-path {{ remote_tmp_dir }}/dir-traversal/roles dangerous_rel.tar'
- chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
- environment:
- ANSIBLE_NOCOLOR: True
- ANSIBLE_FORCE_COLOR: False
- ignore_errors: true
- register: galaxy_install_dangerous
-
-- name: check for symlink outside role
- stat:
- path: "{{ remote_tmp_dir | realpath }}/dir-traversal/roles/symlink"
- register: symlink_outside_role
-
-- assert:
- that:
- - not symlink_outside_role.stat.exists
- - galaxy_install_dangerous is failed
- - "'is not a subpath of the role' in (galaxy_install_dangerous.stderr | regex_replace('\n', ' '))"
-
-- name: remove test directories
- file:
- path: '{{ remote_tmp_dir }}/dir-traversal/{{ item }}'
- state: absent
- loop:
- - source
- - target
- - roles
diff --git a/test/integration/targets/ansible-galaxy-role/tasks/main.yml b/test/integration/targets/ansible-galaxy-role/tasks/main.yml
index 5f88a557..b39df11c 100644
--- a/test/integration/targets/ansible-galaxy-role/tasks/main.yml
+++ b/test/integration/targets/ansible-galaxy-role/tasks/main.yml
@@ -25,18 +25,10 @@
- name: Valid role archive
command: "tar cf {{ remote_tmp_dir }}/valid-role.tar {{ remote_tmp_dir }}/role.d"
-- name: Add invalid symlink
- file:
- state: link
- src: "~/invalid"
+- name: Invalid file
+ copy:
+ content: ""
dest: "{{ remote_tmp_dir }}/role.d/tasks/~invalid.yml"
- force: yes
-
-- name: Add another invalid symlink
- file:
- state: link
- src: "/"
- dest: "{{ remote_tmp_dir }}/role.d/tasks/invalid$name.yml"
- name: Valid requirements file
copy:
@@ -69,4 +61,3 @@
command: ansible-galaxy role remove invalid-testrole
- import_tasks: dir-traversal.yml
-- import_tasks: valid-role-symlinks.yml
diff --git a/test/integration/targets/ansible-galaxy-role/tasks/valid-role-symlinks.yml b/test/integration/targets/ansible-galaxy-role/tasks/valid-role-symlinks.yml
deleted file mode 100644
index 8a60b2ef..00000000
--- a/test/integration/targets/ansible-galaxy-role/tasks/valid-role-symlinks.yml
+++ /dev/null
@@ -1,78 +0,0 @@
-- name: create test directories
- file:
- path: '{{ remote_tmp_dir }}/dir-traversal/{{ item }}'
- state: directory
- loop:
- - source
- - target
- - roles
-
-- name: create subdir in the role content to test relative symlinks
- file:
- dest: '{{ remote_tmp_dir }}/dir-traversal/source/role_subdir'
- state: directory
-
-- copy:
- dest: '{{ remote_tmp_dir }}/dir-traversal/source/role_subdir/.keep'
- content: ''
-
-- set_fact:
- installed_roles: "{{ remote_tmp_dir | realpath }}/dir-traversal/roles"
-
-- name: build role with symlink to a directory in the role
- script:
- chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
- cmd: create-role-archive.py safe-link-dir.tar ./ role_subdir/..
- executable: '{{ ansible_playbook_python }}'
-
-- name: install role successfully
- command:
- cmd: 'ansible-galaxy role install --roles-path {{ remote_tmp_dir }}/dir-traversal/roles safe-link-dir.tar'
- chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
- register: galaxy_install_ok
-
-- name: check for the directory symlink in the role
- stat:
- path: "{{ installed_roles }}/safe-link-dir.tar/symlink"
- register: symlink_in_role
-
-- assert:
- that:
- - symlink_in_role.stat.exists
- - symlink_in_role.stat.lnk_source == installed_roles + '/safe-link-dir.tar'
-
-- name: remove tarfile for next test
- file:
- path: '{{ remote_tmp_dir }}/dir-traversal/source/safe-link-dir.tar'
- state: absent
-
-- name: build role with safe relative symlink
- script:
- chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
- cmd: create-role-archive.py safe.tar ./ role_subdir/../context.txt
- executable: '{{ ansible_playbook_python }}'
-
-- name: install role successfully
- command:
- cmd: 'ansible-galaxy role install --roles-path {{ remote_tmp_dir }}/dir-traversal/roles safe.tar'
- chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
- register: galaxy_install_ok
-
-- name: check for symlink in role
- stat:
- path: "{{ installed_roles }}/safe.tar/symlink"
- register: symlink_in_role
-
-- assert:
- that:
- - symlink_in_role.stat.exists
- - symlink_in_role.stat.lnk_source == installed_roles + '/safe.tar/context.txt'
-
-- name: remove test directories
- file:
- path: '{{ remote_tmp_dir }}/dir-traversal/{{ item }}'
- state: absent
- loop:
- - source
- - target
- - roles
diff --git a/test/integration/targets/ansible-galaxy/files/testserver.py b/test/integration/targets/ansible-galaxy/files/testserver.py
index 8cca6a83..13598507 100644
--- a/test/integration/targets/ansible-galaxy/files/testserver.py
+++ b/test/integration/targets/ansible-galaxy/files/testserver.py
@@ -1,15 +1,20 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import http.server
-import socketserver
+import sys
import ssl
if __name__ == '__main__':
- Handler = http.server.SimpleHTTPRequestHandler
- context = ssl.SSLContext()
- context.load_cert_chain(certfile='./cert.pem', keyfile='./key.pem')
- httpd = socketserver.TCPServer(("", 4443), Handler)
- httpd.socket = context.wrap_socket(httpd.socket, server_side=True)
+ if sys.version_info[0] >= 3:
+ import http.server
+ import socketserver
+ Handler = http.server.SimpleHTTPRequestHandler
+ httpd = socketserver.TCPServer(("", 4443), Handler)
+ else:
+ import BaseHTTPServer
+ import SimpleHTTPServer
+ Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
+ httpd = BaseHTTPServer.HTTPServer(("", 4443), Handler)
+ httpd.socket = ssl.wrap_socket(httpd.socket, certfile='./cert.pem', keyfile='./key.pem', server_side=True)
httpd.serve_forever()
diff --git a/test/integration/targets/ansible-galaxy/runme.sh b/test/integration/targets/ansible-galaxy/runme.sh
index fcd826c3..7d966e29 100755
--- a/test/integration/targets/ansible-galaxy/runme.sh
+++ b/test/integration/targets/ansible-galaxy/runme.sh
@@ -61,13 +61,10 @@ f_ansible_galaxy_create_role_repo_post()
git add .
git commit -m "local testing ansible galaxy role"
- # NOTE: `HEAD` is used because the newer Git versions create
- # NOTE: `main` by default and the older ones differ. We
- # NOTE: want to avoid hardcoding them.
git archive \
--format=tar \
--prefix="${repo_name}/" \
- HEAD > "${repo_tar}"
+ master > "${repo_tar}"
# Configure basic (insecure) HTTPS-accessible repository
galaxy_local_test_role_http_repo="${galaxy_webserver_root}/${galaxy_local_test_role}.git"
if [[ ! -d "${galaxy_local_test_role_http_repo}" ]]; then
@@ -357,7 +354,7 @@ pushd "${galaxy_testdir}"
popd # ${galaxy_testdir}
f_ansible_galaxy_status \
- "role info non-existent role"
+ "role info non-existant role"
mkdir -p "${role_testdir}"
pushd "${role_testdir}"
diff --git a/test/integration/targets/ansible-inventory/files/complex.ini b/test/integration/targets/ansible-inventory/files/complex.ini
deleted file mode 100644
index 227d9ea8..00000000
--- a/test/integration/targets/ansible-inventory/files/complex.ini
+++ /dev/null
@@ -1,35 +0,0 @@
-ihavenogroup
-
-[all]
-hostinall
-
-[all:vars]
-ansible_connection=local
-
-[test_group1]
-test1 myvar=something
-test2 myvar=something2
-test3
-
-[test_group2]
-test1
-test4
-test5
-
-[test_group3]
-test2 othervar=stuff
-test3
-test6
-
-[parent_1:children]
-test_group1
-
-[parent_2:children]
-test_group1
-
-[parent_3:children]
-test_group2
-test_group3
-
-[parent_3]
-test2
diff --git a/test/integration/targets/ansible-inventory/files/valid_sample.yml b/test/integration/targets/ansible-inventory/files/valid_sample.yml
index b8e7b882..477f82f2 100644
--- a/test/integration/targets/ansible-inventory/files/valid_sample.yml
+++ b/test/integration/targets/ansible-inventory/files/valid_sample.yml
@@ -4,4 +4,4 @@ all:
hosts:
something:
foo: bar
- ungrouped: {}
+ ungrouped: {} \ No newline at end of file
diff --git a/test/integration/targets/ansible-inventory/filter_plugins/toml.py b/test/integration/targets/ansible-inventory/filter_plugins/toml.py
deleted file mode 100644
index 997173c4..00000000
--- a/test/integration/targets/ansible-inventory/filter_plugins/toml.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# (c) 2017, Matt Martz <matt@sivel.net>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import functools
-
-from ansible.plugins.inventory.toml import HAS_TOML, toml_dumps
-try:
- from ansible.plugins.inventory.toml import toml
-except ImportError:
- pass
-
-from ansible.errors import AnsibleFilterError
-from ansible.module_utils.common.text.converters import to_text
-from ansible.module_utils.common._collections_compat import MutableMapping
-from ansible.module_utils.six import string_types
-
-
-def _check_toml(func):
- @functools.wraps(func)
- def inner(o):
- if not HAS_TOML:
- raise AnsibleFilterError('The %s filter plugin requires the python "toml" library' % func.__name__)
- return func(o)
- return inner
-
-
-@_check_toml
-def from_toml(o):
- if not isinstance(o, string_types):
- raise AnsibleFilterError('from_toml requires a string, got %s' % type(o))
- return toml.loads(to_text(o, errors='surrogate_or_strict'))
-
-
-@_check_toml
-def to_toml(o):
- if not isinstance(o, MutableMapping):
- raise AnsibleFilterError('to_toml requires a dict, got %s' % type(o))
- return to_text(toml_dumps(o), errors='surrogate_or_strict')
-
-
-class FilterModule(object):
- def filters(self):
- return {
- 'to_toml': to_toml,
- 'from_toml': from_toml
- }
diff --git a/test/integration/targets/ansible-inventory/tasks/json_output.yml b/test/integration/targets/ansible-inventory/tasks/json_output.yml
deleted file mode 100644
index 26520612..00000000
--- a/test/integration/targets/ansible-inventory/tasks/json_output.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-- block:
- - name: check baseline
- command: ansible-inventory -i '{{ role_path }}/files/valid_sample.yml' --list
- register: limited
-
- - name: ensure non empty host list
- assert:
- that:
- - "'something' in inv['_meta']['hostvars']"
-
- - name: check that limit removes host
- command: ansible-inventory -i '{{ role_path }}/files/valid_sample.yml' --limit '!something' --list
- register: limited
-
- - name: ensure empty host list
- assert:
- that:
- - "'something' not in inv['_meta']['hostvars']"
-
- - name: check dupes
- command: ansible-inventory -i '{{ role_path }}/files/complex.ini' --list
- register: limited
-
- - name: ensure host only appears on directly assigned
- assert:
- that:
- - "'hosts' not in inv['parent_1']"
- - "'hosts' not in inv['parent_2']"
- - "'hosts' in inv['parent_3']"
- - "'test1' in inv['test_group1']['hosts']"
- vars:
- inv: '{{limited.stdout|from_json }}'
- delegate_to: localhost
diff --git a/test/integration/targets/ansible-inventory/tasks/main.yml b/test/integration/targets/ansible-inventory/tasks/main.yml
index c3459c12..84ac2c3c 100644
--- a/test/integration/targets/ansible-inventory/tasks/main.yml
+++ b/test/integration/targets/ansible-inventory/tasks/main.yml
@@ -145,10 +145,3 @@
loop_control:
loop_var: toml_package
when: toml_package is not contains 'tomllib' or (toml_package is contains 'tomllib' and ansible_facts.python.version_info >= [3, 11])
-
-
-- include_tasks: "{{item}}_output.yml"
- loop:
- - json
- - yaml
- - toml
diff --git a/test/integration/targets/ansible-inventory/tasks/toml_output.yml b/test/integration/targets/ansible-inventory/tasks/toml_output.yml
deleted file mode 100644
index 1e5df9aa..00000000
--- a/test/integration/targets/ansible-inventory/tasks/toml_output.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-- name: only test if have toml in python
- command: "{{ansible_playbook_python}} -c 'import toml'"
- ignore_errors: true
- delegate_to: localhost
- register: has_toml
-
-- block:
- - name: check baseline
- command: ansible-inventory -i '{{ role_path }}/files/valid_sample.yml' --list --toml
- register: limited
-
- - name: ensure non empty host list
- assert:
- that:
- - "'something' in inv['somegroup']['hosts']"
-
- - name: check that limit removes host
- command: ansible-inventory -i '{{ role_path }}/files/valid_sample.yml' --limit '!something' --list --toml
- register: limited
- ignore_errors: true
-
- - name: ensure empty host list
- assert:
- that:
- - limited is failed
-
- - name: check dupes
- command: ansible-inventory -i '{{ role_path }}/files/complex.ini' --list --toml
- register: limited
-
- - debug: var=inv
-
- - name: ensure host only appears on directly assigned
- assert:
- that:
- - "'hosts' not in inv['parent_1']"
- - "'hosts' not in inv['parent_2']"
- - "'hosts' in inv['parent_3']"
- - "'test1' in inv['test_group1']['hosts']"
- vars:
- inv: '{{limited.stdout|from_toml}}'
- when: has_toml is success
- delegate_to: localhost
diff --git a/test/integration/targets/ansible-inventory/tasks/yaml_output.yml b/test/integration/targets/ansible-inventory/tasks/yaml_output.yml
deleted file mode 100644
index d41a8d0c..00000000
--- a/test/integration/targets/ansible-inventory/tasks/yaml_output.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-- block:
- - name: check baseline
- command: ansible-inventory -i '{{ role_path }}/files/valid_sample.yml' --list --yaml
- register: limited
-
- - name: ensure something in host list
- assert:
- that:
- - "'something' in inv['all']['children']['somegroup']['hosts']"
-
- - name: check that limit removes host
- command: ansible-inventory -i '{{ role_path }}/files/valid_sample.yml' --limit '!something' --list --yaml
- register: limited
-
- - name: ensure empty host list
- assert:
- that:
- - not inv
-
- - name: check dupes
- command: ansible-inventory -i '{{ role_path }}/files/complex.ini' --list --yaml
- register: limited
-
- - name: ensure host only appears on directly assigned
- assert:
- that:
- - "'hosts' not in inv['all']['children']['parent_1']"
- - "'hosts' not in inv['all']['children']['parent_2']"
- - "'hosts' in inv['all']['children']['parent_3']"
- - "'test1' in inv['all']['children']['parent_1']['children']['test_group1']['hosts']"
- - "'hosts' not in inv['all']['children']['parent_2']['children']['test_group1']"
- vars:
- inv: '{{limited.stdout|from_yaml}}'
- delegate_to: localhost
diff --git a/test/integration/targets/ansible-playbook-callbacks/aliases b/test/integration/targets/ansible-playbook-callbacks/aliases
deleted file mode 100644
index d88a7fa6..00000000
--- a/test/integration/targets/ansible-playbook-callbacks/aliases
+++ /dev/null
@@ -1,4 +0,0 @@
-shippable/posix/group3
-context/controller
-needs/target/setup_remote_tmp_dir
-needs/target/support-callback_plugins
diff --git a/test/integration/targets/ansible-playbook-callbacks/all-callbacks.yml b/test/integration/targets/ansible-playbook-callbacks/all-callbacks.yml
deleted file mode 100644
index 85a53c74..00000000
--- a/test/integration/targets/ansible-playbook-callbacks/all-callbacks.yml
+++ /dev/null
@@ -1,123 +0,0 @@
-- hosts: localhost
- gather_facts: false
- vars_prompt:
- name: vars_prompt_var
- default: hamsandwich
- handlers:
- - name: handler1
- debug:
- msg: handler1
-
- - debug:
- msg: listen1
- listen:
- - listen1
- roles:
- - setup_remote_tmp_dir
- tasks:
- - name: ok
- debug:
- msg: ok
-
- - name: changed
- debug:
- msg: changed
- changed_when: true
-
- - name: skipped
- debug:
- msg: skipped
- when: false
-
- - name: failed
- debug:
- msg: failed
- failed_when: true
- ignore_errors: true
-
- - name: unreachable
- ping:
- delegate_to: example.org
- ignore_unreachable: true
- vars:
- ansible_timeout: 1
-
- - name: loop
- debug:
- ignore_errors: true
- changed_when: '{{ item.changed }}'
- failed_when: '{{ item.failed }}'
- when: '{{ item.when }}'
- loop:
- # ok
- - changed: false
- failed: false
- when: true
- # changed
- - changed: true
- failed: false
- when: true
- # failed
- - changed: false
- failed: true
- when: true
- # skipped
- - changed: false
- failed: false
- when: false
-
- - name: notify handler1
- debug:
- msg: notify handler1
- changed_when: true
- notify:
- - handler1
-
- - name: notify listen1
- debug:
- msg: notify listen1
- changed_when: true
- notify:
- - listen1
-
- - name: retry ok
- debug:
- register: result
- until: result.attempts == 2
- retries: 1
- delay: 0
-
- - name: retry failed
- debug:
- register: result
- until: result.attempts == 3
- retries: 1
- delay: 0
- ignore_errors: true
-
- - name: async poll ok
- command: sleep 3
- async: 5
- poll: 2
-
- - name: async poll failed
- shell: sleep 3; false
- async: 5
- poll: 2
- ignore_errors: true
-
- - include_tasks: include_me.yml
-
- - name: diff
- copy:
- content: diff
- dest: '{{ remote_tmp_dir }}/diff.txt'
- diff: true
-
-- hosts: i_dont_exist
-
-- hosts: localhost
- gather_facts: false
- max_fail_percentage: 0
- tasks:
- - fail:
diff --git a/test/integration/targets/ansible-playbook-callbacks/callbacks_list.expected b/test/integration/targets/ansible-playbook-callbacks/callbacks_list.expected
deleted file mode 100644
index 1d064a23..00000000
--- a/test/integration/targets/ansible-playbook-callbacks/callbacks_list.expected
+++ /dev/null
@@ -1,24 +0,0 @@
- 1 __init__
-92 v2_on_any
- 1 v2_on_file_diff
- 4 v2_playbook_on_handler_task_start
- 2 v2_playbook_on_include
- 1 v2_playbook_on_no_hosts_matched
- 3 v2_playbook_on_notify
- 3 v2_playbook_on_play_start
- 1 v2_playbook_on_start
- 1 v2_playbook_on_stats
-19 v2_playbook_on_task_start
- 1 v2_playbook_on_vars_prompt
- 1 v2_runner_item_on_failed
- 2 v2_runner_item_on_ok
- 1 v2_runner_item_on_skipped
- 1 v2_runner_on_async_failed
- 1 v2_runner_on_async_ok
- 2 v2_runner_on_async_poll
- 5 v2_runner_on_failed
-16 v2_runner_on_ok
- 1 v2_runner_on_skipped
-23 v2_runner_on_start
- 1 v2_runner_on_unreachable
- 2 v2_runner_retry
diff --git a/test/integration/targets/ansible-playbook-callbacks/runme.sh b/test/integration/targets/ansible-playbook-callbacks/runme.sh
deleted file mode 100755
index 933863e5..00000000
--- a/test/integration/targets/ansible-playbook-callbacks/runme.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-set -eux
-
-export ANSIBLE_CALLBACK_PLUGINS=../support-callback_plugins/callback_plugins
-export ANSIBLE_ROLES_PATH=../
-export ANSIBLE_STDOUT_CALLBACK=callback_debug
-export ANSIBLE_HOST_PATTERN_MISMATCH=warning
-
-ansible-playbook all-callbacks.yml 2>/dev/null | sort | uniq -c | tee callbacks_list.out
-
-diff -w callbacks_list.out callbacks_list.expected
diff --git a/test/integration/targets/ansible-pull/pull-integration-test/conn_secret.yml b/test/integration/targets/ansible-pull/pull-integration-test/conn_secret.yml
deleted file mode 100644
index f8849730..00000000
--- a/test/integration/targets/ansible-pull/pull-integration-test/conn_secret.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-- hosts: localhost
- gather_facts: false
- tasks:
- - ping: data='{{ansible_password}}'
- register: dumb
- vars:
- ansible_python_interpreter: '{{ansible_playbook_python}}'
-
- - name: If we got here, password was passed!
- assert:
- that:
- - "dumb.ping == 'Testing123'"
diff --git a/test/integration/targets/ansible-pull/pull-integration-test/secret_connection_password b/test/integration/targets/ansible-pull/pull-integration-test/secret_connection_password
deleted file mode 100644
index 44e6a2c4..00000000
--- a/test/integration/targets/ansible-pull/pull-integration-test/secret_connection_password
+++ /dev/null
@@ -1 +0,0 @@
-Testing123
diff --git a/test/integration/targets/ansible-pull/runme.sh b/test/integration/targets/ansible-pull/runme.sh
index b591b283..582e8099 100755
--- a/test/integration/targets/ansible-pull/runme.sh
+++ b/test/integration/targets/ansible-pull/runme.sh
@@ -36,8 +36,7 @@ function pass_tests {
fi
# test for https://github.com/ansible/ansible/issues/13681
- # match play default output stats, was matching limit + docker
- if grep -E '127\.0\.0\.1\s*: ok=' "${temp_log}"; then
+ if grep -E '127\.0\.0\.1.*ok' "${temp_log}"; then
cat "${temp_log}"
echo "Found host 127.0.0.1 in output. Only localhost should be present."
exit 1
@@ -87,7 +86,5 @@ ANSIBLE_CONFIG='' ansible-pull -d "${pull_dir}" -U "${repo_dir}" "$@" multi_play
pass_tests_multi
-ANSIBLE_CONFIG='' ansible-pull -d "${pull_dir}" -U "${repo_dir}" conn_secret.yml --connection-password-file "${repo_dir}/secret_connection_password" "$@"
-
# fail if we try do delete /var/tmp
ANSIBLE_CONFIG='' ansible-pull -d var/tmp -U "${repo_dir}" --purge "$@"
diff --git a/test/integration/targets/ansible-runner/aliases b/test/integration/targets/ansible-runner/aliases
index f4caffd1..13e7d785 100644
--- a/test/integration/targets/ansible-runner/aliases
+++ b/test/integration/targets/ansible-runner/aliases
@@ -1,4 +1,5 @@
shippable/posix/group5
context/controller
+skip/osx
skip/macos
skip/freebsd
diff --git a/test/integration/targets/ansible-runner/files/adhoc_example1.py b/test/integration/targets/ansible-runner/files/adhoc_example1.py
index fe7f9446..ab24bcad 100644
--- a/test/integration/targets/ansible-runner/files/adhoc_example1.py
+++ b/test/integration/targets/ansible-runner/files/adhoc_example1.py
@@ -2,6 +2,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
+import os
import sys
import ansible_runner
diff --git a/test/integration/targets/ansible-test-cloud-foreman/aliases b/test/integration/targets/ansible-test-cloud-foreman/aliases
new file mode 100644
index 00000000..a4bdcea6
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-foreman/aliases
@@ -0,0 +1,3 @@
+cloud/foreman
+shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-foreman/tasks/main.yml b/test/integration/targets/ansible-test-cloud-foreman/tasks/main.yml
new file mode 100644
index 00000000..4170d83e
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-foreman/tasks/main.yml
@@ -0,0 +1,6 @@
+- name: Verify endpoints respond
+ uri:
+ url: "{{ item }}"
+ validate_certs: no
+ with_items:
+ - http://{{ ansible_env.FOREMAN_HOST }}:{{ ansible_env.FOREMAN_PORT }}/ping
diff --git a/test/integration/targets/ansible-test-cloud-openshift/aliases b/test/integration/targets/ansible-test-cloud-openshift/aliases
index b714e82c..6e32db7b 100644
--- a/test/integration/targets/ansible-test-cloud-openshift/aliases
+++ b/test/integration/targets/ansible-test-cloud-openshift/aliases
@@ -1,4 +1,4 @@
cloud/openshift
shippable/generic/group1
-disabled # the container crashes when using a non-default network on some docker hosts (such as Ubuntu 20.04)
+disabled # disabled due to requirements conflict: botocore 1.20.6 has requirement urllib3<1.27,>=1.25.4, but you have urllib3 1.24.3.
context/controller
diff --git a/test/integration/targets/ansible-test-cloud-openshift/tasks/main.yml b/test/integration/targets/ansible-test-cloud-openshift/tasks/main.yml
index 6acb67dc..c3b51904 100644
--- a/test/integration/targets/ansible-test-cloud-openshift/tasks/main.yml
+++ b/test/integration/targets/ansible-test-cloud-openshift/tasks/main.yml
@@ -1,13 +1,6 @@
-- name: Load kubeconfig
- include_vars: "{{ lookup('env', 'K8S_AUTH_KUBECONFIG') }}"
-
-- name: Verify endpoints exist
- assert:
- that: clusters
-
- name: Verify endpoints respond
uri:
- url: "{{ item.cluster.server }}"
+ url: "{{ item }}"
validate_certs: no
with_items:
- - "{{ clusters }}"
+ - https://openshift-origin:8443/
diff --git a/test/integration/targets/ansible-test-cloud-vcenter/aliases b/test/integration/targets/ansible-test-cloud-vcenter/aliases
new file mode 100644
index 00000000..0cd8ad20
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-vcenter/aliases
@@ -0,0 +1,3 @@
+cloud/vcenter
+shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-vcenter/tasks/main.yml b/test/integration/targets/ansible-test-cloud-vcenter/tasks/main.yml
new file mode 100644
index 00000000..49e5c16a
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-vcenter/tasks/main.yml
@@ -0,0 +1,6 @@
+- name: Verify endpoints respond
+ uri:
+ url: "{{ item }}"
+ validate_certs: no
+ with_items:
+ - http://{{ vcenter_hostname }}:5000/ # control endpoint for the simulator
diff --git a/test/integration/targets/ansible-test-container/runme.py b/test/integration/targets/ansible-test-container/runme.py
index 3c86b6dd..8ff48e0d 100755
--- a/test/integration/targets/ansible-test-container/runme.py
+++ b/test/integration/targets/ansible-test-container/runme.py
@@ -996,7 +996,7 @@ class AptBootstrapper(Bootstrapper):
@classmethod
def install_podman(cls) -> bool:
"""Return True if podman will be installed."""
- return not (os_release.id == 'ubuntu' and os_release.version_id in {'20.04', '22.04'})
+ return not (os_release.id == 'ubuntu' and os_release.version_id == '20.04')
@classmethod
def install_docker(cls) -> bool:
@@ -1053,14 +1053,13 @@ class ApkBootstrapper(Bootstrapper):
# crun added as podman won't install it as dep if runc is present
# but we don't want runc as it fails
# The edge `crun` package installed below requires ip6tables, and in
- # edge, the `iptables` package includes `ip6tables`, but in 3.18 they
- # are separate. Remove `ip6tables` once we update to 3.19.
+ # edge, the `iptables` package includes `ip6tables`, but in 3.16 they
+ # are separate.
packages = ['docker', 'podman', 'openssl', 'crun', 'ip6tables']
run_command('apk', 'add', *packages)
- # 3.18 only contains crun 1.8.4, to get 1.9.2 to resolve the run/shm issue, install crun from 3.19
- # Remove once we update to 3.19
- run_command('apk', 'upgrade', '-U', '--repository=http://dl-cdn.alpinelinux.org/alpine/v3.19/community', 'crun')
+ # 3.16 only contains crun 1.4.5, to get 1.9.2 to resolve the run/shm issue, install crun from edge
+ run_command('apk', 'upgrade', '-U', '--repository=http://dl-cdn.alpinelinux.org/alpine/edge/community', 'crun')
run_command('service', 'docker', 'start')
run_command('modprobe', 'tun')
diff --git a/test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor1.py b/test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor1.py
index f662b97d..f59b9091 100644
--- a/test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor1.py
+++ b/test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor1.py
@@ -16,10 +16,10 @@ RETURN = '''#'''
from ansible.plugins.lookup import LookupBase
# noinspection PyUnresolvedReferences
-from ansible.plugins import loader # import the loader to verify it works when the collection loader has already been loaded # pylint: disable=unused-import
+from ansible.plugins import loader # import the loader to verify it works when the collection loader has already been loaded
try:
- import demo # pylint: disable=unused-import
+ import demo
except ImportError:
pass
else:
diff --git a/test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor2.py b/test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor2.py
index 38860b03..22b4236a 100644
--- a/test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor2.py
+++ b/test/integration/targets/ansible-test-sanity-import/ansible_collections/ns/col/plugins/lookup/vendor2.py
@@ -16,10 +16,10 @@ RETURN = '''#'''
from ansible.plugins.lookup import LookupBase
# noinspection PyUnresolvedReferences
-from ansible.plugins import loader # import the loader to verify it works when the collection loader has already been loaded # pylint: disable=unused-import
+from ansible.plugins import loader # import the loader to verify it works when the collection loader has already been loaded
try:
- import demo # pylint: disable=unused-import
+ import demo
except ImportError:
pass
else:
diff --git a/test/integration/targets/ansible-test-sanity-import/expected.txt b/test/integration/targets/ansible-test-sanity-import/expected.txt
deleted file mode 100644
index ab41fd78..00000000
--- a/test/integration/targets/ansible-test-sanity-import/expected.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-plugins/lookup/stderr.py:0:0: stderr: unwanted stderr
-plugins/lookup/stdout.py:0:0: stdout: unwanted stdout
diff --git a/test/integration/targets/ansible-test-sanity-import/runme.sh b/test/integration/targets/ansible-test-sanity-import/runme.sh
index a49a71a0..a12e3e3f 100755
--- a/test/integration/targets/ansible-test-sanity-import/runme.sh
+++ b/test/integration/targets/ansible-test-sanity-import/runme.sh
@@ -1,29 +1,7 @@
#!/usr/bin/env bash
-set -eu
-
-# Create test scenarios at runtime that do not pass sanity tests.
-# This avoids the need to create ignore entries for the tests.
-
-mkdir -p ansible_collections/ns/col/plugins/lookup
-
-(
- cd ansible_collections/ns/col/plugins/lookup
-
- echo "import sys; sys.stdout.write('unwanted stdout')" > stdout.py # stdout: unwanted stdout
- echo "import sys; sys.stderr.write('unwanted stderr')" > stderr.py # stderr: unwanted stderr
-)
-
source ../collection/setup.sh
-# Run regular import sanity tests.
-
-ansible-test sanity --test import --color --failure-ok --lint --python "${ANSIBLE_TEST_PYTHON_VERSION}" "${@}" 1> actual-stdout.txt 2> actual-stderr.txt
-diff -u "${TEST_DIR}/expected.txt" actual-stdout.txt
-grep -f "${TEST_DIR}/expected.txt" actual-stderr.txt
-
-# Run import sanity tests which require modifications to the source directory.
-
vendor_dir="$(python -c 'import pathlib, ansible._vendor; print(pathlib.Path(ansible._vendor.__file__).parent)')"
cleanup() {
diff --git a/test/integration/targets/ansible-test-sanity-no-get-exception/aliases b/test/integration/targets/ansible-test-sanity-no-get-exception/aliases
deleted file mode 100644
index 7741d444..00000000
--- a/test/integration/targets/ansible-test-sanity-no-get-exception/aliases
+++ /dev/null
@@ -1,4 +0,0 @@
-shippable/posix/group3 # runs in the distro test containers
-shippable/generic/group1 # runs in the default test container
-context/controller
-needs/target/collection
diff --git a/test/integration/targets/ansible-test-sanity-no-get-exception/ansible_collections/ns/col/do-not-check-me.py b/test/integration/targets/ansible-test-sanity-no-get-exception/ansible_collections/ns/col/do-not-check-me.py
deleted file mode 100644
index ca252699..00000000
--- a/test/integration/targets/ansible-test-sanity-no-get-exception/ansible_collections/ns/col/do-not-check-me.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from ansible.module_utils.pycompat24 import get_exception
-
-
-def do_stuff():
- get_exception()
diff --git a/test/integration/targets/ansible-test-sanity-no-get-exception/ansible_collections/ns/col/plugins/modules/check-me.py b/test/integration/targets/ansible-test-sanity-no-get-exception/ansible_collections/ns/col/plugins/modules/check-me.py
deleted file mode 100644
index ca252699..00000000
--- a/test/integration/targets/ansible-test-sanity-no-get-exception/ansible_collections/ns/col/plugins/modules/check-me.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from ansible.module_utils.pycompat24 import get_exception
-
-
-def do_stuff():
- get_exception()
diff --git a/test/integration/targets/ansible-test-sanity-no-get-exception/expected.txt b/test/integration/targets/ansible-test-sanity-no-get-exception/expected.txt
deleted file mode 100644
index 4c432cb1..00000000
--- a/test/integration/targets/ansible-test-sanity-no-get-exception/expected.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-plugins/modules/check-me.py:1:44: do not use `get_exception`
-plugins/modules/check-me.py:5:4: do not use `get_exception`
diff --git a/test/integration/targets/ansible-test-sanity-no-get-exception/runme.sh b/test/integration/targets/ansible-test-sanity-no-get-exception/runme.sh
deleted file mode 100755
index b8ec2d04..00000000
--- a/test/integration/targets/ansible-test-sanity-no-get-exception/runme.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-set -eu
-
-source ../collection/setup.sh
-
-set -x
-
-ansible-test sanity --test no-get-exception --color --lint --failure-ok "${@}" > actual.txt
-
-diff -u "${TEST_DIR}/expected.txt" actual.txt
-diff -u do-not-check-me.py plugins/modules/check-me.py
diff --git a/test/integration/targets/ansible-test-sanity-pylint/aliases b/test/integration/targets/ansible-test-sanity-pylint/aliases
deleted file mode 100644
index 7741d444..00000000
--- a/test/integration/targets/ansible-test-sanity-pylint/aliases
+++ /dev/null
@@ -1,4 +0,0 @@
-shippable/posix/group3 # runs in the distro test containers
-shippable/generic/group1 # runs in the default test container
-context/controller
-needs/target/collection
diff --git a/test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/galaxy.yml b/test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/galaxy.yml
deleted file mode 100644
index 53a77279..00000000
--- a/test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/galaxy.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace: ns
-name: col
-version:
-readme: README.rst
-authors:
- - Ansible
diff --git a/test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/plugins/lookup/deprecated.py b/test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/plugins/lookup/deprecated.py
deleted file mode 100644
index b7908b6c..00000000
--- a/test/integration/targets/ansible-test-sanity-pylint/ansible_collections/ns/col/plugins/lookup/deprecated.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-DOCUMENTATION = '''
-name: deprecated
-short_description: lookup
-description: Lookup.
-author:
- - Ansible Core Team
-'''
-
-EXAMPLES = '''#'''
-RETURN = '''#'''
-
-from ansible.plugins.lookup import LookupBase
-
-
-class LookupModule(LookupBase):
- def run(self, **kwargs):
- return []
diff --git a/test/integration/targets/ansible-test-sanity-pylint/expected.txt b/test/integration/targets/ansible-test-sanity-pylint/expected.txt
deleted file mode 100644
index df7bbc20..00000000
--- a/test/integration/targets/ansible-test-sanity-pylint/expected.txt
+++ /dev/null
@@ -1 +0,0 @@
-plugins/lookup/deprecated.py:27:0: collection-deprecated-version: Deprecated version ('2.0.0') found in call to Display.deprecated or AnsibleModule.deprecate
diff --git a/test/integration/targets/ansible-test-sanity-pylint/runme.sh b/test/integration/targets/ansible-test-sanity-pylint/runme.sh
deleted file mode 100755
index 72190bfa..00000000
--- a/test/integration/targets/ansible-test-sanity-pylint/runme.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env bash
-
-set -eu
-
-source ../collection/setup.sh
-
-# Create test scenarios at runtime that do not pass sanity tests.
-# This avoids the need to create ignore entries for the tests.
-
-echo "
-from ansible.utils.display import Display
-
-display = Display()
-display.deprecated('', version='2.0.0', collection_name='ns.col')" >> plugins/lookup/deprecated.py
-
-# Verify deprecation checking works for normal releases and pre-releases.
-
-for version in 2.0.0 2.0.0-dev0; do
- echo "Checking version: ${version}"
- sed "s/^version:.*\$/version: ${version}/" < galaxy.yml > galaxy.yml.tmp
- mv galaxy.yml.tmp galaxy.yml
- ansible-test sanity --test pylint --color --failure-ok --lint "${@}" 1> actual-stdout.txt 2> actual-stderr.txt
- diff -u "${TEST_DIR}/expected.txt" actual-stdout.txt
- grep -f "${TEST_DIR}/expected.txt" actual-stderr.txt
-done
diff --git a/test/integration/targets/ansible-test-sanity-replace-urlopen/aliases b/test/integration/targets/ansible-test-sanity-replace-urlopen/aliases
deleted file mode 100644
index 7741d444..00000000
--- a/test/integration/targets/ansible-test-sanity-replace-urlopen/aliases
+++ /dev/null
@@ -1,4 +0,0 @@
-shippable/posix/group3 # runs in the distro test containers
-shippable/generic/group1 # runs in the default test container
-context/controller
-needs/target/collection
diff --git a/test/integration/targets/ansible-test-sanity-replace-urlopen/ansible_collections/ns/col/do-not-check-me.py b/test/integration/targets/ansible-test-sanity-replace-urlopen/ansible_collections/ns/col/do-not-check-me.py
deleted file mode 100644
index 9b9c7e69..00000000
--- a/test/integration/targets/ansible-test-sanity-replace-urlopen/ansible_collections/ns/col/do-not-check-me.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import urllib.request
-
-
-def do_stuff():
- urllib.request.urlopen('https://www.ansible.com/')
diff --git a/test/integration/targets/ansible-test-sanity-replace-urlopen/ansible_collections/ns/col/plugins/modules/check-me.py b/test/integration/targets/ansible-test-sanity-replace-urlopen/ansible_collections/ns/col/plugins/modules/check-me.py
deleted file mode 100644
index 9b9c7e69..00000000
--- a/test/integration/targets/ansible-test-sanity-replace-urlopen/ansible_collections/ns/col/plugins/modules/check-me.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import urllib.request
-
-
-def do_stuff():
- urllib.request.urlopen('https://www.ansible.com/')
diff --git a/test/integration/targets/ansible-test-sanity-replace-urlopen/expected.txt b/test/integration/targets/ansible-test-sanity-replace-urlopen/expected.txt
deleted file mode 100644
index 4dd1bfb0..00000000
--- a/test/integration/targets/ansible-test-sanity-replace-urlopen/expected.txt
+++ /dev/null
@@ -1 +0,0 @@
-plugins/modules/check-me.py:5:20: use `ansible.module_utils.urls.open_url` instead of `urlopen`
diff --git a/test/integration/targets/ansible-test-sanity-replace-urlopen/runme.sh b/test/integration/targets/ansible-test-sanity-replace-urlopen/runme.sh
deleted file mode 100755
index e6637c57..00000000
--- a/test/integration/targets/ansible-test-sanity-replace-urlopen/runme.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-set -eu
-
-source ../collection/setup.sh
-
-set -x
-
-ansible-test sanity --test replace-urlopen --color --lint --failure-ok "${@}" > actual.txt
-
-diff -u "${TEST_DIR}/expected.txt" actual.txt
-diff -u do-not-check-me.py plugins/modules/check-me.py
diff --git a/test/integration/targets/ansible-test-sanity-use-compat-six/aliases b/test/integration/targets/ansible-test-sanity-use-compat-six/aliases
deleted file mode 100644
index 7741d444..00000000
--- a/test/integration/targets/ansible-test-sanity-use-compat-six/aliases
+++ /dev/null
@@ -1,4 +0,0 @@
-shippable/posix/group3 # runs in the distro test containers
-shippable/generic/group1 # runs in the default test container
-context/controller
-needs/target/collection
diff --git a/test/integration/targets/ansible-test-sanity-use-compat-six/ansible_collections/ns/col/do-not-check-me.py b/test/integration/targets/ansible-test-sanity-use-compat-six/ansible_collections/ns/col/do-not-check-me.py
deleted file mode 100644
index 7f7f9f58..00000000
--- a/test/integration/targets/ansible-test-sanity-use-compat-six/ansible_collections/ns/col/do-not-check-me.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import six
-
-
-def do_stuff():
- assert six.text_type
diff --git a/test/integration/targets/ansible-test-sanity-use-compat-six/ansible_collections/ns/col/plugins/modules/check-me.py b/test/integration/targets/ansible-test-sanity-use-compat-six/ansible_collections/ns/col/plugins/modules/check-me.py
deleted file mode 100644
index 7f7f9f58..00000000
--- a/test/integration/targets/ansible-test-sanity-use-compat-six/ansible_collections/ns/col/plugins/modules/check-me.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import six
-
-
-def do_stuff():
- assert six.text_type
diff --git a/test/integration/targets/ansible-test-sanity-use-compat-six/expected.txt b/test/integration/targets/ansible-test-sanity-use-compat-six/expected.txt
deleted file mode 100644
index 42ba83ba..00000000
--- a/test/integration/targets/ansible-test-sanity-use-compat-six/expected.txt
+++ /dev/null
@@ -1 +0,0 @@
-plugins/modules/check-me.py:1:1: use `ansible.module_utils.six` instead of `six`
diff --git a/test/integration/targets/ansible-test-sanity-use-compat-six/runme.sh b/test/integration/targets/ansible-test-sanity-use-compat-six/runme.sh
deleted file mode 100755
index dbd38f9f..00000000
--- a/test/integration/targets/ansible-test-sanity-use-compat-six/runme.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-set -eu
-
-source ../collection/setup.sh
-
-set -x
-
-ansible-test sanity --test use-compat-six --color --lint --failure-ok "${@}" > actual.txt
-
-diff -u "${TEST_DIR}/expected.txt" actual.txt
-diff -u do-not-check-me.py plugins/modules/check-me.py
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/meta/runtime.yml b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/meta/runtime.yml
deleted file mode 100644
index 7c4b25dd..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/meta/runtime.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-plugin_routing:
- modules:
- module:
- action_plugin: ns.col.action
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/lookup/import_order_lookup.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/lookup/import_order_lookup.py
deleted file mode 100644
index 5a1f0ece..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/lookup/import_order_lookup.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import annotations
-
-from ansible.plugins.lookup import LookupBase
-
-DOCUMENTATION = """
-name: import_order_lookup
-short_description: Import order lookup
-description: Import order lookup.
-"""
-
-
-class LookupModule(LookupBase):
- def run(self, terms, variables=None, **kwargs):
- return []
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_1.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_1.py
deleted file mode 100644
index 1b23b490..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_1.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-DOCUMENTATION = '''
-module: check_mode_attribute_1
-short_description: Test for check mode attribute 1
-description: Test for check mode attribute 1.
-author:
- - Ansible Core Team
-extends_documentation_fragment:
- - ansible.builtin.action_common_attributes
-attributes:
- check_mode:
- # doc says full support, code says none
- support: full
- diff_mode:
- support: none
- platform:
- platforms: all
-'''
-
-EXAMPLES = '''#'''
-RETURN = ''''''
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-if __name__ == '__main__':
- module = AnsibleModule(argument_spec=dict(), supports_check_mode=False)
- module.exit_json()
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_2.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_2.py
deleted file mode 100644
index 0687e9f0..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_2.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-DOCUMENTATION = '''
-module: check_mode_attribute_2
-short_description: Test for check mode attribute 2
-description: Test for check mode attribute 2.
-author:
- - Ansible Core Team
-extends_documentation_fragment:
- - ansible.builtin.action_common_attributes
-attributes:
- check_mode:
- # doc says partial support, code says none
- support: partial
- details: Whatever this means.
- diff_mode:
- support: none
- platform:
- platforms: all
-'''
-
-EXAMPLES = '''#'''
-RETURN = ''''''
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-if __name__ == '__main__':
- module = AnsibleModule(argument_spec=dict(), supports_check_mode=False)
- module.exit_json()
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_3.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_3.py
deleted file mode 100644
index 61226e68..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_3.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-DOCUMENTATION = '''
-module: check_mode_attribute_3
-short_description: Test for check mode attribute 3
-description: Test for check mode attribute 3.
-author:
- - Ansible Core Team
-extends_documentation_fragment:
- - ansible.builtin.action_common_attributes
-attributes:
- check_mode:
- # doc says no support, code says some
- support: none
- diff_mode:
- support: none
- platform:
- platforms: all
-'''
-
-EXAMPLES = '''#'''
-RETURN = ''''''
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-if __name__ == '__main__':
- module = AnsibleModule(argument_spec=dict(), supports_check_mode=True)
- module.exit_json()
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_4.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_4.py
deleted file mode 100644
index 1cb78137..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_4.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-DOCUMENTATION = '''
-module: check_mode_attribute_4
-short_description: Test for check mode attribute 4
-description: Test for check mode attribute 4.
-author:
- - Ansible Core Team
-extends_documentation_fragment:
- - ansible.builtin.action_common_attributes
-attributes:
- check_mode:
- # documentation says some support, but no details
- support: partial
- diff_mode:
- support: none
- platform:
- platforms: all
-'''
-
-EXAMPLES = '''#'''
-RETURN = ''''''
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-if __name__ == '__main__':
- module = AnsibleModule(argument_spec=dict(), supports_check_mode=True)
- module.exit_json()
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_5.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_5.py
deleted file mode 100644
index a8d85562..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_5.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-DOCUMENTATION = '''
-module: check_mode_attribute_5
-short_description: Test for check mode attribute 5
-description: Test for check mode attribute 5.
-author:
- - Ansible Core Team
-extends_documentation_fragment:
- - ansible.builtin.action_common_attributes
-attributes:
- check_mode:
- # Everything is correct: both docs and code claim no support
- support: none
- diff_mode:
- support: none
- platform:
- platforms: all
-'''
-
-EXAMPLES = '''#'''
-RETURN = ''''''
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-if __name__ == '__main__':
- module = AnsibleModule(argument_spec=dict(), supports_check_mode=False)
- module.exit_json()
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_6.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_6.py
deleted file mode 100644
index cd5a4fb1..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_6.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-DOCUMENTATION = '''
-module: check_mode_attribute_6
-short_description: Test for check mode attribute 6
-description: Test for check mode attribute 6.
-author:
- - Ansible Core Team
-extends_documentation_fragment:
- - ansible.builtin.action_common_attributes
-attributes:
- check_mode:
- # Everything is correct: docs says partial support *with details*, code claims (at least some) support
- support: partial
- details: Some details.
- diff_mode:
- support: none
- platform:
- platforms: all
-'''
-
-EXAMPLES = '''#'''
-RETURN = ''''''
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-if __name__ == '__main__':
- module = AnsibleModule(argument_spec=dict(), supports_check_mode=True)
- module.exit_json()
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_7.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_7.py
deleted file mode 100644
index 73d976c2..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_7.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-DOCUMENTATION = '''
-module: check_mode_attribute_7
-short_description: Test for check mode attribute 7
-description: Test for check mode attribute 7.
-author:
- - Ansible Core Team
-extends_documentation_fragment:
- - ansible.builtin.action_common_attributes
-attributes:
- check_mode:
- # Everything is correct: docs says full support, code claims (at least some) support
- support: full
- diff_mode:
- support: none
- platform:
- platforms: all
-'''
-
-EXAMPLES = '''#'''
-RETURN = ''''''
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-if __name__ == '__main__':
- module = AnsibleModule(argument_spec=dict(), supports_check_mode=True)
- module.exit_json()
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/import_order.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/import_order.py
deleted file mode 100644
index f4f3c9b8..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/import_order.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-
-__metaclass__ = type
-
-from ansible.module_utils.basic import AnsibleModule
-
-DOCUMENTATION = '''
-module: import_order
-short_description: Import order test module
-description: Import order test module.
-author:
- - Ansible Core Team
-'''
-
-EXAMPLES = '''#'''
-RETURN = ''''''
-
-
-if __name__ == '__main__':
- module = AnsibleModule(argument_spec=dict())
- module.exit_json()
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/semantic_markup.py b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/semantic_markup.py
deleted file mode 100644
index 587731d6..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/semantic_markup.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/python
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-
-__metaclass__ = type
-
-DOCUMENTATION = r'''
-module: semantic_markup
-short_description: Test semantic markup
-description:
- - Test semantic markup.
- - RV(does.not.exist=true).
-
-author:
- - Ansible Core Team
-
-options:
- foo:
- description:
- - Test.
- type: str
-
- a1:
- description:
- - O(foo)
- - O(foo=bar)
- - O(foo[1]=bar)
- - O(ignore:bar=baz)
- - O(ansible.builtin.copy#module:path=/)
- - V(foo)
- - V(bar(1\\2\)3)
- - V(C(foo\)).
- - E(env(var\))
- - RV(ansible.builtin.copy#module:backup)
- - RV(bar=baz)
- - RV(ignore:bam)
- - RV(ignore:bam.bar=baz)
- - RV(bar).
- - P(ansible.builtin.file#lookup)
- type: str
-
- a2:
- description: V(C\(foo\)).
- type: str
-
- a3:
- description: RV(bam).
- type: str
-
- a4:
- description: P(foo.bar#baz).
- type: str
-
- a5:
- description: P(foo.bar.baz).
- type: str
-
- a6:
- description: P(foo.bar.baz#woof).
- type: str
-
- a7:
- description: E(foo\(bar).
- type: str
-
- a8:
- description: O(bar).
- type: str
-
- a9:
- description: O(bar=bam).
- type: str
-
- a10:
- description: O(foo.bar=1).
- type: str
-
- a11:
- description: Something with suboptions.
- type: dict
- suboptions:
- b1:
- description:
- - V(C\(foo\)).
- - RV(bam).
- - P(foo.bar#baz).
- - P(foo.bar.baz).
- - P(foo.bar.baz#woof).
- - E(foo\(bar).
- - O(bar).
- - O(bar=bam).
- - O(foo.bar=1).
- type: str
-'''
-
-EXAMPLES = '''#'''
-
-RETURN = r'''
-bar:
- description: Bar.
- type: int
- returned: success
- sample: 5
-'''
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-if __name__ == '__main__':
- module = AnsibleModule(argument_spec=dict(
- foo=dict(),
- a1=dict(),
- a2=dict(),
- a3=dict(),
- a4=dict(),
- a5=dict(),
- a6=dict(),
- a7=dict(),
- a8=dict(),
- a9=dict(),
- a10=dict(),
- a11=dict(type='dict', options=dict(
- b1=dict(),
- ))
- ))
- module.exit_json()
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/sidecar.yaml b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/sidecar.yaml
index 4ca20efb..c2575422 100644
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/sidecar.yaml
+++ b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/sidecar.yaml
@@ -17,9 +17,6 @@ DOCUMENTATION:
default: foo
author:
- Ansible Core Team
- seealso:
- - plugin: ns.col.import_order_lookup
- plugin_type: lookup
EXAMPLES: |
- name: example for sidecar
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/failure/README.md b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/failure/README.md
index d158a987..bf1003fa 100644
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/failure/README.md
+++ b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/failure/README.md
@@ -1,4 +1,3 @@
README
------
-
This is a simple collection used to test failures with ``ansible-test sanity --test validate-modules``.
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/failure/meta/runtime.yml b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/failure/meta/runtime.yml
deleted file mode 100644
index 7c163fea..00000000
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/failure/meta/runtime.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-plugin_routing:
- lookup:
- lookup:
- action_plugin: invalid
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/ps_only/README.md b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/ps_only/README.md
index 9c1c1c34..bbdd5138 100644
--- a/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/ps_only/README.md
+++ b/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/ps_only/README.md
@@ -1,4 +1,3 @@
README
------
-
This is a simple PowerShell-only collection used to verify that ``ansible-test`` works on a collection.
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/expected.txt b/test/integration/targets/ansible-test-sanity-validate-modules/expected.txt
index ca6e52a3..95f12f39 100644
--- a/test/integration/targets/ansible-test-sanity-validate-modules/expected.txt
+++ b/test/integration/targets/ansible-test-sanity-validate-modules/expected.txt
@@ -1,26 +1,5 @@
-plugins/lookup/import_order_lookup.py:5:0: import-before-documentation: Import found before documentation variables. All imports must appear below DOCUMENTATION/EXAMPLES/RETURN.
-plugins/modules/check_mode_attribute_1.py:0:0: attributes-check-mode: The module does not declare support for check mode, but the check_mode attribute's support value is 'full' and not 'none'
-plugins/modules/check_mode_attribute_2.py:0:0: attributes-check-mode: The module does not declare support for check mode, but the check_mode attribute's support value is 'partial' and not 'none'
-plugins/modules/check_mode_attribute_3.py:0:0: attributes-check-mode: The module does declare support for check mode, but the check_mode attribute's support value is 'none'
-plugins/modules/check_mode_attribute_4.py:0:0: attributes-check-mode-details: The module declares it does not fully support check mode, but has no details on what exactly that means
-plugins/modules/import_order.py:8:0: import-before-documentation: Import found before documentation variables. All imports must appear below DOCUMENTATION/EXAMPLES/RETURN.
plugins/modules/invalid_yaml_syntax.py:0:0: deprecation-mismatch: "meta/runtime.yml" and DOCUMENTATION.deprecation do not agree.
plugins/modules/invalid_yaml_syntax.py:0:0: missing-documentation: No DOCUMENTATION provided
plugins/modules/invalid_yaml_syntax.py:8:15: documentation-syntax-error: DOCUMENTATION is not valid YAML
plugins/modules/invalid_yaml_syntax.py:12:15: invalid-examples: EXAMPLES is not valid YAML
plugins/modules/invalid_yaml_syntax.py:16:15: return-syntax-error: RETURN is not valid YAML
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.0: While parsing "V(C\(" at index 1: Unnecessarily escaped "(" @ data['options']['a11']['suboptions']['b1']['description'][0]. Got 'V(C\\(foo\\)).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.2: While parsing "P(foo.bar#baz)" at index 1: Plugin name "foo.bar" is not a FQCN @ data['options']['a11']['suboptions']['b1']['description'][2]. Got 'P(foo.bar#baz).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.3: While parsing "P(foo.bar.baz)" at index 1: Parameter "foo.bar.baz" is not of the form FQCN#type @ data['options']['a11']['suboptions']['b1']['description'][3]. Got 'P(foo.bar.baz).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.4: Directive "P(foo.bar.baz#woof)" must contain a valid plugin type; found "woof" @ data['options']['a11']['suboptions']['b1']['description'][4]. Got 'P(foo.bar.baz#woof).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.5: While parsing "E(foo\(" at index 1: Unnecessarily escaped "(" @ data['options']['a11']['suboptions']['b1']['description'][5]. Got 'E(foo\\(bar).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a2.description: While parsing "V(C\(" at index 1: Unnecessarily escaped "(" for dictionary value @ data['options']['a2']['description']. Got 'V(C\\(foo\\)).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a4.description: While parsing "P(foo.bar#baz)" at index 1: Plugin name "foo.bar" is not a FQCN for dictionary value @ data['options']['a4']['description']. Got 'P(foo.bar#baz).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a5.description: While parsing "P(foo.bar.baz)" at index 1: Parameter "foo.bar.baz" is not of the form FQCN#type for dictionary value @ data['options']['a5']['description']. Got 'P(foo.bar.baz).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a6.description: Directive "P(foo.bar.baz#woof)" must contain a valid plugin type; found "woof" for dictionary value @ data['options']['a6']['description']. Got 'P(foo.bar.baz#woof).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a7.description: While parsing "E(foo\(" at index 1: Unnecessarily escaped "(" for dictionary value @ data['options']['a7']['description']. Got 'E(foo\\(bar).'
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "O(bar)" contains a non-existing option "bar"
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "O(bar=bam)" contains a non-existing option "bar"
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "O(foo.bar=1)" contains a non-existing option "foo.bar"
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "RV(bam)" contains a non-existing return value "bam"
-plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "RV(does.not.exist=true)" contains a non-existing return value "does.not.exist"
diff --git a/test/integration/targets/ansible-test-sanity-validate-modules/runme.sh b/test/integration/targets/ansible-test-sanity-validate-modules/runme.sh
index 5e2365ab..e0299969 100755
--- a/test/integration/targets/ansible-test-sanity-validate-modules/runme.sh
+++ b/test/integration/targets/ansible-test-sanity-validate-modules/runme.sh
@@ -6,17 +6,7 @@ set -eux
ansible-test sanity --test validate-modules --color --truncate 0 --failure-ok --lint "${@}" 1> actual-stdout.txt 2> actual-stderr.txt
diff -u "${TEST_DIR}/expected.txt" actual-stdout.txt
-grep -F -f "${TEST_DIR}/expected.txt" actual-stderr.txt
-
-cd ../col
-ansible-test sanity --test runtime-metadata
-
-cd ../failure
-if ansible-test sanity --test runtime-metadata 2>&1 | tee out.txt; then
- echo "runtime-metadata in failure should be invalid"
- exit 1
-fi
-grep out.txt -e 'extra keys not allowed'
+grep -f "${TEST_DIR}/expected.txt" actual-stderr.txt
cd ../ps_only
diff --git a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/README.md b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/README.md
index 67b8a83b..d8138d3b 100644
--- a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/README.md
+++ b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/README.md
@@ -1,4 +1,3 @@
README
------
-
This is a simple collection used to verify that ``ansible-test`` works on a collection.
diff --git a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/meta/runtime.yml b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/meta/runtime.yml
index 76ead137..fee22ad8 100644
--- a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/meta/runtime.yml
+++ b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/meta/runtime.yml
@@ -2,11 +2,4 @@ requires_ansible: '>=2.11' # force ansible-doc to check the Ansible version (re
plugin_routing:
modules:
hi:
- redirect: ns.col2.hello
- hiya:
- redirect: ns.col2.package.subdir.hiya
- module_utils:
- hi:
- redirect: ansible_collections.ns.col2.plugins.module_utils
- hello:
- redirect: ns.col2.hiya
+ redirect: hello
diff --git a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/plugin_utils/check_pylint.py b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/filter/check_pylint.py
index 1fe4dfad..f1be4f34 100644
--- a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/plugin_utils/check_pylint.py
+++ b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/filter/check_pylint.py
@@ -9,10 +9,15 @@ __metaclass__ = type
# syntax-error: Cannot import 'string' due to syntax error 'invalid syntax (&lt;unknown&gt;, line 109)'
# Python 3.9 fails with astroid 2.2.5 but works on 2.3.3
# syntax-error: Cannot import 'string' due to syntax error 'invalid syntax (&lt;unknown&gt;, line 104)'
-import string # pylint: disable=unused-import
+import string
# Python 3.9 fails with pylint 2.3.1 or 2.4.4 with astroid 2.3.3 but works with pylint 2.5.0 and astroid 2.4.0
# 'Call' object has no attribute 'value'
result = {None: None}[{}.get('something')]
+# pylint 2.3.1 and 2.4.4 report the following error but 2.5.0 and 2.6.0 do not
+# blacklisted-name: Black listed name "foo"
+# see: https://github.com/PyCQA/pylint/issues/3701
+# regression: documented as a known issue and removed from ignore.txt so pylint can be upgraded to 2.6.0
+# if future versions of pylint fix this issue then the ignore should be restored
foo = {}.keys()
diff --git a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/bad.py b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/bad.py
index 16e0bc88..580f9d87 100644
--- a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/bad.py
+++ b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/bad.py
@@ -19,9 +19,9 @@ EXAMPLES = '''
RETURN = ''' # '''
from ansible.plugins.lookup import LookupBase
-from ansible import constants # pylint: disable=unused-import
+from ansible import constants
-import lxml # pylint: disable=unused-import
+import lxml
class LookupModule(LookupBase):
diff --git a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/world.py b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/world.py
index 5cdd0966..dbb479a7 100644
--- a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/world.py
+++ b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/lookup/world.py
@@ -19,7 +19,7 @@ EXAMPLES = '''
RETURN = ''' # '''
from ansible.plugins.lookup import LookupBase
-from ansible import constants # pylint: disable=unused-import
+from ansible import constants
class LookupModule(LookupBase):
diff --git a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/modules/bad.py b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/modules/bad.py
index 8780e356..e79613bb 100644
--- a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/modules/bad.py
+++ b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/modules/bad.py
@@ -19,7 +19,7 @@ EXAMPLES = '''
RETURN = ''''''
from ansible.module_utils.basic import AnsibleModule
-from ansible import constants # intentionally trigger pylint ansible-bad-module-import error # pylint: disable=unused-import
+from ansible import constants # intentionally trigger pylint ansible-bad-module-import error
def main():
diff --git a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/random_directory/bad.py b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/random_directory/bad.py
index e34d1c37..2e35cf85 100644
--- a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/random_directory/bad.py
+++ b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/random_directory/bad.py
@@ -5,4 +5,4 @@ __metaclass__ = type
# This is not an allowed import, but since this file is in a plugins/ subdirectory that is not checked,
# the import sanity test will not complain.
-import lxml # pylint: disable=unused-import
+import lxml
diff --git a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py
index a5d896f7..82215438 100644
--- a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py
+++ b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py
@@ -4,12 +4,12 @@ __metaclass__ = type
import tempfile
try:
- import urllib2 # intentionally trigger pylint ansible-bad-import error # pylint: disable=unused-import
+ import urllib2 # intentionally trigger pylint ansible-bad-import error
except ImportError:
urllib2 = None
try:
- from urllib2 import Request # intentionally trigger pylint ansible-bad-import-from error # pylint: disable=unused-import
+ from urllib2 import Request # intentionally trigger pylint ansible-bad-import-from error
except ImportError:
Request = None
diff --git a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/sanity/ignore.txt b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/sanity/ignore.txt
index dcbe827c..e1b3f4ca 100644
--- a/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/sanity/ignore.txt
+++ b/test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/sanity/ignore.txt
@@ -1,7 +1,6 @@
plugins/modules/bad.py import
plugins/modules/bad.py pylint:ansible-bad-module-import
plugins/lookup/bad.py import
-plugins/plugin_utils/check_pylint.py pylint:disallowed-name
tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function
tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import
tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from
diff --git a/test/integration/targets/ansible-test-sanity/runme.sh b/test/integration/targets/ansible-test-sanity/runme.sh
index 92584958..233db741 100755
--- a/test/integration/targets/ansible-test-sanity/runme.sh
+++ b/test/integration/targets/ansible-test-sanity/runme.sh
@@ -1,11 +1,5 @@
#!/usr/bin/env bash
-set -eux
-
-ansible-test sanity --color --allow-disabled -e "${@}"
-
-set +x
-
source ../collection/setup.sh
set -x
diff --git a/test/integration/targets/ansible-test-units-assertions/aliases b/test/integration/targets/ansible-test-units-assertions/aliases
deleted file mode 100644
index f25bc677..00000000
--- a/test/integration/targets/ansible-test-units-assertions/aliases
+++ /dev/null
@@ -1,4 +0,0 @@
-shippable/generic/group1 # runs in the default test container
-context/controller
-needs/target/collection
-needs/target/ansible-test
diff --git a/test/integration/targets/ansible-test-units-assertions/ansible_collections/ns/col/tests/unit/plugins/modules/test_assertion.py b/test/integration/targets/ansible-test-units-assertions/ansible_collections/ns/col/tests/unit/plugins/modules/test_assertion.py
deleted file mode 100644
index e1722004..00000000
--- a/test/integration/targets/ansible-test-units-assertions/ansible_collections/ns/col/tests/unit/plugins/modules/test_assertion.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-def test_assertion():
- assert dict(yes=True) == dict(no=False)
diff --git a/test/integration/targets/ansible-test-units-assertions/runme.sh b/test/integration/targets/ansible-test-units-assertions/runme.sh
deleted file mode 100755
index 86fe5c81..00000000
--- a/test/integration/targets/ansible-test-units-assertions/runme.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-
-source ../collection/setup.sh
-
-set -x
-
-options=$("${TEST_DIR}"/../ansible-test/venv-pythons.py --only-versions)
-IFS=', ' read -r -a pythons <<< "${options}"
-
-for python in "${pythons[@]}"; do
- if ansible-test units --truncate 0 --python "${python}" --requirements "${@}" 2>&1 | tee pytest.log; then
- echo "Test did not fail as expected."
- exit 1
- fi
-
- if [ "${python}" = "2.7" ]; then
- grep "^E *AssertionError$" pytest.log
- else
-
- grep "^E *AssertionError: assert {'yes': True} == {'no': False}$" pytest.log
- fi
-done
diff --git a/test/integration/targets/ansible-test-units-forked/aliases b/test/integration/targets/ansible-test-units-forked/aliases
deleted file mode 100644
index 79d7dbd7..00000000
--- a/test/integration/targets/ansible-test-units-forked/aliases
+++ /dev/null
@@ -1,5 +0,0 @@
-shippable/posix/group3 # runs in the distro test containers
-shippable/generic/group1 # runs in the default test container
-context/controller
-needs/target/collection
-needs/target/ansible-test
diff --git a/test/integration/targets/ansible-test-units-forked/ansible_collections/ns/col/tests/unit/plugins/modules/test_ansible_forked.py b/test/integration/targets/ansible-test-units-forked/ansible_collections/ns/col/tests/unit/plugins/modules/test_ansible_forked.py
deleted file mode 100644
index 828099c6..00000000
--- a/test/integration/targets/ansible-test-units-forked/ansible_collections/ns/col/tests/unit/plugins/modules/test_ansible_forked.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""Unit tests to verify the functionality of the ansible-forked pytest plugin."""
-from __future__ import absolute_import, division, print_function
-
-__metaclass__ = type
-
-import os
-import pytest
-import signal
-import sys
-import warnings
-
-
-warnings.warn("This verifies that warnings generated during test collection are reported.")
-
-
-@pytest.mark.xfail
-def test_kill_xfail():
- os.kill(os.getpid(), signal.SIGKILL) # causes pytest to report stdout and stderr
-
-
-def test_kill():
- os.kill(os.getpid(), signal.SIGKILL) # causes pytest to report stdout and stderr
-
-
-@pytest.mark.xfail
-def test_exception_xfail():
- sys.stdout.write("This stdout message should be hidden due to xfail.")
- sys.stderr.write("This stderr message should be hidden due to xfail.")
- raise Exception("This error is expected, but should be hidden due to xfail.")
-
-
-def test_exception():
- sys.stdout.write("This stdout message should be reported since we're throwing an exception.")
- sys.stderr.write("This stderr message should be reported since we're throwing an exception.")
- raise Exception("This error is expected and should be visible.")
-
-
-def test_warning():
- warnings.warn("This verifies that warnings generated at test time are reported.")
-
-
-def test_passed():
- pass
diff --git a/test/integration/targets/ansible-test-units-forked/runme.sh b/test/integration/targets/ansible-test-units-forked/runme.sh
deleted file mode 100755
index c39f3c49..00000000
--- a/test/integration/targets/ansible-test-units-forked/runme.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env bash
-
-source ../collection/setup.sh
-
-set -x
-
-options=$("${TEST_DIR}"/../ansible-test/venv-pythons.py --only-versions)
-IFS=', ' read -r -a pythons <<< "${options}"
-
-for python in "${pythons[@]}"; do
- echo "*** Checking Python ${python} ***"
-
- if ansible-test units --truncate 0 --target-python "venv/${python}" "${@}" > output.log 2>&1 ; then
- cat output.log
- echo "Unit tests on Python ${python} did not fail as expected. See output above."
- exit 1
- fi
-
- cat output.log
- echo "Unit tests on Python ${python} failed as expected. See output above. Checking for expected output ..."
-
- # Verify that the appropriate tests pased, failed or xfailed.
- grep 'PASSED tests/unit/plugins/modules/test_ansible_forked.py::test_passed' output.log
- grep 'PASSED tests/unit/plugins/modules/test_ansible_forked.py::test_warning' output.log
- grep 'XFAIL tests/unit/plugins/modules/test_ansible_forked.py::test_kill_xfail' output.log
- grep 'FAILED tests/unit/plugins/modules/test_ansible_forked.py::test_kill' output.log
- grep 'FAILED tests/unit/plugins/modules/test_ansible_forked.py::test_exception' output.log
- grep 'XFAIL tests/unit/plugins/modules/test_ansible_forked.py::test_exception_xfail' output.log
-
- # Verify that warnings are properly surfaced.
- grep 'UserWarning: This verifies that warnings generated at test time are reported.' output.log
- grep 'UserWarning: This verifies that warnings generated during test collection are reported.' output.log
-
- # Verify there are no unexpected warnings.
- grep 'Warning' output.log | grep -v 'UserWarning: This verifies that warnings generated ' && exit 1
-
- # Verify that details from failed tests are properly surfaced.
- grep "^Test CRASHED with exit code -9.$" output.log
- grep "^This stdout message should be reported since we're throwing an exception.$" output.log
- grep "^This stderr message should be reported since we're throwing an exception.$" output.log
- grep '^> *raise Exception("This error is expected and should be visible.")$' output.log
- grep "^E *Exception: This error is expected and should be visible.$" output.log
-
- echo "*** Done Checking Python ${python} ***"
-done
diff --git a/test/integration/targets/ansible-test/venv-pythons.py b/test/integration/targets/ansible-test/venv-pythons.py
index 97998bcd..b380f147 100755
--- a/test/integration/targets/ansible-test/venv-pythons.py
+++ b/test/integration/targets/ansible-test/venv-pythons.py
@@ -1,7 +1,6 @@
#!/usr/bin/env python
"""Return target Python options for use with ansible-test."""
-import argparse
import os
import shutil
import subprocess
@@ -11,11 +10,6 @@ from ansible import release
def main():
- parser = argparse.ArgumentParser()
- parser.add_argument('--only-versions', action='store_true')
-
- options = parser.parse_args()
-
ansible_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(release.__file__))))
source_root = os.path.join(ansible_root, 'test', 'lib')
@@ -39,10 +33,6 @@ def main():
print(f'{executable} - {"fail" if process.returncode else "pass"}', file=sys.stderr)
if not process.returncode:
- if options.only_versions:
- args.append(python_version)
- continue
-
args.extend(['--target-python', f'venv/{python_version}'])
print(' '.join(args))
diff --git a/test/integration/targets/ansible-vault/invalid_format/broken-group-vars-tasks.yml b/test/integration/targets/ansible-vault/invalid_format/broken-group-vars-tasks.yml
index 2365d47d..71dbacc0 100644
--- a/test/integration/targets/ansible-vault/invalid_format/broken-group-vars-tasks.yml
+++ b/test/integration/targets/ansible-vault/invalid_format/broken-group-vars-tasks.yml
@@ -20,4 +20,4 @@
# 3366323866663763660a323766383531396433663861656532373663373134376263383263316261
# 3137
-# $ ansible-playbook -i inventory --vault-password-file=vault-secret tasks.yml
+# $ ansible-playbook -i inventory --vault-password-file=vault-secret tasks.yml
diff --git a/test/integration/targets/ansible-vault/runme.sh b/test/integration/targets/ansible-vault/runme.sh
index 98399eca..50720ea9 100755
--- a/test/integration/targets/ansible-vault/runme.sh
+++ b/test/integration/targets/ansible-vault/runme.sh
@@ -47,18 +47,6 @@ echo $?
# view the vault encrypted password file
ansible-vault view "$@" --vault-id vault-password encrypted-vault-password
-# check if ansible-vault fails when destination is not writable
-NOT_WRITABLE_DIR="${MYTMPDIR}/not_writable"
-TEST_FILE_EDIT4="${NOT_WRITABLE_DIR}/testfile"
-mkdir "${NOT_WRITABLE_DIR}"
-touch "${TEST_FILE_EDIT4}"
-chmod ugo-w "${NOT_WRITABLE_DIR}"
-ansible-vault encrypt "$@" --vault-password-file vault-password "${TEST_FILE_EDIT4}" < /dev/null > log 2>&1 && :
-grep "not writable" log && :
-WRONG_RC=$?
-echo "rc was $WRONG_RC (1 is expected)"
-[ $WRONG_RC -eq 1 ]
-
# encrypt with a password from a vault encrypted password file and multiple vault-ids
# should fail because we dont know which vault id to use to encrypt with
ansible-vault encrypt "$@" --vault-id vault-password --vault-id encrypted-vault-password "${TEST_FILE_ENC_PASSWORD}" && :
@@ -586,23 +574,3 @@ ansible-playbook realpath.yml "$@" --vault-password-file symlink/get-password-sy
# using symlink
ansible-playbook symlink.yml "$@" --vault-password-file script/vault-secret.sh 2>&1 |grep "${ER}"
-
-### SALT TESTING ###
-# prep files for encryption
-for salted in test1 test2 test3
-do
- echo 'this is salty' > "salted_${salted}"
-done
-
-# encrypt files
-ANSIBLE_VAULT_ENCRYPT_SALT=salty ansible-vault encrypt salted_test1 --vault-password-file example1_password "$@"
-ANSIBLE_VAULT_ENCRYPT_SALT=salty ansible-vault encrypt salted_test2 --vault-password-file example1_password "$@"
-ansible-vault encrypt salted_test3 --vault-password-file example1_password "$@"
-
-# should be the same
-out=$(diff salted_test1 salted_test2)
-[ "${out}" == "" ]
-
-# shoudl be diff
-out=$(diff salted_test1 salted_test3 || true)
-[ "${out}" != "" ]
diff --git a/test/integration/targets/ansible-vault/test_vault.yml b/test/integration/targets/ansible-vault/test_vault.yml
index c21d49a6..7f8ed115 100644
--- a/test/integration/targets/ansible-vault/test_vault.yml
+++ b/test/integration/targets/ansible-vault/test_vault.yml
@@ -1,6 +1,6 @@
- hosts: testhost
gather_facts: False
vars:
- output_dir: .
+ - output_dir: .
roles:
- { role: test_vault, tags: test_vault}
diff --git a/test/integration/targets/ansible-vault/test_vaulted_template.yml b/test/integration/targets/ansible-vault/test_vaulted_template.yml
index 6a16ec86..b495211d 100644
--- a/test/integration/targets/ansible-vault/test_vaulted_template.yml
+++ b/test/integration/targets/ansible-vault/test_vaulted_template.yml
@@ -1,6 +1,6 @@
- hosts: testhost
gather_facts: False
vars:
- output_dir: .
+ - output_dir: .
roles:
- { role: test_vaulted_template, tags: test_vaulted_template}
diff --git a/test/integration/targets/ansible/aliases b/test/integration/targets/ansible/aliases
index c7f2050a..8278ec8b 100644
--- a/test/integration/targets/ansible/aliases
+++ b/test/integration/targets/ansible/aliases
@@ -1,3 +1,2 @@
shippable/posix/group3
context/controller
-needs/target/support-callback_plugins
diff --git a/test/integration/targets/ansible/ansible-testé.cfg b/test/integration/targets/ansible/ansible-testé.cfg
index a0e4e8d7..09af947f 100644
--- a/test/integration/targets/ansible/ansible-testé.cfg
+++ b/test/integration/targets/ansible/ansible-testé.cfg
@@ -1,3 +1,3 @@
[defaults]
remote_user = admin
-collections_path = /tmp/collections
+collections_paths = /tmp/collections
diff --git a/test/integration/targets/support-callback_plugins/callback_plugins/callback_debug.py b/test/integration/targets/ansible/callback_plugins/callback_debug.py
index 2462c1ff..2462c1ff 100644
--- a/test/integration/targets/support-callback_plugins/callback_plugins/callback_debug.py
+++ b/test/integration/targets/ansible/callback_plugins/callback_debug.py
diff --git a/test/integration/targets/ansible/runme.sh b/test/integration/targets/ansible/runme.sh
index d6780214..e9e72a9f 100755
--- a/test/integration/targets/ansible/runme.sh
+++ b/test/integration/targets/ansible/runme.sh
@@ -14,9 +14,9 @@ ANSIBLE_REMOTE_USER=administrator ansible-config dump| grep 'DEFAULT_REMOTE_USER
ansible-config list | grep 'DEFAULT_REMOTE_USER'
# Collection
-ansible-config view -c ./ansible-testé.cfg | grep 'collections_path = /tmp/collections'
+ansible-config view -c ./ansible-testé.cfg | grep 'collections_paths = /tmp/collections'
ansible-config dump -c ./ansible-testé.cfg | grep 'COLLECTIONS_PATHS([^)]*) ='
-ANSIBLE_COLLECTIONS_PATH=/tmp/collections ansible-config dump| grep 'COLLECTIONS_PATHS([^)]*) ='
+ANSIBLE_COLLECTIONS_PATHS=/tmp/collections ansible-config dump| grep 'COLLECTIONS_PATHS([^)]*) ='
ansible-config list | grep 'COLLECTIONS_PATHS'
# 'view' command must fail when config file is missing or has an invalid file extension
@@ -34,7 +34,7 @@ ansible localhost -m debug -a var=playbook_dir --playbook-dir=/doesnotexist/tmp
env -u ANSIBLE_PLAYBOOK_DIR ANSIBLE_CONFIG=./playbookdir_cfg.ini ansible localhost -m debug -a var=playbook_dir | grep '"playbook_dir": "/doesnotexist/tmp"'
# test adhoc callback triggers
-ANSIBLE_CALLBACK_PLUGINS=../support-callback_plugins/callback_plugins ANSIBLE_STDOUT_CALLBACK=callback_debug ANSIBLE_LOAD_CALLBACK_PLUGINS=1 ansible --playbook-dir . testhost -i ../../inventory -m ping | grep -E '^v2_' | diff -u adhoc-callback.stdout -
+ANSIBLE_STDOUT_CALLBACK=callback_debug ANSIBLE_LOAD_CALLBACK_PLUGINS=1 ansible --playbook-dir . testhost -i ../../inventory -m ping | grep -E '^v2_' | diff -u adhoc-callback.stdout -
# CB_WANTS_IMPLICIT isn't anything in Ansible itself.
# Our test cb plugin just accepts it. It lets us avoid copypasting the whole
diff --git a/test/integration/targets/apt/aliases b/test/integration/targets/apt/aliases
index 20c87093..5f892f9c 100644
--- a/test/integration/targets/apt/aliases
+++ b/test/integration/targets/apt/aliases
@@ -1,5 +1,6 @@
shippable/posix/group2
destructive
skip/freebsd
+skip/osx
skip/macos
skip/rhel
diff --git a/test/integration/targets/apt/tasks/apt.yml b/test/integration/targets/apt/tasks/apt.yml
index a0bc1992..d273eda7 100644
--- a/test/integration/targets/apt/tasks/apt.yml
+++ b/test/integration/targets/apt/tasks/apt.yml
@@ -372,7 +372,7 @@
- libcaca-dev
- libslang2-dev
-# # https://github.com/ansible/ansible/issues/38995
+# https://github.com/ansible/ansible/issues/38995
- name: build-dep for a package
apt:
name: tree
@@ -524,55 +524,6 @@
- "allow_change_held_packages_no_update is not changed"
- "allow_change_held_packages_hello_version.stdout == allow_change_held_packages_hello_version_again.stdout"
-# Remove pkg on hold
-- name: Put hello on hold
- shell: apt-mark hold hello
-
-- name: Get hold list
- shell: apt-mark showhold
- register: hello_hold
-
-- name: Check that the package hello is on the hold list
- assert:
- that:
- - "'hello' in hello_hold.stdout"
-
-- name: Try removing package hello
- apt:
- name: hello
- state: absent
- register: package_removed
- ignore_errors: true
-
-- name: verify the package is not removed with dpkg
- shell: dpkg -l hello
- register: dpkg_result
-
-- name: Verify that package was not removed
- assert:
- that:
- - package_removed is failed
- - dpkg_result is success
-
-- name: Try removing package (allow_change_held_packages=yes)
- apt:
- name: hello
- state: absent
- allow_change_held_packages: yes
- register: package_removed
-
-- name: verify the package is removed with dpkg
- shell: dpkg -l hello
- register: dpkg_result
- ignore_errors: true
-
-- name: Verify that package removal was succesfull
- assert:
- that:
- - package_removed is success
- - dpkg_result is failed
- - package_removed.changed
-
# Virtual package
- name: Install a virtual package
apt:
diff --git a/test/integration/targets/apt/tasks/repo.yml b/test/integration/targets/apt/tasks/repo.yml
index b1d08afa..d4cce78a 100644
--- a/test/integration/targets/apt/tasks/repo.yml
+++ b/test/integration/targets/apt/tasks/repo.yml
@@ -400,8 +400,6 @@
- { upgrade_type: safe, force_apt_get: True }
- { upgrade_type: full, force_apt_get: True }
- - include_tasks: "upgrade_scenarios.yml"
-
- name: Remove aptitude if not originally present
apt:
pkg: aptitude
diff --git a/test/integration/targets/apt/tasks/upgrade_scenarios.yml b/test/integration/targets/apt/tasks/upgrade_scenarios.yml
deleted file mode 100644
index a8bf76b3..00000000
--- a/test/integration/targets/apt/tasks/upgrade_scenarios.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-# See https://github.com/ansible/ansible/issues/77868
-# fail_on_autoremove is not valid parameter for aptitude
-- name: Use fail_on_autoremove using aptitude
- apt:
- upgrade: yes
- fail_on_autoremove: yes
- register: fail_on_autoremove_result
-
-- name: Check if fail_on_autoremove does not fail with aptitude
- assert:
- that:
- - not fail_on_autoremove_result.failed
-
-# See https://github.com/ansible/ansible/issues/77868
-# allow_downgrade is not valid parameter for aptitude
-- name: Use allow_downgrade using aptitude
- apt:
- upgrade: yes
- allow_downgrade: yes
- register: allow_downgrade_result
-
-- name: Check if allow_downgrade does not fail with aptitude
- assert:
- that:
- - not allow_downgrade_result.failed
diff --git a/test/integration/targets/apt_key/aliases b/test/integration/targets/apt_key/aliases
index 97f534a8..a820ec90 100644
--- a/test/integration/targets/apt_key/aliases
+++ b/test/integration/targets/apt_key/aliases
@@ -1,4 +1,5 @@
shippable/posix/group1
skip/freebsd
+skip/osx
skip/macos
skip/rhel
diff --git a/test/integration/targets/apt_key/tasks/main.yml b/test/integration/targets/apt_key/tasks/main.yml
index 7aee56a7..ffb89b22 100644
--- a/test/integration/targets/apt_key/tasks/main.yml
+++ b/test/integration/targets/apt_key/tasks/main.yml
@@ -21,7 +21,7 @@
- import_tasks: 'apt_key_inline_data.yml'
when: ansible_distribution in ('Ubuntu', 'Debian')
-
+
- import_tasks: 'file.yml'
when: ansible_distribution in ('Ubuntu', 'Debian')
diff --git a/test/integration/targets/apt_repository/aliases b/test/integration/targets/apt_repository/aliases
index b4fe8dba..34e2b540 100644
--- a/test/integration/targets/apt_repository/aliases
+++ b/test/integration/targets/apt_repository/aliases
@@ -1,5 +1,6 @@
destructive
shippable/posix/group1
skip/freebsd
+skip/osx
skip/macos
skip/rhel
diff --git a/test/integration/targets/apt_repository/tasks/apt.yml b/test/integration/targets/apt_repository/tasks/apt.yml
index 2ddf4140..9c15e647 100644
--- a/test/integration/targets/apt_repository/tasks/apt.yml
+++ b/test/integration/targets/apt_repository/tasks/apt.yml
@@ -152,11 +152,6 @@
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == test_ppa_spec'
- - '"sources_added" in result'
- - 'result.sources_added | length == 1'
- - '"git" in result.sources_added[0]'
- - '"sources_removed" in result'
- - 'result.sources_removed | length == 0'
- result_cache is not changed
- name: 'examine apt cache mtime'
@@ -172,17 +167,6 @@
apt_repository: repo='{{test_ppa_spec}}' state=absent
register: result
-- assert:
- that:
- - 'result.changed'
- - 'result.state == "absent"'
- - 'result.repo == test_ppa_spec'
- - '"sources_added" in result'
- - 'result.sources_added | length == 0'
- - '"sources_removed" in result'
- - 'result.sources_removed | length == 1'
- - '"git" in result.sources_removed[0]'
-
# When installing a repo with the spec, the key is *NOT* added
- name: 'ensure ppa key is absent (expect: pass)'
apt_key: id='{{test_ppa_key}}' state=absent
@@ -240,7 +224,7 @@
- assert:
that:
- result is failed
- - result.msg.startswith("argument 'repo' is of type <class 'NoneType'> and we were unable to convert to str")
+ - result.msg == 'Please set argument \'repo\' to a non-empty value'
- name: Test apt_repository with an empty value for repo
apt_repository:
diff --git a/test/integration/targets/apt_repository/tasks/mode_cleanup.yaml b/test/integration/targets/apt_repository/tasks/mode_cleanup.yaml
index 62960ccd..726de111 100644
--- a/test/integration/targets/apt_repository/tasks/mode_cleanup.yaml
+++ b/test/integration/targets/apt_repository/tasks/mode_cleanup.yaml
@@ -4,4 +4,4 @@
- name: Delete existing repo
file:
path: "{{ test_repo_path }}"
- state: absent
+ state: absent \ No newline at end of file
diff --git a/test/integration/targets/argspec/library/argspec.py b/test/integration/targets/argspec/library/argspec.py
index 2d86d77b..b6d6d110 100644
--- a/test/integration/targets/argspec/library/argspec.py
+++ b/test/integration/targets/argspec/library/argspec.py
@@ -23,10 +23,6 @@ def main():
'type': 'str',
'choices': ['absent', 'present'],
},
- 'default_value': {
- 'type': 'bool',
- 'default': True,
- },
'path': {},
'content': {},
'mapping': {
@@ -250,7 +246,7 @@ def main():
('state', 'present', ('path', 'content'), True),
),
mutually_exclusive=(
- ('path', 'content', 'default_value',),
+ ('path', 'content'),
),
required_one_of=(
('required_one_of_one', 'required_one_of_two'),
diff --git a/test/integration/targets/become/tasks/main.yml b/test/integration/targets/become/tasks/main.yml
index c05824d7..4a2ce64b 100644
--- a/test/integration/targets/become/tasks/main.yml
+++ b/test/integration/targets/become/tasks/main.yml
@@ -11,8 +11,8 @@
ansible_become_user: "{{ become_test_config.user }}"
ansible_become_method: "{{ become_test_config.method }}"
ansible_become_password: "{{ become_test_config.password | default(None) }}"
- loop: "{{
- (become_methods | selectattr('skip', 'undefined') | list)+
+ loop: "{{
+ (become_methods | selectattr('skip', 'undefined') | list)+
(become_methods | selectattr('skip', 'defined') | rejectattr('skip') | list)
}}"
loop_control:
diff --git a/test/integration/targets/blockinfile/tasks/append_newline.yml b/test/integration/targets/blockinfile/tasks/append_newline.yml
deleted file mode 100644
index ae3aef81..00000000
--- a/test/integration/targets/blockinfile/tasks/append_newline.yml
+++ /dev/null
@@ -1,119 +0,0 @@
-- name: Create append_newline test file
- copy:
- dest: "{{ remote_tmp_dir_test }}/append_newline.txt"
- content: |
- line1
- line2
- line3
-
-- name: add content to file appending a new line
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- append_newline: true
- insertafter: "line1"
- block: |
- line1.5
- register: insert_appending_a_new_line
-
-- name: add content to file appending a new line (again)
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- append_newline: true
- insertafter: "line1"
- block: |
- line1.5
- register: insert_appending_a_new_line_again
-
-- name: get file content after adding content appending a new line
- stat:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- register: appended_a_new_line
-
-- name: check content is the expected one after inserting content appending a new line
- assert:
- that:
- - insert_appending_a_new_line is changed
- - insert_appending_a_new_line_again is not changed
- - appended_a_new_line.stat.checksum == "525ffd613a0b0eb6675e506226dc2adedf621f34"
-
-- name: add content to file without appending a new line
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- marker: "#{mark} UNWRAPPED TEXT"
- insertafter: "line2"
- block: |
- line2.5
- register: insert_without_appending_new_line
-
-- name: get file content after adding content without appending a new line
- stat:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- register: without_appending_new_line
-
-- name: check content is the expected one after inserting without appending a new line
- assert:
- that:
- - insert_without_appending_new_line is changed
- - without_appending_new_line.stat.checksum == "d5f5ed1428af50b5484a5184dc7e1afda1736646"
-
-- name: append a new line to existing block
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- append_newline: true
- marker: "#{mark} UNWRAPPED TEXT"
- insertafter: "line2"
- block: |
- line2.5
- register: append_new_line_to_existing_block
-
-- name: get file content after appending a line to existing block
- stat:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- register: new_line_appended
-
-- name: check content is the expected one after appending a new line to an existing block
- assert:
- that:
- - append_new_line_to_existing_block is changed
- - new_line_appended.stat.checksum == "b09dd16be73a0077027d5a324294db8a75a7b0f9"
-
-- name: add a block appending a new line at the end of the file
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- append_newline: true
- marker: "#{mark} END OF FILE TEXT"
- insertafter: "line3"
- block: |
- line3.5
- register: insert_appending_new_line_at_the_end_of_file
-
-- name: get file content after appending new line at the end of the file
- stat:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- register: inserted_block_appending_new_line_at_the_end_of_the_file
-
-- name: check content is the expected one after adding a block appending a new line at the end of the file
- assert:
- that:
- - insert_appending_new_line_at_the_end_of_file is changed
- - inserted_block_appending_new_line_at_the_end_of_the_file.stat.checksum == "9b90722b84d9bdda1be781cc4bd44d8979887691"
-
-
-- name: Removing a block with append_newline set to true does not append another line
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- append_newline: true
- marker: "#{mark} UNWRAPPED TEXT"
- state: absent
- register: remove_block_appending_new_line
-
-- name: get file content after removing existing block appending new line
- stat:
- path: "{{ remote_tmp_dir_test }}/append_newline.txt"
- register: removed_block_appending_new_line
-
-- name: check content is the expected one after removing a block appending a new line
- assert:
- that:
- - remove_block_appending_new_line is changed
- - removed_block_appending_new_line.stat.checksum == "9a40d4c0969255cd6147537b38309d69a9b10049"
diff --git a/test/integration/targets/blockinfile/tasks/create_dir.yml b/test/integration/targets/blockinfile/tasks/create_dir.yml
deleted file mode 100644
index a16ada5e..00000000
--- a/test/integration/targets/blockinfile/tasks/create_dir.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-- name: Set up a directory to test module error handling
- file:
- path: "{{ remote_tmp_dir_test }}/unreadable"
- state: directory
- mode: "000"
-
-- name: Create a directory and file with blockinfile
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/unreadable/createme/file.txt"
- block: |
- line 1
- line 2
- state: present
- create: yes
- register: permissions_error
- ignore_errors: yes
-
-- name: assert the error looks right
- assert:
- that:
- - permissions_error.msg.startswith('Error creating')
- when: "ansible_user_id != 'root'"
-
-- name: otherwise (root) assert the directory and file exists
- stat:
- path: "{{ remote_tmp_dir_test }}/unreadable/createme/file.txt"
- register: path_created
- failed_when: path_created.exists is false
- when: "ansible_user_id == 'root'"
diff --git a/test/integration/targets/blockinfile/tasks/main.yml b/test/integration/targets/blockinfile/tasks/main.yml
index f26cb165..054e5549 100644
--- a/test/integration/targets/blockinfile/tasks/main.yml
+++ b/test/integration/targets/blockinfile/tasks/main.yml
@@ -31,7 +31,6 @@
- import_tasks: add_block_to_existing_file.yml
- import_tasks: create_file.yml
-- import_tasks: create_dir.yml
- import_tasks: preserve_line_endings.yml
- import_tasks: block_without_trailing_newline.yml
- import_tasks: file_without_trailing_newline.yml
@@ -40,5 +39,3 @@
- import_tasks: insertafter.yml
- import_tasks: insertbefore.yml
- import_tasks: multiline_search.yml
-- import_tasks: append_newline.yml
-- import_tasks: prepend_newline.yml
diff --git a/test/integration/targets/blockinfile/tasks/prepend_newline.yml b/test/integration/targets/blockinfile/tasks/prepend_newline.yml
deleted file mode 100644
index 535db017..00000000
--- a/test/integration/targets/blockinfile/tasks/prepend_newline.yml
+++ /dev/null
@@ -1,119 +0,0 @@
-- name: Create prepend_newline test file
- copy:
- dest: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- content: |
- line1
- line2
- line3
-
-- name: add content to file prepending a new line at the beginning of the file
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- prepend_newline: true
- insertbefore: "line1"
- block: |
- line0.5
- register: insert_prepending_a_new_line_at_the_beginning_of_the_file
-
-- name: get file content after adding content prepending a new line at the beginning of the file
- stat:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- register: prepended_a_new_line_at_the_beginning_of_the_file
-
-- name: check content is the expected one after prepending a new line at the beginning of the file
- assert:
- that:
- - insert_prepending_a_new_line_at_the_beginning_of_the_file is changed
- - prepended_a_new_line_at_the_beginning_of_the_file.stat.checksum == "bfd32c880bbfadd1983c67836c46bf8ed9d50343"
-
-- name: add content to file prepending a new line
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- prepend_newline: true
- marker: "#{mark} WRAPPED TEXT"
- insertafter: "line1"
- block: |
- line1.5
- register: insert_prepending_a_new_line
-
-- name: add content to file prepending a new line (again)
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- prepend_newline: true
- marker: "#{mark} WRAPPED TEXT"
- insertafter: "line1"
- block: |
- line1.5
- register: insert_prepending_a_new_line_again
-
-- name: get file content after adding content prepending a new line
- stat:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- register: prepended_a_new_line
-
-- name: check content is the expected one after inserting content prepending a new line
- assert:
- that:
- - insert_prepending_a_new_line is changed
- - insert_prepending_a_new_line_again is not changed
- - prepended_a_new_line.stat.checksum == "d5b8b42690f4a38b9a040adc3240a6f81ad5f8ee"
-
-- name: add content to file without prepending a new line
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- marker: "#{mark} UNWRAPPED TEXT"
- insertafter: "line3"
- block: |
- line3.5
- register: insert_without_prepending_new_line
-
-- name: get file content after adding content without prepending a new line
- stat:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- register: without_prepending_new_line
-
-- name: check content is the expected one after inserting without prepending a new line
- assert:
- that:
- - insert_without_prepending_new_line is changed
- - without_prepending_new_line.stat.checksum == "ad06200e7ee5b22b7eff4c57075b42d038eaffb6"
-
-- name: prepend a new line to existing block
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- prepend_newline: true
- marker: "#{mark} UNWRAPPED TEXT"
- insertafter: "line3"
- block: |
- line3.5
- register: prepend_new_line_to_existing_block
-
-- name: get file content after prepending a new line to an existing block
- stat:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- register: new_line_prepended
-
-- name: check content is the expected one after prepending a new line to an existing block
- assert:
- that:
- - prepend_new_line_to_existing_block is changed
- - new_line_prepended.stat.checksum == "f2dd48160fb3c7c8e02d292666a1a3f08503f6bf"
-
-- name: Removing a block with prepend_newline set to true does not prepend another line
- blockinfile:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- prepend_newline: true
- marker: "#{mark} UNWRAPPED TEXT"
- state: absent
- register: remove_block_prepending_new_line
-
-- name: get file content after removing existing block prepending new line
- stat:
- path: "{{ remote_tmp_dir_test }}/prepend_newline.txt"
- register: removed_block_prepending_new_line
-
-- name: check content is the expected one after removing a block prepending a new line
- assert:
- that:
- - remove_block_prepending_new_line is changed
- - removed_block_prepending_new_line.stat.checksum == "c97c3da7d607acfd5d786fbb81f3d93d867c914a" \ No newline at end of file
diff --git a/test/integration/targets/blocks/unsafe_failed_task.yml b/test/integration/targets/blocks/unsafe_failed_task.yml
index e74327b9..adfa492a 100644
--- a/test/integration/targets/blocks/unsafe_failed_task.yml
+++ b/test/integration/targets/blocks/unsafe_failed_task.yml
@@ -1,7 +1,7 @@
- hosts: localhost
gather_facts: false
vars:
- data: {}
+ - data: {}
tasks:
- block:
- name: template error
diff --git a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout
index ed455756..71a4ef9e 100644
--- a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout
+++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout
@@ -43,7 +43,6 @@ fatal: [testhost]: FAILED! =>
TASK [Skipped task] ************************************************************
skipping: [testhost] =>
changed: false
- false_condition: false
skip_reason: Conditional result was False
TASK [Task with var in name (foo bar)] *****************************************
@@ -121,7 +120,6 @@ ok: [testhost] => (item=debug-3) =>
msg: debug-3
skipping: [testhost] => (item=debug-4) =>
ansible_loop_var: item
- false_condition: item != 4
item: 4
fatal: [testhost]: FAILED! =>
msg: One or more items failed
@@ -201,11 +199,9 @@ skipping: [testhost] =>
TASK [debug] *******************************************************************
skipping: [testhost] => (item=1) =>
ansible_loop_var: item
- false_condition: false
item: 1
skipping: [testhost] => (item=2) =>
ansible_loop_var: item
- false_condition: false
item: 2
skipping: [testhost] =>
msg: All items skipped
diff --git a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout
index 3a121a5f..7a99cc74 100644
--- a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout
+++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout
@@ -45,7 +45,6 @@ fatal: [testhost]: FAILED! =>
TASK [Skipped task] ************************************************************
skipping: [testhost] =>
changed: false
- false_condition: false
skip_reason: Conditional result was False
TASK [Task with var in name (foo bar)] *****************************************
@@ -127,7 +126,6 @@ ok: [testhost] => (item=debug-3) =>
msg: debug-3
skipping: [testhost] => (item=debug-4) =>
ansible_loop_var: item
- false_condition: item != 4
item: 4
fatal: [testhost]: FAILED! =>
msg: One or more items failed
@@ -208,11 +206,9 @@ skipping: [testhost] =>
TASK [debug] *******************************************************************
skipping: [testhost] => (item=1) =>
ansible_loop_var: item
- false_condition: false
item: 1
skipping: [testhost] => (item=2) =>
ansible_loop_var: item
- false_condition: false
item: 2
skipping: [testhost] =>
msg: All items skipped
diff --git a/test/integration/targets/check_mode/check_mode.yml b/test/integration/targets/check_mode/check_mode.yml
index ebf1c5b5..a5777506 100644
--- a/test/integration/targets/check_mode/check_mode.yml
+++ b/test/integration/targets/check_mode/check_mode.yml
@@ -1,7 +1,7 @@
- name: Test that check works with check_mode specified in roles
hosts: testhost
vars:
- output_dir: .
+ - output_dir: .
roles:
- { role: test_always_run, tags: test_always_run }
- { role: test_check_mode, tags: test_check_mode }
diff --git a/test/integration/targets/check_mode/roles/test_check_mode/tasks/main.yml b/test/integration/targets/check_mode/roles/test_check_mode/tasks/main.yml
index ce9ecbf4..f926d144 100644
--- a/test/integration/targets/check_mode/roles/test_check_mode/tasks/main.yml
+++ b/test/integration/targets/check_mode/roles/test_check_mode/tasks/main.yml
@@ -25,8 +25,8 @@
register: foo
- name: verify that the file was marked as changed in check mode
- assert:
- that:
+ assert:
+ that:
- "template_result is changed"
- "not foo.stat.exists"
@@ -44,7 +44,7 @@
check_mode: no
- name: verify that the file was not changed
- assert:
- that:
+ assert:
+ that:
- "checkmode_disabled is changed"
- "template_result2 is not changed"
diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/connection/localconn.py b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/connection/localconn.py
index 77f80502..fc19a99d 100644
--- a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/connection/localconn.py
+++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/connection/localconn.py
@@ -1,7 +1,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.plugins.connection import ConnectionBase
DOCUMENTATION = """
diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing.py b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing.py
index 6f3a19d7..b945eb68 100644
--- a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing.py
+++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing.py
@@ -2,7 +2,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ..module_utils import bogusmu # pylint: disable=relative-beyond-top-level,unused-import
+import json
+import sys
+
+from ..module_utils import bogusmu # pylint: disable=relative-beyond-top-level
def main():
diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_collection.py b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_collection.py
index 6f2320d3..59cb3c5e 100644
--- a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_collection.py
+++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_collection.py
@@ -2,7 +2,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ..module_utils import missing_redirect_target_collection # pylint: disable=relative-beyond-top-level,unused-import
+import json
+import sys
+
+from ..module_utils import missing_redirect_target_collection # pylint: disable=relative-beyond-top-level
def main():
diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_module.py b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_module.py
index de5c2e58..31ffd17c 100644
--- a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_module.py
+++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/uses_mu_missing_redirect_module.py
@@ -2,7 +2,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ..module_utils import missing_redirect_target_module # pylint: disable=relative-beyond-top-level,unused-import
+import json
+import sys
+
+from ..module_utils import missing_redirect_target_module # pylint: disable=relative-beyond-top-level
def main():
diff --git a/test/integration/targets/collections/collections/ansible_collections/testns/content_adj/plugins/inventory/statichost.py b/test/integration/targets/collections/collections/ansible_collections/testns/content_adj/plugins/inventory/statichost.py
index 92696481..ae6941f3 100644
--- a/test/integration/targets/collections/collections/ansible_collections/testns/content_adj/plugins/inventory/statichost.py
+++ b/test/integration/targets/collections/collections/ansible_collections/testns/content_adj/plugins/inventory/statichost.py
@@ -19,6 +19,7 @@ DOCUMENTATION = '''
required: True
'''
+from ansible.errors import AnsibleParserError
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py b/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py
index f1242e14..23cce104 100644
--- a/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py
+++ b/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py
@@ -14,6 +14,7 @@ DOCUMENTATION = '''
- Enable in configuration.
'''
+from ansible import constants as C
from ansible.plugins.callback import CallbackBase
diff --git a/test/integration/targets/ansible-playbook-callbacks/include_me.yml b/test/integration/targets/collections/testcoll2/MANIFEST.json
index e69de29b..e69de29b 100644
--- a/test/integration/targets/ansible-playbook-callbacks/include_me.yml
+++ b/test/integration/targets/collections/testcoll2/MANIFEST.json
diff --git a/test/integration/targets/command_nonexisting/tasks/main.yml b/test/integration/targets/command_nonexisting/tasks/main.yml
index e54ecb3f..d21856e7 100644
--- a/test/integration/targets/command_nonexisting/tasks/main.yml
+++ b/test/integration/targets/command_nonexisting/tasks/main.yml
@@ -1,4 +1,4 @@
- command: commandthatdoesnotexist --would-be-awkward
register: res
changed_when: "'changed' in res.stdout"
- failed_when: "res.stdout != '' or res.stderr != ''"
+ failed_when: "res.stdout != '' or res.stderr != ''" \ No newline at end of file
diff --git a/test/integration/targets/command_shell/scripts/yoink.sh b/test/integration/targets/command_shell/scripts/yoink.sh
deleted file mode 100755
index ca955da0..00000000
--- a/test/integration/targets/command_shell/scripts/yoink.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env bash
-sleep 10
diff --git a/test/integration/targets/command_shell/tasks/main.yml b/test/integration/targets/command_shell/tasks/main.yml
index 2cc365db..1f4aa5d7 100644
--- a/test/integration/targets/command_shell/tasks/main.yml
+++ b/test/integration/targets/command_shell/tasks/main.yml
@@ -284,30 +284,6 @@
that:
- "command_result6.stdout == '9cd0697c6a9ff6689f0afb9136fa62e0b3fee903'"
-- name: check default var expansion
- command: /bin/sh -c 'echo "\$TEST"'
- environment:
- TEST: z
- register: command_result7
-
-- name: assert vars were expanded
- assert:
- that:
- - command_result7.stdout == '\\z'
-
-- name: check disabled var expansion
- command: /bin/sh -c 'echo "\$TEST"'
- args:
- expand_argument_vars: false
- environment:
- TEST: z
- register: command_result8
-
-- name: assert vars were not expanded
- assert:
- that:
- - command_result8.stdout == '$TEST'
-
##
## shell
##
@@ -570,21 +546,3 @@
- command_strip.stderr == 'hello \n '
- command_no_strip.stdout== 'hello \n \r\n'
- command_no_strip.stderr == 'hello \n \r\n'
-
-- name: run shell with expand_argument_vars
- shell: echo 'hi'
- args:
- expand_argument_vars: false
- register: shell_expand_failure
- ignore_errors: true
-
-- name: assert shell with expand_arguments_vars failed
- assert:
- that:
- - shell_expand_failure is failed
- - "shell_expand_failure.msg == 'Unsupported parameters for (shell) module: expand_argument_vars'"
-
-- name: Run command that backgrounds, to ensure no hang
- shell: '{{ role_path }}/scripts/yoink.sh &'
- delegate_to: localhost
- timeout: 5
diff --git a/test/integration/targets/conditionals/play.yml b/test/integration/targets/conditionals/play.yml
index 56ec8438..455818c9 100644
--- a/test/integration/targets/conditionals/play.yml
+++ b/test/integration/targets/conditionals/play.yml
@@ -665,29 +665,3 @@
- item
loop:
- 1 == 1
-
- - set_fact:
- sentinel_file: '{{ lookup("env", "OUTPUT_DIR")}}/LOOKUP_SIDE_EFFECT.txt'
-
- - name: ensure sentinel file is absent
- file:
- path: '{{ sentinel_file }}'
- state: absent
- - name: get an untrusted var that's a valid Jinja expression with a side-effect
- shell: |
- echo "lookup('pipe', 'echo bang > \"$SENTINEL_FILE\" && cat \"$SENTINEL_FILE\"')"
- environment:
- SENTINEL_FILE: '{{ sentinel_file }}'
- register: untrusted_expr
- - name: use a conditional with an inline template that refers to the untrusted expression
- debug:
- msg: look at some seemingly innocuous stuff
- when: '"foo" in {{ untrusted_expr.stdout }}'
- ignore_errors: true
- - name: ensure the untrusted expression side-effect has not executed
- stat:
- path: '{{ sentinel_file }}'
- register: sentinel_stat
- - assert:
- that:
- - not sentinel_stat.stat.exists
diff --git a/test/integration/targets/connection_delegation/aliases b/test/integration/targets/connection_delegation/aliases
index 0ce76011..6c965663 100644
--- a/test/integration/targets/connection_delegation/aliases
+++ b/test/integration/targets/connection_delegation/aliases
@@ -1,5 +1,6 @@
shippable/posix/group3
context/controller
skip/freebsd # No sshpass
+skip/osx # No sshpass
skip/macos # No sshpass
skip/rhel # No sshpass
diff --git a/test/integration/targets/connection_paramiko_ssh/test_connection.inventory b/test/integration/targets/connection_paramiko_ssh/test_connection.inventory
index cd17c090..a3f34ab7 100644
--- a/test/integration/targets/connection_paramiko_ssh/test_connection.inventory
+++ b/test/integration/targets/connection_paramiko_ssh/test_connection.inventory
@@ -2,6 +2,6 @@
paramiko_ssh-pipelining ansible_ssh_pipelining=true
paramiko_ssh-no-pipelining ansible_ssh_pipelining=false
[paramiko_ssh:vars]
-ansible_host={{ 'localhost'|string }}
+ansible_host=localhost
ansible_connection=paramiko_ssh
ansible_python_interpreter="{{ ansible_playbook_python }}"
diff --git a/test/integration/targets/connection_psrp/tests.yml b/test/integration/targets/connection_psrp/tests.yml
index 08832b14..dabbf407 100644
--- a/test/integration/targets/connection_psrp/tests.yml
+++ b/test/integration/targets/connection_psrp/tests.yml
@@ -6,9 +6,6 @@
gather_facts: no
tasks:
- - name: reboot the host
- ansible.windows.win_reboot:
-
- name: test complex objects in raw output
# until PyYAML is upgraded to 4.x we need to use the \U escape for a unicode codepoint
# and enclose in a quote to it translates the \U
@@ -32,8 +29,15 @@
- raw_out.stdout_lines[4] == "winrm"
- raw_out.stdout_lines[5] == "string - \U0001F4A9"
+ # Become only works on Server 2008 when running with basic auth, skip this host for now as it is too complicated to
+ # override the auth protocol in the tests.
+ - name: check if we running on Server 2008
+ win_shell: '[System.Environment]::OSVersion.Version -ge [Version]"6.1"'
+ register: os_version
+
- name: test out become with psrp
win_whoami:
+ when: os_version|bool
register: whoami_out
become: yes
become_method: runas
@@ -43,6 +47,7 @@
assert:
that:
- whoami_out.account.sid == "S-1-5-18"
+ when: os_version|bool
- name: test out async with psrp
win_shell: Start-Sleep -Seconds 2; Write-Output abc
diff --git a/test/integration/targets/connection_winrm/tests.yml b/test/integration/targets/connection_winrm/tests.yml
index b086a3ad..78f92a49 100644
--- a/test/integration/targets/connection_winrm/tests.yml
+++ b/test/integration/targets/connection_winrm/tests.yml
@@ -6,9 +6,6 @@
gather_facts: no
tasks:
- - name: reboot the host
- ansible.windows.win_reboot:
-
- name: setup remote tmp dir
import_role:
name: ../../setup_remote_tmp_dir
diff --git a/test/integration/targets/copy/tasks/main.yml b/test/integration/targets/copy/tasks/main.yml
index 601312fa..b86c56ac 100644
--- a/test/integration/targets/copy/tasks/main.yml
+++ b/test/integration/targets/copy/tasks/main.yml
@@ -84,7 +84,6 @@
- import_tasks: check_mode.yml
# https://github.com/ansible/ansible/issues/57618
- # https://github.com/ansible/ansible/issues/79749
- name: Test diff contents
copy:
content: 'Ansible managed\n'
@@ -96,7 +95,6 @@
that:
- 'diff_output.diff[0].before == ""'
- '"Ansible managed" in diff_output.diff[0].after'
- - '"file.txt" in diff_output.diff[0].after_header'
- name: tests with remote_src and non files
import_tasks: src_remote_file_is_not_file.yml
diff --git a/test/integration/targets/copy/tasks/tests.yml b/test/integration/targets/copy/tasks/tests.yml
index 40ea9de3..d6c8e63c 100644
--- a/test/integration/targets/copy/tasks/tests.yml
+++ b/test/integration/targets/copy/tasks/tests.yml
@@ -420,80 +420,6 @@
- "stat_results2.stat.mode == '0547'"
#
-# test copying an empty dir to a dest dir with remote_src=True
-#
-
-- name: create empty test dir
- file:
- path: '{{ remote_dir }}/testcase_empty_dir'
- state: directory
-
-- name: test copying an empty dir to a dir that does not exist (dest ends with slash)
- copy:
- src: '{{ remote_dir }}/testcase_empty_dir/'
- remote_src: yes
- dest: '{{ remote_dir }}/testcase_empty_dir_dest/'
- register: copy_result
-
-- name: get stat of newly created dir
- stat:
- path: '{{ remote_dir }}/testcase_empty_dir_dest'
- register: stat_result
-
-- assert:
- that:
- - copy_result.changed
- - stat_result.stat.exists
- - stat_result.stat.isdir
-
-- name: test no change is made running the task twice
- copy:
- src: '{{ remote_dir }}/testcase_empty_dir/'
- remote_src: yes
- dest: '{{ remote_dir }}/testcase_empty_dir_dest/'
- register: copy_result
- failed_when: copy_result is changed
-
-- name: remove to test dest with no trailing slash
- file:
- path: '{{ remote_dir }}/testcase_empty_dir_dest/'
- state: absent
-
-- name: test copying an empty dir to a dir that does not exist (both src/dest have no trailing slash)
- copy:
- src: '{{ remote_dir }}/testcase_empty_dir'
- remote_src: yes
- dest: '{{ remote_dir }}/testcase_empty_dir_dest'
- register: copy_result
-
-- name: get stat of newly created dir
- stat:
- path: '{{ remote_dir }}/testcase_empty_dir_dest'
- register: stat_result
-
-- assert:
- that:
- - copy_result.changed
- - stat_result.stat.exists
- - stat_result.stat.isdir
-
-- name: test no change is made running the task twice
- copy:
- src: '{{ remote_dir }}/testcase_empty_dir/'
- remote_src: yes
- dest: '{{ remote_dir }}/testcase_empty_dir_dest/'
- register: copy_result
- failed_when: copy_result is changed
-
-- name: clean up src and dest
- file:
- path: "{{ item }}"
- state: absent
- loop:
- - '{{ remote_dir }}/testcase_empty_dir'
- - '{{ remote_dir }}/testcase_empty_dir_dest'
-
-#
# test recursive copy local_follow=False, no trailing slash
#
@@ -2358,81 +2284,3 @@
that:
- fail_copy_directory_with_enc_file is failed
- fail_copy_directory_with_enc_file.msg == 'A vault password or secret must be specified to decrypt {{role_path}}/files-different/vault/vault-file'
-
-#
-# Test for issue 74536: recursively copy all nested directories with remote_src=yes and src='dir/' when dest exists
-#
-- vars:
- src: '{{ remote_dir }}/testcase_74536'
- block:
- - name: create source dir with 3 nested subdirs
- file:
- path: '{{ src }}/a/b1/c1'
- state: directory
-
- - name: copy the source dir with a trailing slash
- copy:
- src: '{{ src }}/'
- remote_src: yes
- dest: '{{ src }}_dest/'
- register: copy_result
- failed_when: copy_result is not changed
-
- - name: remove the source dir to recreate with different subdirs
- file:
- path: '{{ src }}'
- state: absent
-
- - name: recreate source dir
- file:
- path: "{{ item }}"
- state: directory
- loop:
- - '{{ src }}/a/b1/c2'
- - '{{ src }}/a/b2/c3'
-
- - name: copy the source dir containing new subdirs into the existing dest dir
- copy:
- src: '{{ src }}/'
- remote_src: yes
- dest: '{{ src }}_dest/'
- register: copy_result
-
- - name: stat each directory that should exist
- stat:
- path: '{{ item }}'
- register: stat_result
- loop:
- - '{{ src }}_dest'
- - '{{ src }}_dest/a'
- - '{{ src }}_dest/a/b1'
- - '{{ src }}_dest/a/b2'
- - '{{ src }}_dest/a/b1/c1'
- - '{{ src }}_dest/a/b1/c2'
- - '{{ src }}_dest/a/b2/c3'
-
- - debug: msg="{{ stat_result }}"
-
- - assert:
- that:
- - copy_result is changed
- # all paths exist
- - stat_result.results | map(attribute='stat') | map(attribute='exists') | unique == [true]
- # all paths are dirs
- - stat_result.results | map(attribute='stat') | map(attribute='isdir') | unique == [true]
-
- - name: copy the src again to verify no changes will be made
- copy:
- src: '{{ src }}/'
- remote_src: yes
- dest: '{{ src }}_dest/'
- register: copy_result
- failed_when: copy_result is changed
-
- - name: clean up src and dest
- file:
- path: '{{ item }}'
- state: absent
- loop:
- - '{{ src }}'
- - '{{ src }}_dest'
diff --git a/test/integration/targets/cron/aliases b/test/integration/targets/cron/aliases
index f3703f85..f2f9ac9d 100644
--- a/test/integration/targets/cron/aliases
+++ b/test/integration/targets/cron/aliases
@@ -1,3 +1,4 @@
destructive
shippable/posix/group1
+skip/osx
skip/macos
diff --git a/test/integration/targets/deb822_repository/aliases b/test/integration/targets/deb822_repository/aliases
deleted file mode 100644
index 34e2b540..00000000
--- a/test/integration/targets/deb822_repository/aliases
+++ /dev/null
@@ -1,6 +0,0 @@
-destructive
-shippable/posix/group1
-skip/freebsd
-skip/osx
-skip/macos
-skip/rhel
diff --git a/test/integration/targets/deb822_repository/meta/main.yml b/test/integration/targets/deb822_repository/meta/main.yml
deleted file mode 100644
index 83e789ee..00000000
--- a/test/integration/targets/deb822_repository/meta/main.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-dependencies:
- - prepare_tests
- - role: setup_deb_repo
- install_repo: false
diff --git a/test/integration/targets/deb822_repository/tasks/install.yml b/test/integration/targets/deb822_repository/tasks/install.yml
deleted file mode 100644
index a5dce437..00000000
--- a/test/integration/targets/deb822_repository/tasks/install.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-- name: Create repo to install from
- deb822_repository:
- name: ansible-test local
- uris: file:{{ repodir }}
- suites:
- - stable
- - testing
- components:
- - main
- architectures:
- - all
- trusted: yes
- register: deb822_install_repo
-
-- name: Update apt cache
- apt:
- update_cache: yes
- when: deb822_install_repo is changed
-
-- block:
- - name: Install package from local repo
- apt:
- name: foo=1.0.0
- register: deb822_install_pkg
- always:
- - name: Uninstall foo
- apt:
- name: foo
- state: absent
- when: deb822_install_pkg is changed
-
- - name: remove repo
- deb822_repository:
- name: ansible-test local
- state: absent
-
-- assert:
- that:
- - deb822_install_repo is changed
- - deb822_install_pkg is changed
diff --git a/test/integration/targets/deb822_repository/tasks/main.yml b/test/integration/targets/deb822_repository/tasks/main.yml
deleted file mode 100644
index 561ef2a6..00000000
--- a/test/integration/targets/deb822_repository/tasks/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-- meta: end_play
- when: ansible_os_family != 'Debian'
-
-- block:
- - name: install python3-debian
- apt:
- name: python3-debian
- state: present
- register: py3_deb_install
-
- - import_tasks: test.yml
-
- - import_tasks: install.yml
- always:
- - name: uninstall python3-debian
- apt:
- name: python3-debian
- state: absent
- when: py3_deb_install is changed
diff --git a/test/integration/targets/deb822_repository/tasks/test.yml b/test/integration/targets/deb822_repository/tasks/test.yml
deleted file mode 100644
index 4911bb92..00000000
--- a/test/integration/targets/deb822_repository/tasks/test.yml
+++ /dev/null
@@ -1,229 +0,0 @@
-- name: Create deb822 repo - check_mode
- deb822_repository:
- name: ansible-test focal archive
- uris: http://us.archive.ubuntu.com/ubuntu
- suites:
- - focal
- - focal-updates
- components:
- - main
- - restricted
- register: deb822_check_mode_1
- check_mode: true
-
-- name: Create deb822 repo
- deb822_repository:
- name: ansible-test focal archive
- uris: http://us.archive.ubuntu.com/ubuntu
- suites:
- - focal
- - focal-updates
- components:
- - main
- - restricted
- date_max_future: 10
- register: deb822_create_1
-
-- name: Check file mode
- stat:
- path: /etc/apt/sources.list.d/ansible-test-focal-archive.sources
- register: deb822_create_1_stat_1
-
-- name: Create another deb822 repo
- deb822_repository:
- name: ansible-test focal security
- uris: http://security.ubuntu.com/ubuntu
- suites:
- - focal-security
- components:
- - main
- - restricted
- register: deb822_create_2
-
-- name: Create deb822 repo idempotency
- deb822_repository:
- name: ansible-test focal archive
- uris: http://us.archive.ubuntu.com/ubuntu
- suites:
- - focal
- - focal-updates
- components:
- - main
- - restricted
- date_max_future: 10
- register: deb822_create_1_idem
-
-- name: Create deb822 repo - check_mode
- deb822_repository:
- name: ansible-test focal archive
- uris: http://us.archive.ubuntu.com/ubuntu
- suites:
- - focal
- - focal-updates
- components:
- - main
- - restricted
- date_max_future: 10
- register: deb822_check_mode_2
- check_mode: yes
-
-- name: Change deb822 repo mode
- deb822_repository:
- name: ansible-test focal archive
- uris: http://us.archive.ubuntu.com/ubuntu
- suites:
- - focal
- - focal-updates
- components:
- - main
- - restricted
- date_max_future: 10
- mode: '0600'
- register: deb822_create_1_mode
-
-- name: Check file mode
- stat:
- path: /etc/apt/sources.list.d/ansible-test-focal-archive.sources
- register: deb822_create_1_stat_2
-
-- assert:
- that:
- - deb822_check_mode_1 is changed
-
- - deb822_check_mode_2 is not changed
-
- - deb822_create_1 is changed
- - deb822_create_1.dest == '/etc/apt/sources.list.d/ansible-test-focal-archive.sources'
- - deb822_create_1.repo|trim == focal_archive_expected
-
- - deb822_create_1_idem is not changed
-
- - deb822_create_1_mode is changed
- - deb822_create_1_stat_1.stat.mode == '0644'
- - deb822_create_1_stat_2.stat.mode == '0600'
- vars:
- focal_archive_expected: |-
- X-Repolib-Name: ansible-test focal archive
- URIs: http://us.archive.ubuntu.com/ubuntu
- Suites: focal focal-updates
- Components: main restricted
- Date-Max-Future: 10
- Types: deb
-
-- name: Remove repos
- deb822_repository:
- name: '{{ item }}'
- state: absent
- register: remove_repos_1
- loop:
- - ansible-test focal archive
- - ansible-test focal security
-
-- name: Check for repo files
- stat:
- path: /etc/apt/sources.list.d/ansible-test-{{ item }}.sources
- register: remove_stats
- loop:
- - focal-archive
- - focal-security
-
-- assert:
- that:
- - remove_repos_1 is changed
- - remove_stats.results|map(attribute='stat')|selectattr('exists') == []
-
-- name: Add repo with signed_by
- deb822_repository:
- name: ansible-test
- types: deb
- uris: https://deb.debian.org
- suites: stable
- components:
- - main
- - contrib
- - non-free
- signed_by: |-
- -----BEGIN PGP PUBLIC KEY BLOCK-----
-
- mDMEYCQjIxYJKwYBBAHaRw8BAQdAD/P5Nvvnvk66SxBBHDbhRml9ORg1WV5CvzKY
- CuMfoIS0BmFiY2RlZoiQBBMWCgA4FiEErCIG1VhKWMWo2yfAREZd5NfO31cFAmAk
- IyMCGyMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQREZd5NfO31fbOwD6ArzS
- dM0Dkd5h2Ujy1b6KcAaVW9FOa5UNfJ9FFBtjLQEBAJ7UyWD3dZzhvlaAwunsk7DG
- 3bHcln8DMpIJVXht78sL
- =IE0r
- -----END PGP PUBLIC KEY BLOCK-----
- register: signed_by_inline
-
-- name: Change signed_by to URL
- deb822_repository:
- name: ansible-test
- types: deb
- uris: https://deb.debian.org
- suites: stable
- components:
- - main
- - contrib
- - non-free
- signed_by: https://ci-files.testing.ansible.com/test/integration/targets/apt_key/apt-key-example-binary.gpg
- register: signed_by_url
-
-- assert:
- that:
- - signed_by_inline.key_filename is none
- - signed_by_inline.repo|trim == signed_by_inline_expected
- - signed_by_url is changed
- - signed_by_url.key_filename == '/etc/apt/keyrings/ansible-test.gpg'
- - >
- 'BEGIN' not in signed_by_url.repo
- vars:
- signed_by_inline_expected: |-
- X-Repolib-Name: ansible-test
- Types: deb
- URIs: https://deb.debian.org
- Suites: stable
- Components: main contrib non-free
- Signed-By:
- -----BEGIN PGP PUBLIC KEY BLOCK-----
- .
- mDMEYCQjIxYJKwYBBAHaRw8BAQdAD/P5Nvvnvk66SxBBHDbhRml9ORg1WV5CvzKY
- CuMfoIS0BmFiY2RlZoiQBBMWCgA4FiEErCIG1VhKWMWo2yfAREZd5NfO31cFAmAk
- IyMCGyMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQREZd5NfO31fbOwD6ArzS
- dM0Dkd5h2Ujy1b6KcAaVW9FOa5UNfJ9FFBtjLQEBAJ7UyWD3dZzhvlaAwunsk7DG
- 3bHcln8DMpIJVXht78sL
- =IE0r
- -----END PGP PUBLIC KEY BLOCK-----
-
-- name: remove ansible-test repo
- deb822_repository:
- name: ansible-test
- state: absent
- register: ansible_test_repo_remove
-
-- name: check for ansible-test repo and key
- stat:
- path: '{{ item }}'
- register: ansible_test_repo_stats
- loop:
- - /etc/apt/sources.list.d/ansible-test.sources
- - /etc/apt/keyrings/ansible-test.gpg
-
-- assert:
- that:
- - ansible_test_repo_remove is changed
- - ansible_test_repo_stats.results|map(attribute='stat')|selectattr('exists') == []
-
-- name: Check if http-agent works when using cloudflare repo - check_mode
- deb822_repository:
- name: cloudflared
- types: deb
- uris: https://pkg.cloudflare.com/cloudflared
- suites: "bullseye"
- components: main
- signed_by: https://pkg.cloudflare.com/cloudflare-main.gpg
- state: present
- check_mode: true
- register: ansible_test_http_agent
-
-- assert:
- that:
- - ansible_test_http_agent is changed
diff --git a/test/integration/targets/debconf/tasks/main.yml b/test/integration/targets/debconf/tasks/main.yml
index f9236268..d3d63cdf 100644
--- a/test/integration/targets/debconf/tasks/main.yml
+++ b/test/integration/targets/debconf/tasks/main.yml
@@ -33,44 +33,4 @@
- 'debconf_test0.current is defined'
- '"tzdata/Zones/Etc" in debconf_test0.current'
- 'debconf_test0.current["tzdata/Zones/Etc"] == "UTC"'
-
- - name: install debconf-utils
- apt:
- name: debconf-utils
- state: present
- register: debconf_utils_deb_install
-
- - name: Check if password is set
- debconf:
- name: ddclient
- question: ddclient/password
- value: "MySecretValue"
- vtype: password
- register: debconf_test1
-
- - name: validate results for test 1
- assert:
- that:
- - debconf_test1.changed
-
- - name: Change password again
- debconf:
- name: ddclient
- question: ddclient/password
- value: "MySecretValue"
- vtype: password
- no_log: yes
- register: debconf_test2
-
- - name: validate results for test 1
- assert:
- that:
- - not debconf_test2.changed
- always:
- - name: uninstall debconf-utils
- apt:
- name: debconf-utils
- state: absent
- when: debconf_utils_deb_install is changed
-
- when: ansible_distribution in ('Ubuntu', 'Debian') \ No newline at end of file
+ when: ansible_distribution in ('Ubuntu', 'Debian')
diff --git a/test/integration/targets/delegate_to/delegate_local_from_root.yml b/test/integration/targets/delegate_to/delegate_local_from_root.yml
index b44f83bd..c9be4ff2 100644
--- a/test/integration/targets/delegate_to/delegate_local_from_root.yml
+++ b/test/integration/targets/delegate_to/delegate_local_from_root.yml
@@ -3,7 +3,7 @@
gather_facts: false
remote_user: root
tasks:
- - name: ensure we copy w/o errors due to remote user not being overridden
+ - name: ensure we copy w/o errors due to remote user not being overriden
copy:
src: testfile
dest: "{{ playbook_dir }}"
diff --git a/test/integration/targets/delegate_to/runme.sh b/test/integration/targets/delegate_to/runme.sh
index e0dcc746..1bdf27cf 100755
--- a/test/integration/targets/delegate_to/runme.sh
+++ b/test/integration/targets/delegate_to/runme.sh
@@ -76,7 +76,3 @@ ansible-playbook test_delegate_to_lookup_context.yml -i inventory -v "$@"
ansible-playbook delegate_local_from_root.yml -i inventory -v "$@" -e 'ansible_user=root'
ansible-playbook delegate_with_fact_from_delegate_host.yml "$@"
ansible-playbook delegate_facts_loop.yml -i inventory -v "$@"
-ansible-playbook test_random_delegate_to_with_loop.yml -i inventory -v "$@"
-
-# Run playbook multiple times to ensure there are no false-negatives
-for i in $(seq 0 10); do ansible-playbook test_random_delegate_to_without_loop.yml -i inventory -v "$@"; done;
diff --git a/test/integration/targets/delegate_to/test_delegate_to.yml b/test/integration/targets/delegate_to/test_delegate_to.yml
index eb601e02..dcfa9d03 100644
--- a/test/integration/targets/delegate_to/test_delegate_to.yml
+++ b/test/integration/targets/delegate_to/test_delegate_to.yml
@@ -1,9 +1,9 @@
- hosts: testhost3
vars:
- template_role: ./roles/test_template
- output_dir: "{{ playbook_dir }}"
- templated_var: foo
- templated_dict: { 'hello': 'world' }
+ - template_role: ./roles/test_template
+ - output_dir: "{{ playbook_dir }}"
+ - templated_var: foo
+ - templated_dict: { 'hello': 'world' }
tasks:
- name: Test no delegate_to
setup:
@@ -57,25 +57,6 @@
- name: remove test file
file: path={{ output_dir }}/tmp.txt state=absent
- - name: Use omit to thwart delegation
- ping:
- delegate_to: "{{ jenkins_install_key_on|default(omit) }}"
- register: d_omitted
-
- - name: Use empty to thwart delegation should fail
- ping:
- delegate_to: "{{ jenkins_install_key_on }}"
- when: jenkins_install_key_on != ""
- vars:
- jenkins_install_key_on: ''
- ignore_errors: true
- register: d_empty
-
- - name: Ensure previous 2 tests actually did what was expected
- assert:
- that:
- - d_omitted is success
- - d_empty is failed
- name: verify delegation with per host vars
hosts: testhost6
diff --git a/test/integration/targets/delegate_to/test_random_delegate_to_with_loop.yml b/test/integration/targets/delegate_to/test_random_delegate_to_with_loop.yml
deleted file mode 100644
index cd7b888b..00000000
--- a/test/integration/targets/delegate_to/test_random_delegate_to_with_loop.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-- hosts: localhost
- gather_facts: false
- tasks:
- - add_host:
- name: 'host{{ item }}'
- groups:
- - test
- loop: '{{ range(10) }}'
-
- # This task may fail, if it does, it means the same thing as if the assert below fails
- - set_fact:
- dv: '{{ ansible_delegated_vars[ansible_host]["ansible_host"] }}'
- delegate_to: '{{ groups.test|random }}'
- delegate_facts: true
- # Purposefully smaller loop than group count
- loop: '{{ range(5) }}'
-
-- hosts: test
- gather_facts: false
- tasks:
- - assert:
- that:
- - dv == inventory_hostname
- # The small loop above means we won't set this var for every host
- # and a smaller loop is faster, and may catch the error in the above task
- when: dv is defined
diff --git a/test/integration/targets/delegate_to/test_random_delegate_to_without_loop.yml b/test/integration/targets/delegate_to/test_random_delegate_to_without_loop.yml
deleted file mode 100644
index 95278628..00000000
--- a/test/integration/targets/delegate_to/test_random_delegate_to_without_loop.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-- hosts: localhost
- gather_facts: false
- tasks:
- - add_host:
- name: 'host{{ item }}'
- groups:
- - test
- loop: '{{ range(10) }}'
-
- - set_fact:
- dv: '{{ ansible_delegated_vars[ansible_host]["ansible_host"] }}'
- delegate_to: '{{ groups.test|random }}'
- delegate_facts: true
diff --git a/test/integration/targets/dnf/aliases b/test/integration/targets/dnf/aliases
index b12f3547..d6f27b8e 100644
--- a/test/integration/targets/dnf/aliases
+++ b/test/integration/targets/dnf/aliases
@@ -1,4 +1,6 @@
destructive
shippable/posix/group1
+skip/power/centos
skip/freebsd
+skip/osx
skip/macos
diff --git a/test/integration/targets/dnf/tasks/dnf.yml b/test/integration/targets/dnf/tasks/dnf.yml
index 9845f3db..ec1c36f8 100644
--- a/test/integration/targets/dnf/tasks/dnf.yml
+++ b/test/integration/targets/dnf/tasks/dnf.yml
@@ -224,7 +224,7 @@
- assert:
that:
- dnf_result is success
- - dnf_result.results|length >= 2
+ - dnf_result.results|length == 2
- "dnf_result.results[0].startswith('Removed: ')"
- "dnf_result.results[1].startswith('Removed: ')"
@@ -427,10 +427,6 @@
- shell: 'dnf -y group install "Custom Group" && dnf -y group remove "Custom Group"'
register: shell_dnf_result
-- dnf:
- name: "@Custom Group"
- state: absent
-
# GROUP UPGRADE - this will go to the same method as group install
# but through group_update - it is its invocation we're testing here
# see commit 119c9e5d6eb572c4a4800fbe8136095f9063c37b
@@ -450,10 +446,6 @@
# cleanup until https://github.com/ansible/ansible/issues/27377 is resolved
- shell: dnf -y group install "Custom Group" && dnf -y group remove "Custom Group"
-- dnf:
- name: "@Custom Group"
- state: absent
-
- name: try to install non existing group
dnf:
name: "@non-existing-group"
@@ -559,35 +551,30 @@
- "'No package non-existent-rpm available' in dnf_result['failures'][0]"
- "'Failed to install some of the specified packages' in dnf_result['msg']"
-- name: ensure sos isn't installed
+- name: use latest to install httpd
dnf:
- name: sos
- state: absent
-
-- name: use latest to install sos
- dnf:
- name: sos
+ name: httpd
state: latest
register: dnf_result
-- name: verify sos was installed
+- name: verify httpd was installed
assert:
that:
- - dnf_result is changed
+ - "'changed' in dnf_result"
-- name: uninstall sos
+- name: uninstall httpd
dnf:
- name: sos
+ name: httpd
state: removed
-- name: update sos only if it exists
+- name: update httpd only if it exists
dnf:
- name: sos
+ name: httpd
state: latest
update_only: yes
register: dnf_result
-- name: verify sos not installed
+- name: verify httpd not installed
assert:
that:
- "not dnf_result is changed"
@@ -668,28 +655,6 @@
- "'changed' in dnf_result"
- "'results' in dnf_result"
-# Install RPM from url with update_only
-- name: install from url with update_only
- dnf:
- name: "file://{{ pkg_path }}"
- state: latest
- update_only: true
- disable_gpg_check: true
- register: dnf_result
-
-- name: verify installation
- assert:
- that:
- - "dnf_result is success"
- - "not dnf_result is changed"
- - "dnf_result is not failed"
-
-- name: verify dnf module outputs
- assert:
- that:
- - "'changed' in dnf_result"
- - "'results' in dnf_result"
-
- name: Create a temp RPM file which does not contain nevra information
file:
name: "/tmp/non_existent_pkg.rpm"
diff --git a/test/integration/targets/dnf/tasks/main.yml b/test/integration/targets/dnf/tasks/main.yml
index 4941e2c3..65b77ceb 100644
--- a/test/integration/targets/dnf/tasks/main.yml
+++ b/test/integration/targets/dnf/tasks/main.yml
@@ -61,7 +61,6 @@
when:
- (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('29', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
- - not dnf5|default(false)
tags:
- dnf_modularity
@@ -70,6 +69,5 @@
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
- include_tasks: cacheonly.yml
- when:
- - (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('23', '>=')) or
- (ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
+ when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('23', '>=')) or
+ (ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
diff --git a/test/integration/targets/dnf/tasks/skip_broken_and_nobest.yml b/test/integration/targets/dnf/tasks/skip_broken_and_nobest.yml
index f54c0a83..503cb4c3 100644
--- a/test/integration/targets/dnf/tasks/skip_broken_and_nobest.yml
+++ b/test/integration/targets/dnf/tasks/skip_broken_and_nobest.yml
@@ -240,8 +240,7 @@
- name: Do an "upgrade" to an older version of broken-a, allow_downgrade=false
dnf:
name:
- #- broken-a-1.2.3-1*
- - broken-a-1.2.3-1.el7.x86_64
+ - broken-a-1.2.3-1*
state: latest
allow_downgrade: false
check_mode: true
diff --git a/test/integration/targets/dnf/tasks/test_sos_removal.yml b/test/integration/targets/dnf/tasks/test_sos_removal.yml
index 5e161dbb..0d70cf78 100644
--- a/test/integration/targets/dnf/tasks/test_sos_removal.yml
+++ b/test/integration/targets/dnf/tasks/test_sos_removal.yml
@@ -15,5 +15,5 @@
that:
- sos_rm is successful
- sos_rm is changed
- - sos_rm.results|select("contains", "Removed: sos-{{ sos_version }}-{{ sos_release }}")|length > 0
- - sos_rm.results|length > 0
+ - "'Removed: sos-' ~ sos_version ~ '-' ~ sos_release in sos_rm.results[0]"
+ - sos_rm.results|length == 1
diff --git a/test/integration/targets/dnf5/aliases b/test/integration/targets/dnf5/aliases
deleted file mode 100644
index 4baf6e62..00000000
--- a/test/integration/targets/dnf5/aliases
+++ /dev/null
@@ -1,6 +0,0 @@
-destructive
-shippable/posix/group1
-skip/freebsd
-skip/macos
-context/target
-needs/target/dnf
diff --git a/test/integration/targets/dnf5/playbook.yml b/test/integration/targets/dnf5/playbook.yml
deleted file mode 100644
index 16dfd22e..00000000
--- a/test/integration/targets/dnf5/playbook.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-- hosts: localhost
- tasks:
- - block:
- - command: "dnf install -y 'dnf-command(copr)'"
- - command: dnf copr enable -y rpmsoftwaremanagement/dnf5-unstable
- - command: dnf install -y python3-libdnf5
-
- - include_role:
- name: dnf
- vars:
- dnf5: true
- dnf_log_files:
- - /var/log/dnf5.log
- when:
- - ansible_distribution == 'Fedora'
- - ansible_distribution_major_version is version('37', '>=')
- module_defaults:
- dnf:
- use_backend: dnf5
diff --git a/test/integration/targets/dnf5/runme.sh b/test/integration/targets/dnf5/runme.sh
deleted file mode 100755
index 51a6bf45..00000000
--- a/test/integration/targets/dnf5/runme.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-set -ux
-export ANSIBLE_ROLES_PATH=../
-ansible-playbook playbook.yml "$@"
diff --git a/test/integration/targets/dpkg_selections/aliases b/test/integration/targets/dpkg_selections/aliases
index 9c44d752..c0d5684b 100644
--- a/test/integration/targets/dpkg_selections/aliases
+++ b/test/integration/targets/dpkg_selections/aliases
@@ -1,5 +1,6 @@
shippable/posix/group1
destructive
skip/freebsd
+skip/osx
skip/macos
skip/rhel
diff --git a/test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml b/test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml
index 016d7716..080db262 100644
--- a/test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml
+++ b/test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml
@@ -87,15 +87,3 @@
apt:
name: hello
state: absent
-
-- name: Try to select non-existent package
- dpkg_selections:
- name: kernel
- selection: hold
- ignore_errors: yes
- register: result
-
-- name: Check that module fails for non-existent package
- assert:
- that:
- - "'Failed to find package' in result.msg"
diff --git a/test/integration/targets/egg-info/lookup_plugins/import_pkg_resources.py b/test/integration/targets/egg-info/lookup_plugins/import_pkg_resources.py
index 28227fce..c0c5ccd5 100644
--- a/test/integration/targets/egg-info/lookup_plugins/import_pkg_resources.py
+++ b/test/integration/targets/egg-info/lookup_plugins/import_pkg_resources.py
@@ -1,7 +1,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import pkg_resources # pylint: disable=unused-import
+import pkg_resources
from ansible.plugins.lookup import LookupBase
diff --git a/test/integration/targets/environment/test_environment.yml b/test/integration/targets/environment/test_environment.yml
index f295cf3c..43f9c74e 100644
--- a/test/integration/targets/environment/test_environment.yml
+++ b/test/integration/targets/environment/test_environment.yml
@@ -7,8 +7,8 @@
- hosts: testhost
vars:
- test1:
- key1: val1
+ - test1:
+ key1: val1
environment:
PATH: '{{ansible_env.PATH + ":/lola"}}'
lola: 'ido'
@@ -41,9 +41,9 @@
- hosts: testhost
vars:
- test1:
- key1: val1
- test2:
+ - test1:
+ key1: val1
+ - test2:
key1: not1
other1: val2
environment: "{{test1}}"
diff --git a/test/integration/targets/error_from_connection/connection_plugins/dummy.py b/test/integration/targets/error_from_connection/connection_plugins/dummy.py
index d322fe0d..59a81a1b 100644
--- a/test/integration/targets/error_from_connection/connection_plugins/dummy.py
+++ b/test/integration/targets/error_from_connection/connection_plugins/dummy.py
@@ -11,6 +11,7 @@ DOCUMENTATION = """
version_added: "2.0"
options: {}
"""
+import ansible.constants as C
from ansible.errors import AnsibleError
from ansible.plugins.connection import ConnectionBase
diff --git a/test/integration/targets/expect/tasks/main.yml b/test/integration/targets/expect/tasks/main.yml
index 2aef5957..7bf18c5e 100644
--- a/test/integration/targets/expect/tasks/main.yml
+++ b/test/integration/targets/expect/tasks/main.yml
@@ -148,15 +148,6 @@
- "echo_result.stdout_lines[-2] == 'foobar'"
- "echo_result.stdout_lines[-1] == 'bar'"
-- name: test timeout is valid as null
- expect:
- command: "{{ansible_python_interpreter}} {{test_command_file}}"
- responses:
- foo: bar
- echo: true
- timeout: null # wait indefinitely
- timeout: 2 # but shouldn't be waiting long
-
- name: test response list
expect:
command: "{{ansible_python_interpreter}} {{test_command_file}} foo foo"
diff --git a/test/integration/targets/facts_linux_network/aliases b/test/integration/targets/facts_linux_network/aliases
index c9e1dc55..100ce23a 100644
--- a/test/integration/targets/facts_linux_network/aliases
+++ b/test/integration/targets/facts_linux_network/aliases
@@ -1,6 +1,7 @@
needs/privileged
shippable/posix/group1
skip/freebsd
+skip/osx
skip/macos
context/target
destructive
diff --git a/test/integration/targets/fetch/roles/fetch_tests/tasks/failures.yml b/test/integration/targets/fetch/roles/fetch_tests/tasks/failures.yml
index d0bf9bdc..8a6b5b7b 100644
--- a/test/integration/targets/fetch/roles/fetch_tests/tasks/failures.yml
+++ b/test/integration/targets/fetch/roles/fetch_tests/tasks/failures.yml
@@ -28,15 +28,6 @@
register: failed_fetch_dest_dir
ignore_errors: true
-- name: Test unreachable
- fetch:
- src: "{{ remote_tmp_dir }}/orig"
- dest: "{{ output_dir }}"
- register: unreachable_fetch
- ignore_unreachable: true
- vars:
- ansible_user: wrong
-
- name: Ensure fetch failed
assert:
that:
@@ -48,4 +39,3 @@
- failed_fetch_no_access.msg is search('file is not readable')
- failed_fetch_dest_dir is failed
- failed_fetch_dest_dir.msg is search('dest is an existing directory')
- - unreachable_fetch is unreachable
diff --git a/test/integration/targets/file/tasks/link_rewrite.yml b/test/integration/targets/file/tasks/link_rewrite.yml
index 2416c2ca..b0e1af3e 100644
--- a/test/integration/targets/file/tasks/link_rewrite.yml
+++ b/test/integration/targets/file/tasks/link_rewrite.yml
@@ -16,11 +16,11 @@
dest: "{{ tempdir.path }}/somelink"
state: link
-- stat:
+- stat:
path: "{{ tempdir.path }}/somelink"
register: link
-- stat:
+- stat:
path: "{{ tempdir.path }}/somefile"
register: file
@@ -32,12 +32,12 @@
- file:
path: "{{ tempdir.path }}/somelink"
mode: 0644
-
-- stat:
+
+- stat:
path: "{{ tempdir.path }}/somelink"
register: link
-- stat:
+- stat:
path: "{{ tempdir.path }}/somefile"
register: file
diff --git a/test/integration/targets/file/tasks/main.yml b/test/integration/targets/file/tasks/main.yml
index c1b4c791..a5bd68d7 100644
--- a/test/integration/targets/file/tasks/main.yml
+++ b/test/integration/targets/file/tasks/main.yml
@@ -779,7 +779,7 @@
register: touch_result_in_check_mode_fails_not_existing_group
- assert:
- that:
+ that:
- touch_result_in_check_mode_not_existing.changed
- touch_result_in_check_mode_preserve_access_time.changed
- touch_result_in_check_mode_change_only_mode.changed
diff --git a/test/integration/targets/filter_core/tasks/main.yml b/test/integration/targets/filter_core/tasks/main.yml
index 9d287a18..2d084191 100644
--- a/test/integration/targets/filter_core/tasks/main.yml
+++ b/test/integration/targets/filter_core/tasks/main.yml
@@ -454,38 +454,6 @@
- password_hash_2 is failed
- "'not support' in password_hash_2.msg"
-- name: install passlib if needed
- pip:
- name: passlib
- state: present
- register: installed_passlib
-
-- name: test using passlib with an unsupported hash type
- set_fact:
- foo: '{{"hey"|password_hash("msdcc")}}'
- ignore_errors: yes
- register: unsupported_hash_type
-
-- name: remove passlib if it was installed
- pip:
- name: passlib
- state: absent
- when: installed_passlib.changed
-
-- assert:
- that:
- - unsupported_hash_type.msg == msg
- vars:
- msg: "msdcc is not in the list of supported passlib algorithms: md5, blowfish, sha256, sha512"
-
-- name: test password_hash can work with bcrypt without passlib installed
- debug:
- msg: "{{ 'somestring'|password_hash('bcrypt') }}"
- register: crypt_bcrypt
- # Some implementations of crypt do not fail outright and return some short value.
- failed_when: crypt_bcrypt is failed or (crypt_bcrypt.msg|length|int) != 60
- when: ansible_facts.os_family in ['RedHat', 'Debian']
-
- name: Verify to_uuid throws on weird namespace
set_fact:
foo: '{{"hey"|to_uuid(namespace=22)}}'
diff --git a/test/integration/targets/filter_encryption/base.yml b/test/integration/targets/filter_encryption/base.yml
index 1479f734..8bf25f77 100644
--- a/test/integration/targets/filter_encryption/base.yml
+++ b/test/integration/targets/filter_encryption/base.yml
@@ -2,7 +2,6 @@
gather_facts: true
vars:
data: secret
- data2: 'foo: bar\n'
dvault: '{{ "secret"|vault("test")}}'
password: test
s_32: '{{(2**31-1)}}'
@@ -22,15 +21,6 @@
is_64: '{{ "64" in ansible_facts["architecture"] }}'
salt: '{{ is_64|bool|ternary(s_64, s_32)|random(seed=inventory_hostname)}}'
vaultedstring: '{{ is_64|bool|ternary(vaultedstring_64, vaultedstring_32) }}'
- # command line vaulted data2
- vaulted_id: !vault |
- $ANSIBLE_VAULT;1.2;AES256;test1
- 36383733336533656264393332663131613335333332346439356164383935656234663631356430
- 3533353537343834333538356366376233326364613362640a623832636339363966336238393039
- 35316562626335306534356162623030613566306235623863373036626531346364626166656134
- 3063376436656635330a363636376131663362633731313964353061663661376638326461393736
- 3863
- vaulted_to_id: "{{data2|vault('test1@secret', vault_id='test1')}}"
tasks:
- name: check vaulting
@@ -45,5 +35,3 @@
that:
- vaultedstring|unvault(password) == data
- vault|unvault(password) == data
- - vaulted_id|unvault('test1@secret', vault_id='test1')
- - vaulted_to_id|unvault('test1@secret', vault_id='test1')
diff --git a/test/integration/targets/filter_mathstuff/tasks/main.yml b/test/integration/targets/filter_mathstuff/tasks/main.yml
index 33fcae82..019f00e4 100644
--- a/test/integration/targets/filter_mathstuff/tasks/main.yml
+++ b/test/integration/targets/filter_mathstuff/tasks/main.yml
@@ -64,44 +64,44 @@
that:
- '[1,2,3]|intersect([4,5,6]) == []'
- '[1,2,3]|intersect([3,4,5,6]) == [3]'
- - '[1,2,3]|intersect([3,2,1]) | sort == [1,2,3]'
- - '(1,2,3)|intersect((4,5,6)) == []'
- - '(1,2,3)|intersect((3,4,5,6)) == [3]'
+ - '[1,2,3]|intersect([3,2,1]) == [1,2,3]'
+ - '(1,2,3)|intersect((4,5,6))|list == []'
+ - '(1,2,3)|intersect((3,4,5,6))|list == [3]'
- '["a","A","b"]|intersect(["B","c","C"]) == []'
- '["a","A","b"]|intersect(["b","B","c","C"]) == ["b"]'
- - '["a","A","b"]|intersect(["b","A","a"]) | sort(case_sensitive=True) == ["A","a","b"]'
- - '("a","A","b")|intersect(("B","c","C")) == []'
- - '("a","A","b")|intersect(("b","B","c","C")) == ["b"]'
+ - '["a","A","b"]|intersect(["b","A","a"]) == ["a","A","b"]'
+ - '("a","A","b")|intersect(("B","c","C"))|list == []'
+ - '("a","A","b")|intersect(("b","B","c","C"))|list == ["b"]'
- name: Verify difference
tags: difference
assert:
that:
- - '[1,2,3]|difference([4,5,6]) | sort == [1,2,3]'
- - '[1,2,3]|difference([3,4,5,6]) | sort == [1,2]'
+ - '[1,2,3]|difference([4,5,6]) == [1,2,3]'
+ - '[1,2,3]|difference([3,4,5,6]) == [1,2]'
- '[1,2,3]|difference([3,2,1]) == []'
- - '(1,2,3)|difference((4,5,6)) | sort == [1,2,3]'
- - '(1,2,3)|difference((3,4,5,6)) | sort == [1,2]'
- - '["a","A","b"]|difference(["B","c","C"]) | sort(case_sensitive=True) == ["A","a","b"]'
- - '["a","A","b"]|difference(["b","B","c","C"]) | sort(case_sensitive=True) == ["A","a"]'
+ - '(1,2,3)|difference((4,5,6))|list == [1,2,3]'
+ - '(1,2,3)|difference((3,4,5,6))|list == [1,2]'
+ - '["a","A","b"]|difference(["B","c","C"]) == ["a","A","b"]'
+ - '["a","A","b"]|difference(["b","B","c","C"]) == ["a","A"]'
- '["a","A","b"]|difference(["b","A","a"]) == []'
- - '("a","A","b")|difference(("B","c","C")) | sort(case_sensitive=True) == ["A","a","b"]'
- - '("a","A","b")|difference(("b","B","c","C")) | sort(case_sensitive=True) == ["A","a"]'
+ - '("a","A","b")|difference(("B","c","C"))|list|sort(case_sensitive=True) == ["A","a","b"]'
+ - '("a","A","b")|difference(("b","B","c","C"))|list|sort(case_sensitive=True) == ["A","a"]'
- name: Verify symmetric_difference
tags: symmetric_difference
assert:
that:
- - '[1,2,3]|symmetric_difference([4,5,6]) | sort == [1,2,3,4,5,6]'
- - '[1,2,3]|symmetric_difference([3,4,5,6]) | sort == [1,2,4,5,6]'
+ - '[1,2,3]|symmetric_difference([4,5,6]) == [1,2,3,4,5,6]'
+ - '[1,2,3]|symmetric_difference([3,4,5,6]) == [1,2,4,5,6]'
- '[1,2,3]|symmetric_difference([3,2,1]) == []'
- - '(1,2,3)|symmetric_difference((4,5,6)) | sort == [1,2,3,4,5,6]'
- - '(1,2,3)|symmetric_difference((3,4,5,6)) | sort == [1,2,4,5,6]'
- - '["a","A","b"]|symmetric_difference(["B","c","C"]) | sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
- - '["a","A","b"]|symmetric_difference(["b","B","c","C"]) | sort(case_sensitive=True) == ["A","B","C","a","c"]'
+ - '(1,2,3)|symmetric_difference((4,5,6))|list == [1,2,3,4,5,6]'
+ - '(1,2,3)|symmetric_difference((3,4,5,6))|list == [1,2,4,5,6]'
+ - '["a","A","b"]|symmetric_difference(["B","c","C"]) == ["a","A","b","B","c","C"]'
+ - '["a","A","b"]|symmetric_difference(["b","B","c","C"]) == ["a","A","B","c","C"]'
- '["a","A","b"]|symmetric_difference(["b","A","a"]) == []'
- - '("a","A","b")|symmetric_difference(("B","c","C")) | sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
- - '("a","A","b")|symmetric_difference(("b","B","c","C")) | sort(case_sensitive=True) == ["A","B","C","a","c"]'
+ - '("a","A","b")|symmetric_difference(("B","c","C"))|list|sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
+ - '("a","A","b")|symmetric_difference(("b","B","c","C"))|list|sort(case_sensitive=True) == ["A","B","C","a","c"]'
- name: Verify union
tags: union
@@ -112,11 +112,11 @@
- '[1,2,3]|union([3,2,1]) == [1,2,3]'
- '(1,2,3)|union((4,5,6))|list == [1,2,3,4,5,6]'
- '(1,2,3)|union((3,4,5,6))|list == [1,2,3,4,5,6]'
- - '["a","A","b"]|union(["B","c","C"]) | sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
- - '["a","A","b"]|union(["b","B","c","C"]) | sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
- - '["a","A","b"]|union(["b","A","a"]) | sort(case_sensitive=True) == ["A","a","b"]'
- - '("a","A","b")|union(("B","c","C")) | sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
- - '("a","A","b")|union(("b","B","c","C")) | sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
+ - '["a","A","b"]|union(["B","c","C"]) == ["a","A","b","B","c","C"]'
+ - '["a","A","b"]|union(["b","B","c","C"]) == ["a","A","b","B","c","C"]'
+ - '["a","A","b"]|union(["b","A","a"]) == ["a","A","b"]'
+ - '("a","A","b")|union(("B","c","C"))|list|sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
+ - '("a","A","b")|union(("b","B","c","C"))|list|sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
- name: Verify min
tags: min
diff --git a/test/integration/targets/find/tasks/main.yml b/test/integration/targets/find/tasks/main.yml
index 9c4a960f..89c62b9b 100644
--- a/test/integration/targets/find/tasks/main.yml
+++ b/test/integration/targets/find/tasks/main.yml
@@ -374,6 +374,3 @@
- 'remote_tmp_dir_test ~ "/astest/old.txt" in astest_list'
- 'remote_tmp_dir_test ~ "/astest/.hidden.txt" in astest_list'
- '"checksum" in result.files[0]'
-
-- name: Run mode tests
- import_tasks: mode.yml
diff --git a/test/integration/targets/find/tasks/mode.yml b/test/integration/targets/find/tasks/mode.yml
deleted file mode 100644
index 1c900ea2..00000000
--- a/test/integration/targets/find/tasks/mode.yml
+++ /dev/null
@@ -1,68 +0,0 @@
-- name: create test files for mode matching
- file:
- path: '{{ remote_tmp_dir_test }}/mode_{{ item }}'
- state: touch
- mode: '{{ item }}'
- loop:
- - '0644'
- - '0444'
- - '0400'
- - '0700'
- - '0666'
-
-- name: exact mode octal
- find:
- path: '{{ remote_tmp_dir_test }}'
- pattern: 'mode_*'
- mode: '0644'
- exact_mode: true
- register: exact_mode_0644
-
-- name: exact mode symbolic
- find:
- path: '{{ remote_tmp_dir_test }}'
- pattern: 'mode_*'
- mode: 'u=rw,g=r,o=r'
- exact_mode: true
- register: exact_mode_0644_symbolic
-
-- name: find all user readable files octal
- find:
- path: '{{ remote_tmp_dir_test }}'
- pattern: 'mode_*'
- mode: '0400'
- exact_mode: false
- register: user_readable_octal
-
-- name: find all user readable files symbolic
- find:
- path: '{{ remote_tmp_dir_test }}'
- pattern: 'mode_*'
- mode: 'u=r'
- exact_mode: false
- register: user_readable_symbolic
-
-- name: all other readable files octal
- find:
- path: '{{ remote_tmp_dir_test }}'
- pattern: 'mode_*'
- mode: '0004'
- exact_mode: false
- register: other_readable_octal
-
-- name: all other readable files symbolic
- find:
- path: '{{ remote_tmp_dir_test }}'
- pattern: 'mode_*'
- mode: 'o=r'
- exact_mode: false
- register: other_readable_symbolic
-
-- assert:
- that:
- - exact_mode_0644.files == exact_mode_0644_symbolic.files
- - exact_mode_0644.files[0].path == remote_tmp_dir_test ~ '/mode_0644'
- - user_readable_octal.files == user_readable_symbolic.files
- - user_readable_octal.files|map(attribute='path')|map('basename')|sort == ['mode_0400', 'mode_0444', 'mode_0644', 'mode_0666', 'mode_0700']
- - other_readable_octal.files == other_readable_symbolic.files
- - other_readable_octal.files|map(attribute='path')|map('basename')|sort == ['mode_0444', 'mode_0644', 'mode_0666']
diff --git a/test/integration/targets/fork_safe_stdio/aliases b/test/integration/targets/fork_safe_stdio/aliases
index 7761837e..e968db72 100644
--- a/test/integration/targets/fork_safe_stdio/aliases
+++ b/test/integration/targets/fork_safe_stdio/aliases
@@ -1,3 +1,3 @@
shippable/posix/group3
context/controller
-needs/target/test_utils
+skip/macos
diff --git a/test/integration/targets/fork_safe_stdio/runme.sh b/test/integration/targets/fork_safe_stdio/runme.sh
index 863582f3..4438c3fe 100755
--- a/test/integration/targets/fork_safe_stdio/runme.sh
+++ b/test/integration/targets/fork_safe_stdio/runme.sh
@@ -7,7 +7,7 @@ echo "testing for stdio deadlock on forked workers (10s timeout)..."
# Enable a callback that trips deadlocks on forked-child stdout, time out after 10s; forces running
# in a pty, since that tends to be much slower than raw file I/O and thus more likely to trigger the deadlock.
# Redirect stdout to /dev/null since it's full of non-printable garbage we don't want to display unless it failed
-ANSIBLE_CALLBACKS_ENABLED=spewstdio SPEWSTDIO_ENABLED=1 python run-with-pty.py ../test_utils/scripts/timeout.py -- 10 ansible-playbook -i hosts -f 5 test.yml > stdout.txt && RC=$? || RC=$?
+ANSIBLE_CALLBACKS_ENABLED=spewstdio SPEWSTDIO_ENABLED=1 python run-with-pty.py timeout 10s ansible-playbook -i hosts -f 5 test.yml > stdout.txt && RC=$? || RC=$?
if [ $RC != 0 ]; then
echo "failed; likely stdout deadlock. dumping raw output (may be very large)"
diff --git a/test/integration/targets/gathering_facts/library/dummy1 b/test/integration/targets/gathering_facts/library/dummy1
deleted file mode 100755
index 5a10e2dd..00000000
--- a/test/integration/targets/gathering_facts/library/dummy1
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-CANARY="${OUTPUT_DIR}/canary.txt"
-
-echo "$0" >> "${CANARY}"
-LINES=0
-
-until test "${LINES}" -gt 2
-do
- LINES=`wc -l "${CANARY}" |awk '{print $1}'`
- sleep 1
-done
-
-echo '{
- "changed": false,
- "ansible_facts": {
- "dummy": "$0"
- }
-}'
diff --git a/test/integration/targets/gathering_facts/library/dummy2 b/test/integration/targets/gathering_facts/library/dummy2
deleted file mode 100755
index 5a10e2dd..00000000
--- a/test/integration/targets/gathering_facts/library/dummy2
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-CANARY="${OUTPUT_DIR}/canary.txt"
-
-echo "$0" >> "${CANARY}"
-LINES=0
-
-until test "${LINES}" -gt 2
-do
- LINES=`wc -l "${CANARY}" |awk '{print $1}'`
- sleep 1
-done
-
-echo '{
- "changed": false,
- "ansible_facts": {
- "dummy": "$0"
- }
-}'
diff --git a/test/integration/targets/gathering_facts/library/dummy3 b/test/integration/targets/gathering_facts/library/dummy3
deleted file mode 100755
index 5a10e2dd..00000000
--- a/test/integration/targets/gathering_facts/library/dummy3
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-CANARY="${OUTPUT_DIR}/canary.txt"
-
-echo "$0" >> "${CANARY}"
-LINES=0
-
-until test "${LINES}" -gt 2
-do
- LINES=`wc -l "${CANARY}" |awk '{print $1}'`
- sleep 1
-done
-
-echo '{
- "changed": false,
- "ansible_facts": {
- "dummy": "$0"
- }
-}'
diff --git a/test/integration/targets/gathering_facts/library/file_utils.py b/test/integration/targets/gathering_facts/library/file_utils.py
index 38fa9265..58538029 100644
--- a/test/integration/targets/gathering_facts/library/file_utils.py
+++ b/test/integration/targets/gathering_facts/library/file_utils.py
@@ -1,6 +1,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import json
+import sys
+
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.facts.utils import (
get_file_content,
diff --git a/test/integration/targets/gathering_facts/library/slow b/test/integration/targets/gathering_facts/library/slow
deleted file mode 100644
index 3984662e..00000000
--- a/test/integration/targets/gathering_facts/library/slow
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/sh
-
-sleep 10
-
-echo '{
- "changed": false,
- "ansible_facts": {
- "factsone": "from slow module",
- "common_fact": "also from slow module",
- "common_dict_fact": {
- "key_one": "from slow ",
- "key_two": "from slow "
- },
- "common_list_fact": [
- "never",
- "does",
- "see"
- ],
- "common_list_fact2": [
- "see",
- "does",
- "never",
- "theee"
- ]
- }
-}'
diff --git a/test/integration/targets/gathering_facts/runme.sh b/test/integration/targets/gathering_facts/runme.sh
index a90de0f0..c1df560c 100755
--- a/test/integration/targets/gathering_facts/runme.sh
+++ b/test/integration/targets/gathering_facts/runme.sh
@@ -25,17 +25,3 @@ ansible-playbook test_module_defaults.yml "$@" --tags default_fact_module
ANSIBLE_FACTS_MODULES='ansible.legacy.setup' ansible-playbook test_module_defaults.yml "$@" --tags custom_fact_module
ansible-playbook test_module_defaults.yml "$@" --tags networking
-
-# test it works by default
-ANSIBLE_FACTS_MODULES='ansible.legacy.slow' ansible -m gather_facts localhost --playbook-dir ./ "$@"
-
-# test that gather_facts will timeout parallel modules that dont support gather_timeout when using gather_Timeout
-ANSIBLE_FACTS_MODULES='ansible.legacy.slow' ansible -m gather_facts localhost --playbook-dir ./ -a 'gather_timeout=1 parallel=true' "$@" 2>&1 |grep 'Timeout exceeded'
-
-# test that gather_facts parallel w/o timing out
-ANSIBLE_FACTS_MODULES='ansible.legacy.slow' ansible -m gather_facts localhost --playbook-dir ./ -a 'gather_timeout=30 parallel=true' "$@" 2>&1 |grep -v 'Timeout exceeded'
-
-
-# test parallelism
-ANSIBLE_FACTS_MODULES='dummy1,dummy2,dummy3' ansible -m gather_facts localhost --playbook-dir ./ -a 'gather_timeout=30 parallel=true' "$@" 2>&1
-rm "${OUTPUT_DIR}/canary.txt"
diff --git a/test/integration/targets/get_url/tasks/hashlib.yml b/test/integration/targets/get_url/tasks/hashlib.yml
deleted file mode 100644
index cc50ad72..00000000
--- a/test/integration/targets/get_url/tasks/hashlib.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-- name: "Set hash algorithms to test"
- set_fact:
- algorithms:
- sha256: b1b6ce5073c8fac263a8fc5edfffdbd5dec1980c784e09c5bc69f8fb6056f006
- sha384: 298553d31087fd3f6659801d2e5cde3ff63fad609dc50ad8e194dde80bfb8a084edfa761f025928448f39d720fce55f2
- sha512: 69b589f7775fe04244e8a9db216a3c91db1680baa33ccd0c317b8d7f0334433f7362d00c8080b3365bf08d532956ba01dbebc497b51ced8f8b05a44a66b854bf
- sha3_256: 64e5ea73a2f799f35abd0b1242df5e70c84248c9883f89343d4cd5f6d493a139
- sha3_384: 976edebcb496ad8be0f7fa4411cc8e2404e7e65f1088fabf7be44484458726c61d4985bdaeff8700008ed1670a9b982d
- sha3_512: f8cca1d98e750e2c2ab44954dc9f1b6e8e35ace71ffcc1cd21c7770eb8eccfbd77d40b2d7d145120efbbb781599294ccc6148c6cda1aa66146363e5fdddd2336
-
-- name: "Verify various checksum algorithms work"
- get_url:
- url: 'http://localhost:{{ http_port }}/27617.txt' # content is 'ptux'
- dest: '{{ remote_tmp_dir }}/27617.{{ algorithm }}.txt'
- checksum: "{{ algorithm }}:{{ algorithms[algorithm] }}"
- force: yes
- loop: "{{ algorithms.keys() }}"
- loop_control:
- loop_var: algorithm
- when: ansible_python_version.startswith('3.') or not algorithm.startswith('sha3_')
diff --git a/test/integration/targets/get_url/tasks/main.yml b/test/integration/targets/get_url/tasks/main.yml
index c26cc08b..09814c70 100644
--- a/test/integration/targets/get_url/tasks/main.yml
+++ b/test/integration/targets/get_url/tasks/main.yml
@@ -398,8 +398,6 @@
port: '{{ http_port }}'
state: started
-- include_tasks: hashlib.yml
-
- name: download src with sha1 checksum url in check mode
get_url:
url: 'http://localhost:{{ http_port }}/27617.txt'
diff --git a/test/integration/targets/get_url/tasks/use_netrc.yml b/test/integration/targets/get_url/tasks/use_netrc.yml
index 234c904a..e1852a81 100644
--- a/test/integration/targets/get_url/tasks/use_netrc.yml
+++ b/test/integration/targets/get_url/tasks/use_netrc.yml
@@ -22,7 +22,7 @@
register: response_failed
- name: Parse token from msg.txt
- set_fact:
+ set_fact:
token: "{{ (response_failed['content'] | b64decode | from_json).token }}"
- name: assert Test Bearer authorization is failed with netrc
@@ -48,7 +48,7 @@
register: response
- name: Parse token from msg.txt
- set_fact:
+ set_fact:
token: "{{ (response['content'] | b64decode | from_json).token }}"
- name: assert Test Bearer authorization is successfull with use_netrc=False
@@ -64,4 +64,4 @@
state: absent
with_items:
- "{{ remote_tmp_dir }}/netrc"
- - "{{ remote_tmp_dir }}/msg.txt"
+ - "{{ remote_tmp_dir }}/msg.txt" \ No newline at end of file
diff --git a/test/integration/targets/git/tasks/depth.yml b/test/integration/targets/git/tasks/depth.yml
index 20f1b4e9..e0585ca3 100644
--- a/test/integration/targets/git/tasks/depth.yml
+++ b/test/integration/targets/git/tasks/depth.yml
@@ -95,16 +95,14 @@
repo: 'file://{{ repo_dir|expanduser }}/shallow'
dest: '{{ checkout_dir }}'
depth: 1
- version: >-
- {{ git_default_branch }}
+ version: master
- name: DEPTH | run a second time (now fetch, not clone)
git:
repo: 'file://{{ repo_dir|expanduser }}/shallow'
dest: '{{ checkout_dir }}'
depth: 1
- version: >-
- {{ git_default_branch }}
+ version: master
register: git_fetch
- name: DEPTH | ensure the fetch succeeded
@@ -122,8 +120,7 @@
repo: 'file://{{ repo_dir|expanduser }}/shallow'
dest: '{{ checkout_dir }}'
depth: 1
- version: >-
- {{ git_default_branch }}
+ version: master
- name: DEPTH | switch to older branch with depth=1 (uses fetch)
git:
diff --git a/test/integration/targets/git/tasks/forcefully-fetch-tag.yml b/test/integration/targets/git/tasks/forcefully-fetch-tag.yml
index db35e048..47c37478 100644
--- a/test/integration/targets/git/tasks/forcefully-fetch-tag.yml
+++ b/test/integration/targets/git/tasks/forcefully-fetch-tag.yml
@@ -11,7 +11,7 @@
git add leet;
git commit -m uh-oh;
git tag -f herewego;
- git push --tags origin '{{ git_default_branch }}'
+ git push --tags origin master
args:
chdir: "{{ repo_dir }}/tag_force_push_clone1"
@@ -26,7 +26,7 @@
git add leet;
git commit -m uh-oh;
git tag -f herewego;
- git push -f --tags origin '{{ git_default_branch }}'
+ git push -f --tags origin master
args:
chdir: "{{ repo_dir }}/tag_force_push_clone1"
diff --git a/test/integration/targets/git/tasks/gpg-verification.yml b/test/integration/targets/git/tasks/gpg-verification.yml
index bd57ed1d..8c8834a9 100644
--- a/test/integration/targets/git/tasks/gpg-verification.yml
+++ b/test/integration/targets/git/tasks/gpg-verification.yml
@@ -37,10 +37,8 @@
environment:
- GNUPGHOME: "{{ git_gpg_gpghome }}"
shell: |
- set -eEu
-
+ set -e
git init
-
touch an_empty_file
git add an_empty_file
git commit --no-gpg-sign --message "Commit, and don't sign"
@@ -50,11 +48,11 @@
git tag --annotate --message "This is not a signed tag" unsigned_annotated_tag HEAD
git commit --allow-empty --gpg-sign --message "Commit, and sign"
git tag --sign --message "This is a signed tag" signed_annotated_tag HEAD
- git checkout -b some_branch/signed_tip '{{ git_default_branch }}'
+ git checkout -b some_branch/signed_tip master
git commit --allow-empty --gpg-sign --message "Commit, and sign"
- git checkout -b another_branch/unsigned_tip '{{ git_default_branch }}'
+ git checkout -b another_branch/unsigned_tip master
git commit --allow-empty --no-gpg-sign --message "Commit, and don't sign"
- git checkout '{{ git_default_branch }}'
+ git checkout master
args:
chdir: "{{ git_gpg_source }}"
diff --git a/test/integration/targets/git/tasks/localmods.yml b/test/integration/targets/git/tasks/localmods.yml
index 409bbae2..0e0cf684 100644
--- a/test/integration/targets/git/tasks/localmods.yml
+++ b/test/integration/targets/git/tasks/localmods.yml
@@ -1,17 +1,6 @@
# test for https://github.com/ansible/ansible-modules-core/pull/5505
- name: LOCALMODS | prepare old git repo
- shell: |
- set -eEu
-
- rm -rf localmods
- mkdir localmods
- cd localmods
-
- git init
-
- echo "1" > a
- git add a
- git commit -m "1"
+ shell: rm -rf localmods; mkdir localmods; cd localmods; git init; echo "1" > a; git add a; git commit -m "1"
args:
chdir: "{{repo_dir}}"
@@ -66,18 +55,7 @@
# localmods and shallow clone
- name: LOCALMODS | prepare old git repo
- shell: |
- set -eEu
-
- rm -rf localmods
- mkdir localmods
- cd localmods
-
- git init
-
- echo "1" > a
- git add a
- git commit -m "1"
+ shell: rm -rf localmods; mkdir localmods; cd localmods; git init; echo "1" > a; git add a; git commit -m "1"
args:
chdir: "{{repo_dir}}"
diff --git a/test/integration/targets/git/tasks/main.yml b/test/integration/targets/git/tasks/main.yml
index c990251f..ed06eab5 100644
--- a/test/integration/targets/git/tasks/main.yml
+++ b/test/integration/targets/git/tasks/main.yml
@@ -16,37 +16,27 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-# NOTE: Moving `$HOME` to tmp dir allows this integration test be
-# NOTE: non-destructive. There is no other way to instruct Git use a custom
-# NOTE: config path. There are new `$GIT_CONFIG_KEY_{COUNT,KEY,VALUE}` vars
-# NOTE: for setting specific configuration values but those are only available
-# NOTE: since Git v2.31 which is why we cannot rely on them yet.
+- import_tasks: setup.yml
+- import_tasks: setup-local-repos.yml
-- block:
- - import_tasks: setup.yml
- - import_tasks: setup-local-repos.yml
-
- - import_tasks: formats.yml
- - import_tasks: missing_hostkey.yml
- - import_tasks: missing_hostkey_acceptnew.yml
- - import_tasks: no-destination.yml
- - import_tasks: specific-revision.yml
- - import_tasks: submodules.yml
- - import_tasks: change-repo-url.yml
- - import_tasks: depth.yml
- - import_tasks: single-branch.yml
- - import_tasks: checkout-new-tag.yml
- - include_tasks: gpg-verification.yml
- when:
+- import_tasks: formats.yml
+- import_tasks: missing_hostkey.yml
+- import_tasks: missing_hostkey_acceptnew.yml
+- import_tasks: no-destination.yml
+- import_tasks: specific-revision.yml
+- import_tasks: submodules.yml
+- import_tasks: change-repo-url.yml
+- import_tasks: depth.yml
+- import_tasks: single-branch.yml
+- import_tasks: checkout-new-tag.yml
+- include_tasks: gpg-verification.yml
+ when:
- not gpg_version.stderr
- gpg_version.stdout
- not (ansible_os_family == 'RedHat' and ansible_distribution_major_version is version('7', '<'))
- - import_tasks: localmods.yml
- - import_tasks: reset-origin.yml
- - import_tasks: ambiguous-ref.yml
- - import_tasks: archive.yml
- - import_tasks: separate-git-dir.yml
- - import_tasks: forcefully-fetch-tag.yml
- environment:
- HOME: >-
- {{ remote_tmp_dir }}
+- import_tasks: localmods.yml
+- import_tasks: reset-origin.yml
+- import_tasks: ambiguous-ref.yml
+- import_tasks: archive.yml
+- import_tasks: separate-git-dir.yml
+- import_tasks: forcefully-fetch-tag.yml
diff --git a/test/integration/targets/git/tasks/missing_hostkey.yml b/test/integration/targets/git/tasks/missing_hostkey.yml
index d8a2a818..136c5d5d 100644
--- a/test/integration/targets/git/tasks/missing_hostkey.yml
+++ b/test/integration/targets/git/tasks/missing_hostkey.yml
@@ -35,8 +35,7 @@
git:
repo: '{{ repo_format3 }}'
dest: '{{ checkout_dir }}'
- version: >-
- {{ git_default_branch }}
+ version: 'master'
accept_hostkey: false # should already have been accepted
key_file: '{{ github_ssh_private_key }}'
ssh_opts: '-o UserKnownHostsFile={{ remote_tmp_dir }}/known_hosts'
diff --git a/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml b/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml
index 338ae081..3fd19067 100644
--- a/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml
+++ b/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml
@@ -55,8 +55,7 @@
git:
repo: '{{ repo_format3 }}'
dest: '{{ checkout_dir }}'
- version: >-
- {{ git_default_branch }}
+ version: 'master'
accept_newhostkey: false # should already have been accepted
key_file: '{{ github_ssh_private_key }}'
ssh_opts: '-o UserKnownHostsFile={{ remote_tmp_dir }}/known_hosts'
diff --git a/test/integration/targets/git/tasks/reset-origin.yml b/test/integration/targets/git/tasks/reset-origin.yml
index cb497c44..8fddd4b1 100644
--- a/test/integration/targets/git/tasks/reset-origin.yml
+++ b/test/integration/targets/git/tasks/reset-origin.yml
@@ -12,14 +12,7 @@
state: directory
- name: RESET-ORIGIN | Initialise the repo with a file named origin,see github.com/ansible/ansible/pull/22502
- shell: |
- set -eEu
-
- git init
-
- echo "PR 22502" > origin
- git add origin
- git commit -m "PR 22502"
+ shell: git init; echo "PR 22502" > origin; git add origin; git commit -m "PR 22502"
args:
chdir: "{{ repo_dir }}/origin"
diff --git a/test/integration/targets/git/tasks/setup-local-repos.yml b/test/integration/targets/git/tasks/setup-local-repos.yml
index 4626f102..584a1693 100644
--- a/test/integration/targets/git/tasks/setup-local-repos.yml
+++ b/test/integration/targets/git/tasks/setup-local-repos.yml
@@ -9,32 +9,15 @@
- "{{ repo_dir }}/tag_force_push"
- name: SETUP-LOCAL-REPOS | prepare minimal git repo
- shell: |
- set -eEu
-
- git init
-
- echo "1" > a
- git add a
- git commit -m "1"
+ shell: git init; echo "1" > a; git add a; git commit -m "1"
args:
chdir: "{{ repo_dir }}/minimal"
- name: SETUP-LOCAL-REPOS | prepare git repo for shallow clone
shell: |
- set -eEu
-
- git init
-
- echo "1" > a
- git add a
- git commit -m "1"
- git tag earlytag
- git branch earlybranch
-
- echo "2" > a
- git add a
- git commit -m "2"
+ git init;
+ echo "1" > a; git add a; git commit -m "1"; git tag earlytag; git branch earlybranch;
+ echo "2" > a; git add a; git commit -m "2";
args:
chdir: "{{ repo_dir }}/shallow"
@@ -46,10 +29,7 @@
- name: SETUP-LOCAL-REPOS | prepare tmp git repo with two branches
shell: |
- set -eEu
-
git init
-
echo "1" > a; git add a; git commit -m "1"
git checkout -b test_branch; echo "2" > a; git commit -m "2 on branch" a
git checkout -b new_branch; echo "3" > a; git commit -m "3 on new branch" a
@@ -60,9 +40,6 @@
# We make the repo here for consistency with the other repos,
# but we finish setting it up in forcefully-fetch-tag.yml.
- name: SETUP-LOCAL-REPOS | prepare tag_force_push git repo
- shell: |
- set -eEu
-
- git init --bare
+ shell: git init --bare
args:
chdir: "{{ repo_dir }}/tag_force_push"
diff --git a/test/integration/targets/git/tasks/setup.yml b/test/integration/targets/git/tasks/setup.yml
index 982c03ff..06511053 100644
--- a/test/integration/targets/git/tasks/setup.yml
+++ b/test/integration/targets/git/tasks/setup.yml
@@ -28,44 +28,10 @@
register: gpg_version
- name: SETUP | set git global user.email if not already set
- shell: git config --global user.email 'noreply@example.com'
+ shell: git config --global user.email || git config --global user.email "noreply@example.com"
- name: SETUP | set git global user.name if not already set
- shell: git config --global user.name 'Ansible Test Runner'
-
-- name: SETUP | set git global init.defaultBranch
- shell: >-
- git config --global init.defaultBranch '{{ git_default_branch }}'
-
-- name: SETUP | set git global init.templateDir
- # NOTE: This custom Git repository template emulates the `init.defaultBranch`
- # NOTE: setting on Git versions below 2.28.
- # NOTE: Ref: https://superuser.com/a/1559582.
- # NOTE: Other workarounds mentioned there, like invoking
- # NOTE: `git symbolic-ref HEAD refs/heads/main` after each `git init` turned
- # NOTE: out to have mysterious side effects that break the tests in surprising
- # NOTE: ways.
- shell: |
- set -eEu
-
- git config --global \
- init.templateDir '{{ remote_tmp_dir }}/git-templates/git.git'
-
- mkdir -pv '{{ remote_tmp_dir }}/git-templates'
- set +e
- GIT_TEMPLATES_DIR=$(\
- 2>/dev/null \
- ls -1d \
- '/Library/Developer/CommandLineTools/usr/share/git-core/templates' \
- '/usr/local/share/git-core/templates' \
- '/usr/share/git-core/templates' \
- )
- set -e
- >&2 echo "Found Git's default templates directory: ${GIT_TEMPLATES_DIR}"
- cp -r "${GIT_TEMPLATES_DIR}" '{{ remote_tmp_dir }}/git-templates/git.git'
-
- echo 'ref: refs/heads/{{ git_default_branch }}' \
- > '{{ remote_tmp_dir }}/git-templates/git.git/HEAD'
+ shell: git config --global user.name || git config --global user.name "Ansible Test Runner"
- name: SETUP | create repo_dir
file:
diff --git a/test/integration/targets/git/tasks/single-branch.yml b/test/integration/targets/git/tasks/single-branch.yml
index ca8457ac..5cfb4d5b 100644
--- a/test/integration/targets/git/tasks/single-branch.yml
+++ b/test/integration/targets/git/tasks/single-branch.yml
@@ -52,8 +52,7 @@
repo: 'file://{{ repo_dir|expanduser }}/shallow_branches'
dest: '{{ checkout_dir }}'
single_branch: yes
- version: >-
- {{ git_default_branch }}
+ version: master
register: single_branch_3
- name: SINGLE_BRANCH | Clone example git repo using single_branch with version again
@@ -61,8 +60,7 @@
repo: 'file://{{ repo_dir|expanduser }}/shallow_branches'
dest: '{{ checkout_dir }}'
single_branch: yes
- version: >-
- {{ git_default_branch }}
+ version: master
register: single_branch_4
- name: SINGLE_BRANCH | List revisions
diff --git a/test/integration/targets/git/tasks/specific-revision.yml b/test/integration/targets/git/tasks/specific-revision.yml
index f1fe41d5..26fa7cf3 100644
--- a/test/integration/targets/git/tasks/specific-revision.yml
+++ b/test/integration/targets/git/tasks/specific-revision.yml
@@ -162,14 +162,7 @@
path: "{{ checkout_dir }}"
- name: SPECIFIC-REVISION | prepare origina repo
- shell: |
- set -eEu
-
- git init
-
- echo "1" > a
- git add a
- git commit -m "1"
+ shell: git init; echo "1" > a; git add a; git commit -m "1"
args:
chdir: "{{ checkout_dir }}"
@@ -198,14 +191,7 @@
force: yes
- name: SPECIFIC-REVISION | create new commit in original
- shell: |
- set -eEu
-
- git init
-
- echo "2" > b
- git add b
- git commit -m "2"
+ shell: git init; echo "2" > b; git add b; git commit -m "2"
args:
chdir: "{{ checkout_dir }}"
diff --git a/test/integration/targets/git/vars/main.yml b/test/integration/targets/git/vars/main.yml
index 55c7c438..b38531f3 100644
--- a/test/integration/targets/git/vars/main.yml
+++ b/test/integration/targets/git/vars/main.yml
@@ -41,7 +41,6 @@ repo_update_url_2: 'https://github.com/ansible-test-robinro/git-test-new'
known_host_files:
- "{{ lookup('env','HOME') }}/.ssh/known_hosts"
- '/etc/ssh/ssh_known_hosts'
-git_default_branch: main
git_version_supporting_depth: 1.9.1
git_version_supporting_ls_remote: 1.7.5
git_version_supporting_single_branch: 1.7.10
diff --git a/test/integration/targets/group/files/get_free_gid.py b/test/integration/targets/group/files/get_free_gid.py
deleted file mode 100644
index 4c07b5e3..00000000
--- a/test/integration/targets/group/files/get_free_gid.py
+++ /dev/null
@@ -1,23 +0,0 @@
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import grp
-
-
-def main():
- gids = [g.gr_gid for g in grp.getgrall()]
-
- # Start the gid numbering with 1
- # FreeBSD doesn't support the usage of gid 0, it doesn't fail (rc=0) but instead a number in the normal
- # range is picked.
- i = 1
- while True:
- if i not in gids:
- print(i)
- break
- i += 1
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/integration/targets/group/files/get_gid_for_group.py b/test/integration/targets/group/files/get_gid_for_group.py
deleted file mode 100644
index 5a8cc41f..00000000
--- a/test/integration/targets/group/files/get_gid_for_group.py
+++ /dev/null
@@ -1,18 +0,0 @@
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import grp
-import sys
-
-
-def main():
- group_name = None
- if len(sys.argv) >= 2:
- group_name = sys.argv[1]
-
- print(grp.getgrnam(group_name).gr_gid)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/integration/targets/group/files/gidget.py b/test/integration/targets/group/files/gidget.py
new file mode 100644
index 00000000..4b771516
--- /dev/null
+++ b/test/integration/targets/group/files/gidget.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import grp
+
+gids = [g.gr_gid for g in grp.getgrall()]
+
+i = 0
+while True:
+ if i not in gids:
+ print(i)
+ break
+ i += 1
diff --git a/test/integration/targets/group/tasks/main.yml b/test/integration/targets/group/tasks/main.yml
index 21235240..eb8126dd 100644
--- a/test/integration/targets/group/tasks/main.yml
+++ b/test/integration/targets/group/tasks/main.yml
@@ -16,4 +16,25 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-- import_tasks: tests.yml
+- name: ensure test groups are deleted before the test
+ group:
+ name: '{{ item }}'
+ state: absent
+ loop:
+ - ansibullgroup
+ - ansibullgroup2
+ - ansibullgroup3
+
+- block:
+ - name: run tests
+ include_tasks: tests.yml
+
+ always:
+ - name: remove test groups after test
+ group:
+ name: '{{ item }}'
+ state: absent
+ loop:
+ - ansibullgroup
+ - ansibullgroup2
+ - ansibullgroup3 \ No newline at end of file
diff --git a/test/integration/targets/group/tasks/tests.yml b/test/integration/targets/group/tasks/tests.yml
index eb92cd1d..f9a81220 100644
--- a/test/integration/targets/group/tasks/tests.yml
+++ b/test/integration/targets/group/tasks/tests.yml
@@ -1,412 +1,343 @@
---
-- name: ensure test groups are deleted before the test
+##
+## group add
+##
+
+- name: create group (check mode)
group:
- name: '{{ item }}'
- state: absent
- loop:
- - ansibullgroup
- - ansibullgroup2
- - ansibullgroup3
+ name: ansibullgroup
+ state: present
+ register: create_group_check
+ check_mode: True
+
+- name: get result of create group (check mode)
+ script: 'grouplist.sh "{{ ansible_distribution }}"'
+ register: create_group_actual_check
+
+- name: assert create group (check mode)
+ assert:
+ that:
+ - create_group_check is changed
+ - '"ansibullgroup" not in create_group_actual_check.stdout_lines'
+
+- name: create group
+ group:
+ name: ansibullgroup
+ state: present
+ register: create_group
+
+- name: get result of create group
+ script: 'grouplist.sh "{{ ansible_distribution }}"'
+ register: create_group_actual
+
+- name: assert create group
+ assert:
+ that:
+ - create_group is changed
+ - create_group.gid is defined
+ - '"ansibullgroup" in create_group_actual.stdout_lines'
+
+- name: create group (idempotent)
+ group:
+ name: ansibullgroup
+ state: present
+ register: create_group_again
-- block:
- ##
- ## group add
- ##
+- name: assert create group (idempotent)
+ assert:
+ that:
+ - not create_group_again is changed
- - name: create group (check mode)
- group:
- name: ansibullgroup
- state: present
- register: create_group_check
- check_mode: true
+##
+## group check
+##
- - name: get result of create group (check mode)
- script: 'grouplist.sh "{{ ansible_distribution }}"'
- register: create_group_actual_check
+- name: run existing group check tests
+ group:
+ name: "{{ create_group_actual.stdout_lines|random }}"
+ state: present
+ with_sequence: start=1 end=5
+ register: group_test1
+
+- name: validate results for testcase 1
+ assert:
+ that:
+ - group_test1.results is defined
+ - group_test1.results|length == 5
+
+- name: validate change results for testcase 1
+ assert:
+ that:
+ - not group_test1 is changed
+
+##
+## group add with gid
+##
+
+- name: get the next available gid
+ script: gidget.py
+ args:
+ executable: '{{ ansible_python_interpreter }}'
+ register: gid
+
+- name: create a group with a gid (check mode)
+ group:
+ name: ansibullgroup2
+ gid: '{{ gid.stdout_lines[0] }}'
+ state: present
+ register: create_group_gid_check
+ check_mode: True
+
+- name: get result of create a group with a gid (check mode)
+ script: 'grouplist.sh "{{ ansible_distribution }}"'
+ register: create_group_gid_actual_check
+
+- name: assert create group with a gid (check mode)
+ assert:
+ that:
+ - create_group_gid_check is changed
+ - '"ansibullgroup2" not in create_group_gid_actual_check.stdout_lines'
+
+- name: create a group with a gid
+ group:
+ name: ansibullgroup2
+ gid: '{{ gid.stdout_lines[0] }}'
+ state: present
+ register: create_group_gid
+
+- name: get gid of created group
+ command: "{{ ansible_python_interpreter | quote }} -c \"import grp; print(grp.getgrnam('ansibullgroup2').gr_gid)\""
+ register: create_group_gid_actual
+
+- name: assert create group with a gid
+ assert:
+ that:
+ - create_group_gid is changed
+ - create_group_gid.gid | int == gid.stdout_lines[0] | int
+ - create_group_gid_actual.stdout | trim | int == gid.stdout_lines[0] | int
+
+- name: create a group with a gid (idempotent)
+ group:
+ name: ansibullgroup2
+ gid: '{{ gid.stdout_lines[0] }}'
+ state: present
+ register: create_group_gid_again
- - name: assert create group (check mode)
- assert:
- that:
- - create_group_check is changed
- - '"ansibullgroup" not in create_group_actual_check.stdout_lines'
+- name: assert create group with a gid (idempotent)
+ assert:
+ that:
+ - not create_group_gid_again is changed
+ - create_group_gid_again.gid | int == gid.stdout_lines[0] | int
- - name: create group
+- block:
+ - name: create a group with a non-unique gid
group:
- name: ansibullgroup
+ name: ansibullgroup3
+ gid: '{{ gid.stdout_lines[0] }}'
+ non_unique: true
state: present
- register: create_group
+ register: create_group_gid_non_unique
- - name: get result of create group
- script: 'grouplist.sh "{{ ansible_distribution }}"'
- register: create_group_actual
-
- - name: assert create group
- assert:
- that:
- - create_group is changed
- - create_group.gid is defined
- - '"ansibullgroup" in create_group_actual.stdout_lines'
-
- - name: create group (idempotent)
+ - name: validate gid required with non_unique
group:
- name: ansibullgroup
- state: present
- register: create_group_again
+ name: foo
+ non_unique: true
+ register: missing_gid
+ ignore_errors: true
- - name: assert create group (idempotent)
+ - name: assert create group with a non unique gid
assert:
that:
- - not create_group_again is changed
+ - create_group_gid_non_unique is changed
+ - create_group_gid_non_unique.gid | int == gid.stdout_lines[0] | int
+ - missing_gid is failed
+ when: ansible_facts.distribution not in ['MacOSX', 'Alpine']
- ##
- ## group check
- ##
+##
+## group remove
+##
- - name: run existing group check tests
- group:
- name: "{{ create_group_actual.stdout_lines|random }}"
- state: present
- with_sequence: start=1 end=5
- register: group_test1
-
- - name: validate results for testcase 1
- assert:
- that:
- - group_test1.results is defined
- - group_test1.results|length == 5
-
- - name: validate change results for testcase 1
- assert:
- that:
- - not group_test1 is changed
+- name: delete group (check mode)
+ group:
+ name: ansibullgroup
+ state: absent
+ register: delete_group_check
+ check_mode: True
- ##
- ## group add with gid
- ##
+- name: get result of delete group (check mode)
+ script: grouplist.sh "{{ ansible_distribution }}"
+ register: delete_group_actual_check
- - name: get the next available gid
- script: get_free_gid.py
- args:
- executable: '{{ ansible_python_interpreter }}'
- register: gid
+- name: assert delete group (check mode)
+ assert:
+ that:
+ - delete_group_check is changed
+ - '"ansibullgroup" in delete_group_actual_check.stdout_lines'
- - name: create a group with a gid (check mode)
- group:
- name: ansibullgroup2
- gid: '{{ gid.stdout_lines[0] }}'
- state: present
- register: create_group_gid_check
- check_mode: true
+- name: delete group
+ group:
+ name: ansibullgroup
+ state: absent
+ register: delete_group
- - name: get result of create a group with a gid (check mode)
- script: 'grouplist.sh "{{ ansible_distribution }}"'
- register: create_group_gid_actual_check
+- name: get result of delete group
+ script: grouplist.sh "{{ ansible_distribution }}"
+ register: delete_group_actual
- - name: assert create group with a gid (check mode)
- assert:
- that:
- - create_group_gid_check is changed
- - '"ansibullgroup2" not in create_group_gid_actual_check.stdout_lines'
+- name: assert delete group
+ assert:
+ that:
+ - delete_group is changed
+ - '"ansibullgroup" not in delete_group_actual.stdout_lines'
- - name: create a group with a gid
+- name: delete group (idempotent)
+ group:
+ name: ansibullgroup
+ state: absent
+ register: delete_group_again
+
+- name: assert delete group (idempotent)
+ assert:
+ that:
+ - not delete_group_again is changed
+
+- name: Ensure lgroupadd is present
+ action: "{{ ansible_facts.pkg_mgr }}"
+ args:
+ name: libuser
+ state: present
+ when: ansible_facts.system in ['Linux'] and ansible_distribution != 'Alpine' and ansible_os_family != 'Suse'
+ tags:
+ - user_test_local_mode
+
+- name: Ensure lgroupadd is present - Alpine
+ command: apk add -U libuser
+ when: ansible_distribution == 'Alpine'
+ tags:
+ - user_test_local_mode
+
+# https://github.com/ansible/ansible/issues/56481
+- block:
+ - name: Test duplicate GID with local=yes
+ group:
+ name: "{{ item }}"
+ gid: 1337
+ local: yes
+ loop:
+ - group1_local_test
+ - group2_local_test
+ ignore_errors: yes
+ register: local_duplicate_gid_result
+
+ - assert:
+ that:
+ - local_duplicate_gid_result['results'][0] is success
+ - local_duplicate_gid_result['results'][1]['msg'] == "GID '1337' already exists with group 'group1_local_test'"
+ always:
+ - name: Cleanup
group:
- name: ansibullgroup2
- gid: '{{ gid.stdout_lines[0] }}'
- state: present
- register: create_group_gid
-
- - name: get gid of created group
- script: "get_gid_for_group.py ansibullgroup2"
- args:
- executable: '{{ ansible_python_interpreter }}'
- register: create_group_gid_actual
+ name: group1_local_test
+ state: absent
+ # only applicable to Linux, limit further to CentOS where 'luseradd' is installed
+ when: ansible_distribution == 'CentOS'
- - name: assert create group with a gid
- assert:
+# https://github.com/ansible/ansible/pull/59769
+- block:
+ - name: create a local group with a gid
+ group:
+ name: group1_local_test
+ gid: 1337
+ local: yes
+ state: present
+ register: create_local_group_gid
+
+ - name: get gid of created local group
+ command: "{{ ansible_python_interpreter | quote }} -c \"import grp; print(grp.getgrnam('group1_local_test').gr_gid)\""
+ register: create_local_group_gid_actual
+
+ - name: assert create local group with a gid
+ assert:
that:
- - create_group_gid is changed
- - create_group_gid.gid | int == gid.stdout_lines[0] | int
- - create_group_gid_actual.stdout | trim | int == gid.stdout_lines[0] | int
-
- - name: create a group with a gid (idempotent)
- group:
- name: ansibullgroup2
- gid: '{{ gid.stdout_lines[0] }}'
- state: present
- register: create_group_gid_again
-
- - name: assert create group with a gid (idempotent)
- assert:
+ - create_local_group_gid is changed
+ - create_local_group_gid.gid | int == 1337 | int
+ - create_local_group_gid_actual.stdout | trim | int == 1337 | int
+
+ - name: create a local group with a gid (idempotent)
+ group:
+ name: group1_local_test
+ gid: 1337
+ state: present
+ register: create_local_group_gid_again
+
+ - name: assert create local group with a gid (idempotent)
+ assert:
that:
- - not create_group_gid_again is changed
- - create_group_gid_again.gid | int == gid.stdout_lines[0] | int
-
- - block:
- - name: create a group with a non-unique gid
- group:
- name: ansibullgroup3
- gid: '{{ gid.stdout_lines[0] }}'
- non_unique: true
- state: present
- register: create_group_gid_non_unique
-
- - name: validate gid required with non_unique
- group:
- name: foo
- non_unique: true
- register: missing_gid
- ignore_errors: true
-
- - name: assert create group with a non unique gid
- assert:
- that:
- - create_group_gid_non_unique is changed
- - create_group_gid_non_unique.gid | int == gid.stdout_lines[0] | int
- - missing_gid is failed
- when: ansible_facts.distribution not in ['MacOSX', 'Alpine']
-
- ##
- ## group remove
- ##
-
- - name: delete group (check mode)
+ - not create_local_group_gid_again is changed
+ - create_local_group_gid_again.gid | int == 1337 | int
+ always:
+ - name: Cleanup create local group with a gid
group:
- name: ansibullgroup
+ name: group1_local_test
state: absent
- register: delete_group_check
- check_mode: true
-
- - name: get result of delete group (check mode)
- script: 'grouplist.sh "{{ ansible_distribution }}"'
- register: delete_group_actual_check
+ # only applicable to Linux, limit further to CentOS where 'luseradd' is installed
+ when: ansible_distribution == 'CentOS'
- - name: assert delete group (check mode)
- assert:
+# https://github.com/ansible/ansible/pull/59772
+- block:
+ - name: create group with a gid
+ group:
+ name: group1_test
+ gid: 1337
+ local: no
+ state: present
+ register: create_group_gid
+
+ - name: get gid of created group
+ command: "{{ ansible_python_interpreter | quote }} -c \"import grp; print(grp.getgrnam('group1_test').gr_gid)\""
+ register: create_group_gid_actual
+
+ - name: assert create group with a gid
+ assert:
that:
- - delete_group_check is changed
- - '"ansibullgroup" in delete_group_actual_check.stdout_lines'
-
- - name: delete group
- group:
- name: ansibullgroup
- state: absent
- register: delete_group
-
- - name: get result of delete group
- script: 'grouplist.sh "{{ ansible_distribution }}"'
- register: delete_group_actual
-
- - name: assert delete group
- assert:
+ - create_group_gid is changed
+ - create_group_gid.gid | int == 1337 | int
+ - create_group_gid_actual.stdout | trim | int == 1337 | int
+
+ - name: create local group with the same gid
+ group:
+ name: group1_test
+ gid: 1337
+ local: yes
+ state: present
+ register: create_local_group_gid
+
+ - name: assert create local group with a gid
+ assert:
that:
- - delete_group is changed
- - '"ansibullgroup" not in delete_group_actual.stdout_lines'
-
- - name: delete group (idempotent)
+ - create_local_group_gid.gid | int == 1337 | int
+ always:
+ - name: Cleanup create group with a gid
group:
- name: ansibullgroup
+ name: group1_test
+ local: no
state: absent
- register: delete_group_again
-
- - name: assert delete group (idempotent)
- assert:
- that:
- - not delete_group_again is changed
-
- - name: Ensure lgroupadd is present
- action: "{{ ansible_facts.pkg_mgr }}"
- args:
- name: libuser
- state: present
- when: ansible_facts.system in ['Linux'] and ansible_distribution != 'Alpine' and ansible_os_family != 'Suse'
- tags:
- - user_test_local_mode
-
- - name: Ensure lgroupadd is present - Alpine
- command: apk add -U libuser
- when: ansible_distribution == 'Alpine'
- tags:
- - user_test_local_mode
-
- # https://github.com/ansible/ansible/issues/56481
- - block:
- - name: Test duplicate GID with local=yes
- group:
- name: "{{ item }}"
- gid: 1337
- local: true
- loop:
- - group1_local_test
- - group2_local_test
- ignore_errors: true
- register: local_duplicate_gid_result
-
- - assert:
- that:
- - local_duplicate_gid_result['results'][0] is success
- - local_duplicate_gid_result['results'][1]['msg'] == "GID '1337' already exists with group 'group1_local_test'"
- always:
- - name: Cleanup
- group:
- name: group1_local_test
- state: absent
- # only applicable to Linux, limit further to CentOS where 'luseradd' is installed
- when: ansible_distribution == 'CentOS'
-
- # https://github.com/ansible/ansible/pull/59769
- - block:
- - name: create a local group with a gid
- group:
- name: group1_local_test
- gid: 1337
- local: true
- state: present
- register: create_local_group_gid
-
- - name: get gid of created local group
- script: "get_gid_for_group.py group1_local_test"
- args:
- executable: '{{ ansible_python_interpreter }}'
- register: create_local_group_gid_actual
-
- - name: assert create local group with a gid
- assert:
- that:
- - create_local_group_gid is changed
- - create_local_group_gid.gid | int == 1337 | int
- - create_local_group_gid_actual.stdout | trim | int == 1337 | int
-
- - name: create a local group with a gid (idempotent)
- group:
- name: group1_local_test
- gid: 1337
- state: present
- register: create_local_group_gid_again
-
- - name: assert create local group with a gid (idempotent)
- assert:
- that:
- - not create_local_group_gid_again is changed
- - create_local_group_gid_again.gid | int == 1337 | int
- always:
- - name: Cleanup create local group with a gid
- group:
- name: group1_local_test
- state: absent
- # only applicable to Linux, limit further to CentOS where 'luseradd' is installed
- when: ansible_distribution == 'CentOS'
-
- # https://github.com/ansible/ansible/pull/59772
- - block:
- - name: create group with a gid
- group:
- name: group1_test
- gid: 1337
- local: false
- state: present
- register: create_group_gid
-
- - name: get gid of created group
- script: "get_gid_for_group.py group1_test"
- args:
- executable: '{{ ansible_python_interpreter }}'
- register: create_group_gid_actual
-
- - name: assert create group with a gid
- assert:
- that:
- - create_group_gid is changed
- - create_group_gid.gid | int == 1337 | int
- - create_group_gid_actual.stdout | trim | int == 1337 | int
-
- - name: create local group with the same gid
- group:
- name: group1_test
- gid: 1337
- local: true
- state: present
- register: create_local_group_gid
-
- - name: assert create local group with a gid
- assert:
- that:
- - create_local_group_gid.gid | int == 1337 | int
- always:
- - name: Cleanup create group with a gid
- group:
- name: group1_test
- local: false
- state: absent
- - name: Cleanup create local group with the same gid
- group:
- name: group1_test
- local: true
- state: absent
- # only applicable to Linux, limit further to CentOS where 'lgroupadd' is installed
- when: ansible_distribution == 'CentOS'
-
- # https://github.com/ansible/ansible/pull/78172
- - block:
- - name: Create a group
- group:
- name: groupdeltest
- state: present
-
- - name: Create user with primary group of groupdeltest
- user:
- name: groupdeluser
- group: groupdeltest
- state: present
-
- - name: Show we can't delete the group usually
- group:
- name: groupdeltest
- state: absent
- ignore_errors: true
- register: failed_delete
-
- - name: assert we couldn't delete the group
- assert:
- that:
- - failed_delete is failed
-
- - name: force delete the group
- group:
- name: groupdeltest
- force: true
- state: absent
-
- always:
- - name: Cleanup user
- user:
- name: groupdeluser
- state: absent
-
- - name: Cleanup group
- group:
- name: groupdeltest
- state: absent
- when: ansible_distribution not in ["MacOSX", "Alpine", "FreeBSD"]
-
- # create system group
-
- - name: remove group
+ - name: Cleanup create local group with the same gid
group:
- name: ansibullgroup
+ name: group1_test
+ local: yes
state: absent
+ # only applicable to Linux, limit further to CentOS where 'lgroupadd' is installed
+ when: ansible_distribution == 'CentOS'
- - name: create system group
- group:
- name: ansibullgroup
- state: present
- system: true
+# create system group
- always:
- - name: remove test groups after test
- group:
- name: '{{ item }}'
- state: absent
- loop:
- - ansibullgroup
- - ansibullgroup2
- - ansibullgroup3
+- name: remove group
+ group:
+ name: ansibullgroup
+ state: absent
+
+- name: create system group
+ group:
+ name: ansibullgroup
+ state: present
+ system: yes
diff --git a/test/integration/targets/handlers/80880.yml b/test/integration/targets/handlers/80880.yml
deleted file mode 100644
index d362ea8e..00000000
--- a/test/integration/targets/handlers/80880.yml
+++ /dev/null
@@ -1,34 +0,0 @@
----
-- name: Test notification of handlers from other handlers
- hosts: localhost
- gather_facts: no
- handlers:
- - name: Handler 1
- debug:
- msg: Handler 1
- changed_when: true
- notify: Handler 2
- register: handler1_res
- - name: Handler 2
- debug:
- msg: Handler 2
- changed_when: true
- notify: Handler 3
- register: handler2_res
- - name: Handler 3
- debug:
- msg: Handler 3
- register: handler3_res
- tasks:
- - name: Trigger handlers
- ansible.builtin.debug:
- msg: Task 1
- changed_when: true
- notify: Handler 1
- post_tasks:
- - name: Assert results
- ansible.builtin.assert:
- that:
- - "handler1_res is defined and handler1_res is success"
- - "handler2_res is defined and handler2_res is success"
- - "handler3_res is defined and handler3_res is success"
diff --git a/test/integration/targets/handlers/82241.yml b/test/integration/targets/handlers/82241.yml
deleted file mode 100644
index 4a9421fb..00000000
--- a/test/integration/targets/handlers/82241.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-- hosts: A
- gather_facts: false
- tasks:
- - import_role:
- name: role-82241
- tasks_from: entry_point.yml
diff --git a/test/integration/targets/handlers/nested_flush_handlers_failure_force.yml b/test/integration/targets/handlers/nested_flush_handlers_failure_force.yml
deleted file mode 100644
index 7380923e..00000000
--- a/test/integration/targets/handlers/nested_flush_handlers_failure_force.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-- hosts: A,B
- gather_facts: false
- force_handlers: true
- tasks:
- - block:
- - command: echo
- notify: h
-
- - meta: flush_handlers
- rescue:
- - debug:
- msg: flush_handlers_rescued
- always:
- - debug:
- msg: flush_handlers_always
- handlers:
- - name: h
- fail:
- when: inventory_hostname == "A"
diff --git a/test/integration/targets/handlers/roles/include_role_include_tasks_handler/handlers/include_handlers.yml b/test/integration/targets/handlers/roles/include_role_include_tasks_handler/handlers/include_handlers.yml
deleted file mode 100644
index f39ac4fc..00000000
--- a/test/integration/targets/handlers/roles/include_role_include_tasks_handler/handlers/include_handlers.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- debug:
- msg: handler ran
diff --git a/test/integration/targets/handlers/roles/include_role_include_tasks_handler/handlers/main.yml b/test/integration/targets/handlers/roles/include_role_include_tasks_handler/handlers/main.yml
deleted file mode 100644
index 4ce8a3f2..00000000
--- a/test/integration/targets/handlers/roles/include_role_include_tasks_handler/handlers/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- name: handler
- include_tasks: include_handlers.yml
diff --git a/test/integration/targets/handlers/roles/include_role_include_tasks_handler/tasks/main.yml b/test/integration/targets/handlers/roles/include_role_include_tasks_handler/tasks/main.yml
deleted file mode 100644
index 50aec1c7..00000000
--- a/test/integration/targets/handlers/roles/include_role_include_tasks_handler/tasks/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- command: echo
- notify: handler
diff --git a/test/integration/targets/handlers/roles/r1-dep_chain-vars/defaults/main.yml b/test/integration/targets/handlers/roles/r1-dep_chain-vars/defaults/main.yml
deleted file mode 100644
index 555ff0e9..00000000
--- a/test/integration/targets/handlers/roles/r1-dep_chain-vars/defaults/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-v: foo
diff --git a/test/integration/targets/handlers/roles/r1-dep_chain-vars/tasks/main.yml b/test/integration/targets/handlers/roles/r1-dep_chain-vars/tasks/main.yml
deleted file mode 100644
index 72576a01..00000000
--- a/test/integration/targets/handlers/roles/r1-dep_chain-vars/tasks/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- include_role:
- name: r2-dep_chain-vars
diff --git a/test/integration/targets/handlers/roles/r2-dep_chain-vars/handlers/main.yml b/test/integration/targets/handlers/roles/r2-dep_chain-vars/handlers/main.yml
deleted file mode 100644
index 88f1248f..00000000
--- a/test/integration/targets/handlers/roles/r2-dep_chain-vars/handlers/main.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: h
- assert:
- that:
- - v is defined
diff --git a/test/integration/targets/handlers/roles/r2-dep_chain-vars/tasks/main.yml b/test/integration/targets/handlers/roles/r2-dep_chain-vars/tasks/main.yml
deleted file mode 100644
index 72eae5d6..00000000
--- a/test/integration/targets/handlers/roles/r2-dep_chain-vars/tasks/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- command: echo
- notify: h
diff --git a/test/integration/targets/handlers/roles/role-82241/handlers/main.yml b/test/integration/targets/handlers/roles/role-82241/handlers/main.yml
deleted file mode 100644
index ad59b963..00000000
--- a/test/integration/targets/handlers/roles/role-82241/handlers/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- name: handler
- include_tasks: included_tasks.yml
diff --git a/test/integration/targets/handlers/roles/role-82241/tasks/entry_point.yml b/test/integration/targets/handlers/roles/role-82241/tasks/entry_point.yml
deleted file mode 100644
index 50aec1c7..00000000
--- a/test/integration/targets/handlers/roles/role-82241/tasks/entry_point.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- command: echo
- notify: handler
diff --git a/test/integration/targets/handlers/roles/role-82241/tasks/included_tasks.yml b/test/integration/targets/handlers/roles/role-82241/tasks/included_tasks.yml
deleted file mode 100644
index e3ffeb7e..00000000
--- a/test/integration/targets/handlers/roles/role-82241/tasks/included_tasks.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- debug:
- msg: included_task_from_tasks_dir
diff --git a/test/integration/targets/handlers/roles/test_listen_role_dedup_global/handlers/main.yml b/test/integration/targets/handlers/roles/test_listen_role_dedup_global/handlers/main.yml
deleted file mode 100644
index 6ce84e44..00000000
--- a/test/integration/targets/handlers/roles/test_listen_role_dedup_global/handlers/main.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: role_handler
- debug:
- msg: "a handler from a role"
- listen: role_handler
diff --git a/test/integration/targets/handlers/roles/test_listen_role_dedup_role1/meta/main.yml b/test/integration/targets/handlers/roles/test_listen_role_dedup_role1/meta/main.yml
deleted file mode 100644
index b6a70c22..00000000
--- a/test/integration/targets/handlers/roles/test_listen_role_dedup_role1/meta/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-dependencies:
- - test_listen_role_dedup_global
diff --git a/test/integration/targets/handlers/roles/test_listen_role_dedup_role1/tasks/main.yml b/test/integration/targets/handlers/roles/test_listen_role_dedup_role1/tasks/main.yml
deleted file mode 100644
index 42911e56..00000000
--- a/test/integration/targets/handlers/roles/test_listen_role_dedup_role1/tasks/main.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: a task from role1
- command: echo
- notify: role_handler
diff --git a/test/integration/targets/handlers/roles/test_listen_role_dedup_role2/meta/main.yml b/test/integration/targets/handlers/roles/test_listen_role_dedup_role2/meta/main.yml
deleted file mode 100644
index b6a70c22..00000000
--- a/test/integration/targets/handlers/roles/test_listen_role_dedup_role2/meta/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-dependencies:
- - test_listen_role_dedup_global
diff --git a/test/integration/targets/handlers/roles/test_listen_role_dedup_role2/tasks/main.yml b/test/integration/targets/handlers/roles/test_listen_role_dedup_role2/tasks/main.yml
deleted file mode 100644
index 3d5e5446..00000000
--- a/test/integration/targets/handlers/roles/test_listen_role_dedup_role2/tasks/main.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: a task from role2
- command: echo
- notify: role_handler
diff --git a/test/integration/targets/handlers/roles/two_tasks_files_role/handlers/main.yml b/test/integration/targets/handlers/roles/two_tasks_files_role/handlers/main.yml
deleted file mode 100644
index 3fd13187..00000000
--- a/test/integration/targets/handlers/roles/two_tasks_files_role/handlers/main.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: handler
- debug:
- msg: handler ran
diff --git a/test/integration/targets/handlers/roles/two_tasks_files_role/tasks/main.yml b/test/integration/targets/handlers/roles/two_tasks_files_role/tasks/main.yml
deleted file mode 100644
index e6c12397..00000000
--- a/test/integration/targets/handlers/roles/two_tasks_files_role/tasks/main.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: main.yml task
- command: echo
- notify: handler
diff --git a/test/integration/targets/handlers/roles/two_tasks_files_role/tasks/other.yml b/test/integration/targets/handlers/roles/two_tasks_files_role/tasks/other.yml
deleted file mode 100644
index d90d46e0..00000000
--- a/test/integration/targets/handlers/roles/two_tasks_files_role/tasks/other.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: other.yml task
- command: echo
- notify: handler
diff --git a/test/integration/targets/handlers/runme.sh b/test/integration/targets/handlers/runme.sh
index 368ca44d..76fc99d8 100755
--- a/test/integration/targets/handlers/runme.sh
+++ b/test/integration/targets/handlers/runme.sh
@@ -50,9 +50,6 @@ for strategy in linear free; do
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags force_false_in_play --force-handlers \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_B" ]
- # https://github.com/ansible/ansible/pull/80898
- [ "$(ansible-playbook 80880.yml -i inventory.handlers -vv "$@" 2>&1)" ]
-
unset ANSIBLE_STRATEGY
done
@@ -69,9 +66,6 @@ done
# Notify handler listen
ansible-playbook test_handlers_listen.yml -i inventory.handlers -v "$@"
-# https://github.com/ansible/ansible/issues/82363
-ansible-playbook test_multiple_handlers_with_recursive_notification.yml -i inventory.handlers -v "$@"
-
# Notify inexistent handlers results in error
set +e
result="$(ansible-playbook test_handlers_inexistent_notify.yml -i inventory.handlers "$@" 2>&1)"
@@ -187,24 +181,3 @@ grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_block_as_handler-import.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
-
-ansible-playbook test_include_role_handler_once.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
-[ "$(grep out.txt -ce 'handler ran')" = "1" ]
-
-ansible-playbook test_listen_role_dedup.yml "$@" 2>&1 | tee out.txt
-[ "$(grep out.txt -ce 'a handler from a role')" = "1" ]
-
-ansible localhost -m include_role -a "name=r1-dep_chain-vars" "$@"
-
-ansible-playbook test_include_tasks_in_include_role.yml "$@" 2>&1 | tee out.txt
-[ "$(grep out.txt -ce 'handler ran')" = "1" ]
-
-ansible-playbook test_run_once.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
-[ "$(grep out.txt -ce 'handler ran once')" = "1" ]
-
-ansible-playbook 82241.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
-[ "$(grep out.txt -ce 'included_task_from_tasks_dir')" = "1" ]
-
-ansible-playbook nested_flush_handlers_failure_force.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
-[ "$(grep out.txt -ce 'flush_handlers_rescued')" = "1" ]
-[ "$(grep out.txt -ce 'flush_handlers_always')" = "2" ]
diff --git a/test/integration/targets/handlers/test_include_role_handler_once.yml b/test/integration/targets/handlers/test_include_role_handler_once.yml
deleted file mode 100644
index 764aef64..00000000
--- a/test/integration/targets/handlers/test_include_role_handler_once.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-- hosts: localhost
- gather_facts: false
- tasks:
- - name: "Call main entry point"
- include_role:
- name: two_tasks_files_role
-
- - name: "Call main entry point again"
- include_role:
- name: two_tasks_files_role
-
- - name: "Call other entry point"
- include_role:
- name: two_tasks_files_role
- tasks_from: other
-
- - name: "Call other entry point again"
- include_role:
- name: two_tasks_files_role
- tasks_from: other
diff --git a/test/integration/targets/handlers/test_include_tasks_in_include_role.yml b/test/integration/targets/handlers/test_include_tasks_in_include_role.yml
deleted file mode 100644
index 405e4b50..00000000
--- a/test/integration/targets/handlers/test_include_tasks_in_include_role.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-- hosts: localhost
- gather_facts: false
- tasks:
- - include_role:
- name: include_role_include_tasks_handler
diff --git a/test/integration/targets/handlers/test_listen_role_dedup.yml b/test/integration/targets/handlers/test_listen_role_dedup.yml
deleted file mode 100644
index 508eaf56..00000000
--- a/test/integration/targets/handlers/test_listen_role_dedup.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-- hosts: localhost
- gather_facts: false
- roles:
- - test_listen_role_dedup_role1
- - test_listen_role_dedup_role2
diff --git a/test/integration/targets/handlers/test_multiple_handlers_with_recursive_notification.yml b/test/integration/targets/handlers/test_multiple_handlers_with_recursive_notification.yml
deleted file mode 100644
index c4b69831..00000000
--- a/test/integration/targets/handlers/test_multiple_handlers_with_recursive_notification.yml
+++ /dev/null
@@ -1,36 +0,0 @@
----
-- name: test multiple handlers with recursive notification
- hosts: localhost
- gather_facts: false
-
- tasks:
- - name: notify handler 1
- command: echo
- changed_when: true
- notify: handler 1
-
- - meta: flush_handlers
-
- - name: verify handlers
- assert:
- that:
- - "ran_handler_1 is defined"
- - "ran_handler_2a is defined"
- - "ran_handler_2b is defined"
-
- handlers:
- - name: handler 1
- set_fact:
- ran_handler_1: True
- changed_when: true
- notify: handler_2
-
- - name: handler 2a
- set_fact:
- ran_handler_2a: True
- listen: handler_2
-
- - name: handler 2b
- set_fact:
- ran_handler_2b: True
- listen: handler_2
diff --git a/test/integration/targets/handlers/test_run_once.yml b/test/integration/targets/handlers/test_run_once.yml
deleted file mode 100644
index 5418b46a..00000000
--- a/test/integration/targets/handlers/test_run_once.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-- hosts: A,B,C
- gather_facts: false
- tasks:
- - command: echo
- notify: handler
- handlers:
- - name: handler
- run_once: true
- debug:
- msg: handler ran once
diff --git a/test/integration/targets/include_vars/files/test_depth/sub1/sub11.yml b/test/integration/targets/include_vars/files/test_depth/sub1/sub11.yml
deleted file mode 100644
index 9a5ecb80..00000000
--- a/test/integration/targets/include_vars/files/test_depth/sub1/sub11.yml
+++ /dev/null
@@ -1 +0,0 @@
-sub11: defined
diff --git a/test/integration/targets/include_vars/files/test_depth/sub1/sub11/config11.yml b/test/integration/targets/include_vars/files/test_depth/sub1/sub11/config11.yml
deleted file mode 100644
index 02c28979..00000000
--- a/test/integration/targets/include_vars/files/test_depth/sub1/sub11/config11.yml
+++ /dev/null
@@ -1 +0,0 @@
-config11: defined
diff --git a/test/integration/targets/include_vars/files/test_depth/sub1/sub11/config112.yml b/test/integration/targets/include_vars/files/test_depth/sub1/sub11/config112.yml
deleted file mode 100644
index e8bc9d94..00000000
--- a/test/integration/targets/include_vars/files/test_depth/sub1/sub11/config112.yml
+++ /dev/null
@@ -1 +0,0 @@
-config112: defined
diff --git a/test/integration/targets/include_vars/files/test_depth/sub1/sub12.yml b/test/integration/targets/include_vars/files/test_depth/sub1/sub12.yml
deleted file mode 100644
index 9aff2876..00000000
--- a/test/integration/targets/include_vars/files/test_depth/sub1/sub12.yml
+++ /dev/null
@@ -1 +0,0 @@
-sub12: defined
diff --git a/test/integration/targets/include_vars/files/test_depth/sub2/sub21.yml b/test/integration/targets/include_vars/files/test_depth/sub2/sub21.yml
deleted file mode 100644
index 1f7c455e..00000000
--- a/test/integration/targets/include_vars/files/test_depth/sub2/sub21.yml
+++ /dev/null
@@ -1 +0,0 @@
-sub21: defined
diff --git a/test/integration/targets/include_vars/files/test_depth/sub2/sub21/config211.yml b/test/integration/targets/include_vars/files/test_depth/sub2/sub21/config211.yml
deleted file mode 100644
index a5126a7b..00000000
--- a/test/integration/targets/include_vars/files/test_depth/sub2/sub21/config211.yml
+++ /dev/null
@@ -1 +0,0 @@
-config211: defined
diff --git a/test/integration/targets/include_vars/files/test_depth/sub2/sub21/config212.yml b/test/integration/targets/include_vars/files/test_depth/sub2/sub21/config212.yml
deleted file mode 100644
index 633841df..00000000
--- a/test/integration/targets/include_vars/files/test_depth/sub2/sub21/config212.yml
+++ /dev/null
@@ -1 +0,0 @@
-config212: defined
diff --git a/test/integration/targets/include_vars/files/test_depth/sub3/config3.yml b/test/integration/targets/include_vars/files/test_depth/sub3/config3.yml
deleted file mode 100644
index d6a8192d..00000000
--- a/test/integration/targets/include_vars/files/test_depth/sub3/config3.yml
+++ /dev/null
@@ -1 +0,0 @@
-config3: defined
diff --git a/test/integration/targets/include_vars/tasks/main.yml b/test/integration/targets/include_vars/tasks/main.yml
index 97636d9d..6fc4e85a 100644
--- a/test/integration/targets/include_vars/tasks/main.yml
+++ b/test/integration/targets/include_vars/tasks/main.yml
@@ -208,21 +208,6 @@
- "config.key2.b == 22"
- "config.key3 == 3"
-- name: Include a vars dir with hash variables
- include_vars:
- dir: "{{ role_path }}/vars2/hashes/"
- hash_behaviour: merge
-
-- name: Verify that the hash is merged after vars files are accumulated
- assert:
- that:
- - "config | length == 3"
- - "config.key0 is undefined"
- - "config.key1 == 1"
- - "config.key2 | length == 1"
- - "config.key2.b == 22"
- - "config.key3 == 3"
-
- include_vars:
file: no_auto_unsafe.yml
register: baz
@@ -230,40 +215,3 @@
- assert:
that:
- baz.ansible_facts.foo|type_debug != "AnsibleUnsafeText"
-
-- name: setup test following symlinks
- delegate_to: localhost
- block:
- - name: create directory to test following symlinks
- file:
- path: "{{ role_path }}/test_symlink"
- state: directory
-
- - name: create symlink to the vars2 dir
- file:
- src: "{{ role_path }}/vars2"
- dest: "{{ role_path }}/test_symlink/symlink"
- state: link
-
-- name: include vars by following the symlink
- include_vars:
- dir: "{{ role_path }}/test_symlink"
- register: follow_sym
-
-- assert:
- that: follow_sym.ansible_included_var_files | sort == [hash1, hash2]
- vars:
- hash1: "{{ role_path }}/test_symlink/symlink/hashes/hash1.yml"
- hash2: "{{ role_path }}/test_symlink/symlink/hashes/hash2.yml"
-
-- name: Test include_vars includes everything to the correct depth
- ansible.builtin.include_vars:
- dir: "{{ role_path }}/files/test_depth"
- depth: 3
- name: test_depth_var
- register: test_depth
-
-- assert:
- that:
- - "test_depth.ansible_included_var_files|length == 8"
- - "test_depth_var.keys()|length == 8"
diff --git a/test/integration/targets/include_vars/vars/services/service_vars.yml b/test/integration/targets/include_vars/vars/services/service_vars.yml
index bcac7646..96b05d6c 100644
--- a/test/integration/targets/include_vars/vars/services/service_vars.yml
+++ b/test/integration/targets/include_vars/vars/services/service_vars.yml
@@ -1,2 +1,2 @@
---
-service_name: 'my_custom_service'
+service_name: 'my_custom_service' \ No newline at end of file
diff --git a/test/integration/targets/include_vars/vars/services/service_vars_fqcn.yml b/test/integration/targets/include_vars/vars/services/service_vars_fqcn.yml
index cd82eca5..2c04fee5 100644
--- a/test/integration/targets/include_vars/vars/services/service_vars_fqcn.yml
+++ b/test/integration/targets/include_vars/vars/services/service_vars_fqcn.yml
@@ -1,3 +1,3 @@
---
service_name_fqcn: 'my_custom_service'
-service_name_tmpl_fqcn: '{{ service_name_fqcn }}'
+service_name_tmpl_fqcn: '{{ service_name_fqcn }}' \ No newline at end of file
diff --git a/test/integration/targets/include_when_parent_is_dynamic/tasks.yml b/test/integration/targets/include_when_parent_is_dynamic/tasks.yml
index d500f0df..6831245c 100644
--- a/test/integration/targets/include_when_parent_is_dynamic/tasks.yml
+++ b/test/integration/targets/include_when_parent_is_dynamic/tasks.yml
@@ -9,4 +9,4 @@
# perform an include task which should be static if all of the task's parents are static, otherwise it should be dynamic
# this file was loaded using include_tasks, which is dynamic, so this include should also be dynamic
-- include_tasks: syntax_error.yml
+- include: syntax_error.yml
diff --git a/test/integration/targets/include_when_parent_is_static/tasks.yml b/test/integration/targets/include_when_parent_is_static/tasks.yml
index 50dd2341..a234a3dd 100644
--- a/test/integration/targets/include_when_parent_is_static/tasks.yml
+++ b/test/integration/targets/include_when_parent_is_static/tasks.yml
@@ -9,4 +9,4 @@
# perform an include task which should be static if all of the task's parents are static, otherwise it should be dynamic
# this file was loaded using import_tasks, which is static, so this include should also be static
-- import_tasks: syntax_error.yml
+- include: syntax_error.yml
diff --git a/test/integration/targets/includes/include_on_playbook_should_fail.yml b/test/integration/targets/includes/include_on_playbook_should_fail.yml
index c9b1e81a..953459dc 100644
--- a/test/integration/targets/includes/include_on_playbook_should_fail.yml
+++ b/test/integration/targets/includes/include_on_playbook_should_fail.yml
@@ -1 +1 @@
-- include_tasks: test_includes3.yml
+- include: test_includes3.yml
diff --git a/test/integration/targets/includes/roles/test_includes/handlers/main.yml b/test/integration/targets/includes/roles/test_includes/handlers/main.yml
index 453fa96d..7d3e625f 100644
--- a/test/integration/targets/includes/roles/test_includes/handlers/main.yml
+++ b/test/integration/targets/includes/roles/test_includes/handlers/main.yml
@@ -1 +1 @@
-- import_tasks: more_handlers.yml
+- include: more_handlers.yml
diff --git a/test/integration/targets/includes/roles/test_includes/tasks/main.yml b/test/integration/targets/includes/roles/test_includes/tasks/main.yml
index 2ba1ae63..83ca468b 100644
--- a/test/integration/targets/includes/roles/test_includes/tasks/main.yml
+++ b/test/integration/targets/includes/roles/test_includes/tasks/main.yml
@@ -17,9 +17,47 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+- include: included_task1.yml a=1 b=2 c=3
+
+- name: verify non-variable include params
+ assert:
+ that:
+ - "ca == '1'"
+ - "cb == '2'"
+ - "cc == '3'"
+
+- set_fact:
+ a: 101
+ b: 102
+ c: 103
+
+- include: included_task1.yml a={{a}} b={{b}} c=103
+
+- name: verify variable include params
+ assert:
+ that:
+ - "ca == 101"
+ - "cb == 102"
+ - "cc == 103"
+
+# Test that strings are not turned into numbers
+- set_fact:
+ a: "101"
+ b: "102"
+ c: "103"
+
+- include: included_task1.yml a={{a}} b={{b}} c=103
+
+- name: verify variable include params
+ assert:
+ that:
+ - "ca == '101'"
+ - "cb == '102'"
+ - "cc == '103'"
+
# now try long form includes
-- include_tasks: included_task1.yml
+- include: included_task1.yml
vars:
a: 201
b: 202
diff --git a/test/integration/targets/includes/roles/test_includes_free/tasks/main.yml b/test/integration/targets/includes/roles/test_includes_free/tasks/main.yml
index d7bcf8eb..5ae7882f 100644
--- a/test/integration/targets/includes/roles/test_includes_free/tasks/main.yml
+++ b/test/integration/targets/includes/roles/test_includes_free/tasks/main.yml
@@ -1,9 +1,9 @@
- name: this needs to be here
debug:
msg: "hello"
-- include_tasks: inner.yml
+- include: inner.yml
with_items:
- '1'
-- ansible.builtin.include_tasks: inner_fqcn.yml
+- ansible.builtin.include: inner_fqcn.yml
with_items:
- '1'
diff --git a/test/integration/targets/includes/roles/test_includes_host_pinned/tasks/main.yml b/test/integration/targets/includes/roles/test_includes_host_pinned/tasks/main.yml
index c06d3feb..7bc19faa 100644
--- a/test/integration/targets/includes/roles/test_includes_host_pinned/tasks/main.yml
+++ b/test/integration/targets/includes/roles/test_includes_host_pinned/tasks/main.yml
@@ -1,6 +1,6 @@
- name: this needs to be here
debug:
msg: "hello"
-- include_tasks: inner.yml
+- include: inner.yml
with_items:
- '1'
diff --git a/test/integration/targets/includes/runme.sh b/test/integration/targets/includes/runme.sh
index 8622cf66..e619feaf 100755
--- a/test/integration/targets/includes/runme.sh
+++ b/test/integration/targets/includes/runme.sh
@@ -10,7 +10,7 @@ echo "EXPECTED ERROR: Ensure we fail if using 'include' to include a playbook."
set +e
result="$(ansible-playbook -i ../../inventory include_on_playbook_should_fail.yml -v "$@" 2>&1)"
set -e
-grep -q "ERROR! 'include_tasks' is not a valid attribute for a Play" <<< "$result"
+grep -q "ERROR! 'include' is not a valid attribute for a Play" <<< "$result"
ansible-playbook includes_loop_rescue.yml --extra-vars strategy=linear "$@"
ansible-playbook includes_loop_rescue.yml --extra-vars strategy=free "$@"
diff --git a/test/integration/targets/includes/test_includes2.yml b/test/integration/targets/includes/test_includes2.yml
index da6b914f..a32e8513 100644
--- a/test/integration/targets/includes/test_includes2.yml
+++ b/test/integration/targets/includes/test_includes2.yml
@@ -13,8 +13,8 @@
- role: test_includes
tags: test_includes
tasks:
- - include_tasks: roles/test_includes/tasks/not_a_role_task.yml
- - include_tasks: roles/test_includes/tasks/empty.yml
+ - include: roles/test_includes/tasks/not_a_role_task.yml
+ - include: roles/test_includes/tasks/empty.yml
- assert:
that:
- "ca == 33000"
diff --git a/test/integration/targets/includes/test_includes3.yml b/test/integration/targets/includes/test_includes3.yml
index f3c4964e..0b4c6312 100644
--- a/test/integration/targets/includes/test_includes3.yml
+++ b/test/integration/targets/includes/test_includes3.yml
@@ -1,6 +1,6 @@
- hosts: testhost
tasks:
- - include_tasks: test_includes4.yml
+ - include: test_includes4.yml
with_items: ["a"]
loop_control:
loop_var: r
diff --git a/test/integration/targets/inventory/inventory_plugins/contructed_with_hostvars.py b/test/integration/targets/inventory/inventory_plugins/contructed_with_hostvars.py
index 43cad4fc..7ca445a3 100644
--- a/test/integration/targets/inventory/inventory_plugins/contructed_with_hostvars.py
+++ b/test/integration/targets/inventory/inventory_plugins/contructed_with_hostvars.py
@@ -14,7 +14,7 @@ DOCUMENTATION = '''
'''
from ansible.errors import AnsibleParserError
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable
diff --git a/test/integration/targets/inventory_ini/inventory.ini b/test/integration/targets/inventory_ini/inventory.ini
index a5de4211..a0c99ade 100644
--- a/test/integration/targets/inventory_ini/inventory.ini
+++ b/test/integration/targets/inventory_ini/inventory.ini
@@ -1,5 +1,3 @@
-gitlab-runner-01 ansible_host=gitlab-runner-01.internal.example.net ansible_user=root
-
[local]
testhost ansible_connection=local ansible_become=no ansible_become_user=ansibletest1
diff --git a/test/integration/targets/inventory_ini/runme.sh b/test/integration/targets/inventory_ini/runme.sh
index 919e1884..81bf1475 100755
--- a/test/integration/targets/inventory_ini/runme.sh
+++ b/test/integration/targets/inventory_ini/runme.sh
@@ -3,6 +3,3 @@
set -eux
ansible-playbook -v -i inventory.ini test_ansible_become.yml
-
-ansible-inventory -v -i inventory.ini --list 2> out
-test "$(grep -c 'SyntaxWarning' out)" -eq 0
diff --git a/test/integration/targets/iptables/aliases b/test/integration/targets/iptables/aliases
index 73df8aad..7d66ecf8 100644
--- a/test/integration/targets/iptables/aliases
+++ b/test/integration/targets/iptables/aliases
@@ -1,4 +1,5 @@
shippable/posix/group2
skip/freebsd
+skip/osx
skip/macos
skip/docker
diff --git a/test/integration/targets/iptables/tasks/chain_management.yml b/test/integration/targets/iptables/tasks/chain_management.yml
index dae4103a..03551228 100644
--- a/test/integration/targets/iptables/tasks/chain_management.yml
+++ b/test/integration/targets/iptables/tasks/chain_management.yml
@@ -45,26 +45,6 @@
- result is not failed
- '"FOOBAR-CHAIN" in result.stdout'
-- name: add rule to foobar chain
- become: true
- iptables:
- chain: FOOBAR-CHAIN
- source: 0.0.0.0
- destination: 0.0.0.0
- jump: DROP
- comment: "FOOBAR-CHAIN RULE"
-
-- name: get the state of the iptable rules after rule is added to foobar chain
- become: true
- shell: "{{ iptables_bin }} -L"
- register: result
-
-- name: assert rule is present in foobar chain
- assert:
- that:
- - result is not failed
- - '"FOOBAR-CHAIN RULE" in result.stdout'
-
- name: flush the foobar chain
become: true
iptables:
@@ -88,3 +68,4 @@
that:
- result is not failed
- '"FOOBAR-CHAIN" not in result.stdout'
+ - '"FOOBAR-RULE" not in result.stdout'
diff --git a/test/integration/targets/known_hosts/defaults/main.yml b/test/integration/targets/known_hosts/defaults/main.yml
index cd438430..b1b56ac7 100644
--- a/test/integration/targets/known_hosts/defaults/main.yml
+++ b/test/integration/targets/known_hosts/defaults/main.yml
@@ -3,4 +3,4 @@ example_org_rsa_key: >
example.org ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAglyZmHHWskQ9wkh8LYbIqzvg99/oloneH7BaZ02ripJUy/2Zynv4tgUfm9fdXvAb1XXCEuTRnts9FBer87+voU0FPRgx3CfY9Sgr0FspUjnm4lqs53FIab1psddAaS7/F7lrnjl6VqBtPwMRQZG7qlml5uogGJwYJHxX0PGtsdoTJsM=
example_org_ed25519_key: >
- example.org ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIzlnSq5ESxLgW0avvPk3j7zLV59hcAPkxrMNdnZMKP2
+ example.org ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIzlnSq5ESxLgW0avvPk3j7zLV59hcAPkxrMNdnZMKP2 \ No newline at end of file
diff --git a/test/integration/targets/known_hosts/tasks/main.yml b/test/integration/targets/known_hosts/tasks/main.yml
index d5ffec4d..dc00dedd 100644
--- a/test/integration/targets/known_hosts/tasks/main.yml
+++ b/test/integration/targets/known_hosts/tasks/main.yml
@@ -99,7 +99,7 @@
# https://github.com/ansible/ansible/issues/78598
# test removing nonexistent host key when the other keys exist for the host
- name: remove different key
- known_hosts:
+ known_hosts:
name: example.org
key: "{{ example_org_ed25519_key }}"
state: absent
diff --git a/test/integration/targets/lookup-option-name/aliases b/test/integration/targets/lookup-option-name/aliases
deleted file mode 100644
index 498fedd5..00000000
--- a/test/integration/targets/lookup-option-name/aliases
+++ /dev/null
@@ -1,2 +0,0 @@
-shippable/posix/group4
-context/controller
diff --git a/test/integration/targets/lookup-option-name/tasks/main.yml b/test/integration/targets/lookup-option-name/tasks/main.yml
deleted file mode 100644
index 4f248c84..00000000
--- a/test/integration/targets/lookup-option-name/tasks/main.yml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- debug:
- msg: "{{ lookup('vars', name='test') }}"
-
-- debug:
- msg: "{{ query('vars', name='test') }}"
diff --git a/test/integration/targets/lookup_config/tasks/main.yml b/test/integration/targets/lookup_config/tasks/main.yml
index e5699d34..356d2f80 100644
--- a/test/integration/targets/lookup_config/tasks/main.yml
+++ b/test/integration/targets/lookup_config/tasks/main.yml
@@ -42,7 +42,6 @@
- name: remote user and port for ssh connection
set_fact:
ssh_user_and_port: '{{q("config", "remote_user", "port", plugin_type="connection", plugin_name="ssh")}}'
- ssh_user_and_port_and_origin: '{{q("config", "remote_user", "port", plugin_type="connection", plugin_name="ssh", show_origin=True)}}'
vars:
ansible_ssh_user: lola
ansible_ssh_port: 2022
@@ -72,5 +71,4 @@
- lookup_config_7 is failed
- '"Invalid setting" in lookup_config_7.msg'
- ssh_user_and_port == ['lola', 2022]
- - "ssh_user_and_port_and_origin == [['lola', 'var: ansible_ssh_user'], [2022, 'var: ansible_ssh_port']]"
- yolo_remote == ["yolo"]
diff --git a/test/integration/targets/lookup_fileglob/issue72873/test.yml b/test/integration/targets/lookup_fileglob/issue72873/test.yml
index 92d93d45..218ee58d 100644
--- a/test/integration/targets/lookup_fileglob/issue72873/test.yml
+++ b/test/integration/targets/lookup_fileglob/issue72873/test.yml
@@ -5,7 +5,7 @@
dir: files
tasks:
- file: path='{{ dir }}' state=directory
-
+
- file: path='setvars.bat' state=touch # in current directory!
- file: path='{{ dir }}/{{ item }}' state=touch
@@ -20,11 +20,11 @@
- name: Get working order results and sort them
set_fact:
- working: '{{ query("fileglob", "setvars.bat", dir ~ "/*.[ch]") | sort }}'
+ working: '{{ query("fileglob", "setvars.bat", "{{ dir }}/*.[ch]") | sort }}'
- name: Get broken order results and sort them
set_fact:
- broken: '{{ query("fileglob", dir ~ "/*.[ch]", "setvars.bat") | sort }}'
+ broken: '{{ query("fileglob", "{{ dir }}/*.[ch]", "setvars.bat") | sort }}'
- assert:
that:
diff --git a/test/integration/targets/lookup_first_found/tasks/main.yml b/test/integration/targets/lookup_first_found/tasks/main.yml
index ba248bd5..9aeaf1d1 100644
--- a/test/integration/targets/lookup_first_found/tasks/main.yml
+++ b/test/integration/targets/lookup_first_found/tasks/main.yml
@@ -94,56 +94,3 @@
- assert:
that:
- foo is defined
-
-# TODO: no 'terms' test
-- name: test first_found lookup with no terms
- set_fact:
- no_terms: "{{ query('first_found', files=['missing1', 'hosts', 'missing2'], paths=['/etc'], errors='ignore') }}"
-
-- assert:
- that: "no_terms|first == '/etc/hosts'"
-
-- name: handle templatable dictionary entries
- block:
-
- - name: Load variables specific for OS family
- assert:
- that:
- - "item is file"
- - "item|basename == 'itworks.yml'"
- with_first_found:
- - files:
- - "{{ansible_id}}-{{ansible_lsb.major_release}}.yml" # invalid var, should be skipped
- - "{{ansible_lsb.id}}-{{ansible_lsb.major_release}}.yml" # does not exist, but should try
- - "{{ansible_distribution}}-{{ansible_distribution_major_version}}.yml" # does not exist, but should try
- - itworks.yml
- - ishouldnotbefound.yml # this exist, but should not be found
- paths:
- - "{{role_path}}/vars"
-
- - name: Load variables specific for OS family, but now as list of dicts, same options as above
- assert:
- that:
- - "item is file"
- - "item|basename == 'itworks.yml'"
- with_first_found:
- - files:
- - "{{ansible_id}}-{{ansible_lsb.major_release}}.yml"
- paths:
- - "{{role_path}}/vars"
- - files:
- - "{{ansible_lsb.id}}-{{ansible_lsb.major_release}}.yml"
- paths:
- - "{{role_path}}/vars"
- - files:
- - "{{ansible_distribution}}-{{ansible_distribution_major_version}}.yml"
- paths:
- - "{{role_path}}/vars"
- - files:
- - itworks.yml
- paths:
- - "{{role_path}}/vars"
- - files:
- - ishouldnotbefound.yml
- paths:
- - "{{role_path}}/vars"
diff --git a/test/integration/targets/lookup_first_found/vars/ishouldnotbefound.yml b/test/integration/targets/lookup_first_found/vars/ishouldnotbefound.yml
deleted file mode 100644
index e4cc6d5d..00000000
--- a/test/integration/targets/lookup_first_found/vars/ishouldnotbefound.yml
+++ /dev/null
@@ -1 +0,0 @@
-really: i hide
diff --git a/test/integration/targets/lookup_first_found/vars/itworks.yml b/test/integration/targets/lookup_first_found/vars/itworks.yml
deleted file mode 100644
index 8f8a21a4..00000000
--- a/test/integration/targets/lookup_first_found/vars/itworks.yml
+++ /dev/null
@@ -1 +0,0 @@
-doesit: yes it does
diff --git a/test/integration/targets/lookup_sequence/tasks/main.yml b/test/integration/targets/lookup_sequence/tasks/main.yml
index e64801d3..bd0a4d80 100644
--- a/test/integration/targets/lookup_sequence/tasks/main.yml
+++ b/test/integration/targets/lookup_sequence/tasks/main.yml
@@ -195,4 +195,4 @@
- ansible_failed_task.name == "EXPECTED FAILURE - test bad format string message"
- ansible_failed_result.msg == expected
vars:
- expected: "bad formatting string: d"
+ expected: "bad formatting string: d" \ No newline at end of file
diff --git a/test/integration/targets/lookup_together/tasks/main.yml b/test/integration/targets/lookup_together/tasks/main.yml
index 115c9e52..71365a15 100644
--- a/test/integration/targets/lookup_together/tasks/main.yml
+++ b/test/integration/targets/lookup_together/tasks/main.yml
@@ -26,4 +26,4 @@
- assert:
that:
- ansible_failed_task.name == "EXPECTED FAILURE - test empty list"
- - ansible_failed_result.msg == "with_together requires at least one element in each list"
+ - ansible_failed_result.msg == "with_together requires at least one element in each list" \ No newline at end of file
diff --git a/test/integration/targets/lookup_url/aliases b/test/integration/targets/lookup_url/aliases
index 19b7d98f..ef37fce1 100644
--- a/test/integration/targets/lookup_url/aliases
+++ b/test/integration/targets/lookup_url/aliases
@@ -1,11 +1,4 @@
destructive
shippable/posix/group3
needs/httptester
-skip/macos # This test crashes Python due to https://wefearchange.org/2018/11/forkmacos.rst.html
-# Example failure:
-#
-# TASK [lookup_url : Test that retrieving a url works] ***************************
-# objc[15394]: +[__NSCFConstantString initialize] may have been in progress in another thread when fork() was called.
-# objc[15394]: +[__NSCFConstantString initialize] may have been in progress in another thread when fork() was called. We cannot safely call it or ignore it in t
-# he fork() child process. Crashing instead. Set a breakpoint on objc_initializeAfterForkError to debug.
-# ERROR! A worker was found in a dead state
+skip/macos/12.0 # This test crashes Python due to https://wefearchange.org/2018/11/forkmacos.rst.html
diff --git a/test/integration/targets/lookup_url/meta/main.yml b/test/integration/targets/lookup_url/meta/main.yml
index 6853708f..374b5fdf 100644
--- a/test/integration/targets/lookup_url/meta/main.yml
+++ b/test/integration/targets/lookup_url/meta/main.yml
@@ -1,2 +1,2 @@
-dependencies:
+dependencies:
- prepare_http_tests
diff --git a/test/integration/targets/lookup_url/tasks/main.yml b/test/integration/targets/lookup_url/tasks/main.yml
index 83fd5db6..2fb227ad 100644
--- a/test/integration/targets/lookup_url/tasks/main.yml
+++ b/test/integration/targets/lookup_url/tasks/main.yml
@@ -1,6 +1,6 @@
- name: Test that retrieving a url works
set_fact:
- web_data: "{{ lookup('url', 'https://' ~ httpbin_host ~ '/get?one') }}"
+ web_data: "{{ lookup('url', 'https://{{ httpbin_host }}/get?one') }}"
- name: Assert that the url was retrieved
assert:
@@ -9,7 +9,7 @@
- name: Test that retrieving a url with invalid cert fails
set_fact:
- web_data: "{{ lookup('url', 'https://' ~ badssl_host ~ '/') }}"
+ web_data: "{{ lookup('url', 'https://{{ badssl_host }}/') }}"
ignore_errors: True
register: url_invalid_cert
@@ -20,12 +20,12 @@
- name: Test that retrieving a url with invalid cert with validate_certs=False works
set_fact:
- web_data: "{{ lookup('url', 'https://' ~ badssl_host ~ '/', validate_certs=False) }}"
+ web_data: "{{ lookup('url', 'https://{{ badssl_host }}/', validate_certs=False) }}"
register: url_no_validate_cert
- assert:
that:
- - badssl_host_substring in web_data
+ - "'{{ badssl_host_substring }}' in web_data"
- vars:
url: https://{{ httpbin_host }}/get
@@ -52,27 +52,3 @@
- name: Test use_netrc=False
import_tasks: use_netrc.yml
-
-- vars:
- ansible_lookup_url_agent: ansible-test-lookup-url-agent
- block:
- - name: Test user agent
- set_fact:
- web_data: "{{ lookup('url', 'https://' ~ httpbin_host ~ '/user-agent') }}"
-
- - name: Assert that user agent is set
- assert:
- that:
- - ansible_lookup_url_agent in web_data['user-agent']
-
-- vars:
- ansible_lookup_url_force_basic_auth: yes
- block:
- - name: Test force basic auth
- set_fact:
- web_data: "{{ lookup('url', 'https://' ~ httpbin_host ~ '/headers', username='abc') }}"
-
- - name: Assert that Authorization header is set
- assert:
- that:
- - "'Authorization' in web_data.headers"
diff --git a/test/integration/targets/lookup_url/tasks/use_netrc.yml b/test/integration/targets/lookup_url/tasks/use_netrc.yml
index b90d05dc..68dc8934 100644
--- a/test/integration/targets/lookup_url/tasks/use_netrc.yml
+++ b/test/integration/targets/lookup_url/tasks/use_netrc.yml
@@ -10,7 +10,7 @@
- name: test Url lookup with ~/.netrc forced Basic auth
set_fact:
- web_data: "{{ lookup('ansible.builtin.url', 'https://' ~ httpbin_host ~ '/bearer', headers={'Authorization':'Bearer foobar'}) }}"
+ web_data: "{{ lookup('ansible.builtin.url', 'https://{{ httpbin_host }}/bearer', headers={'Authorization':'Bearer foobar'}) }}"
ignore_errors: yes
- name: assert test Url lookup with ~/.netrc forced Basic auth
@@ -18,11 +18,11 @@
that:
- "web_data.token.find('v=' ~ 'Zm9vOmJhcg==') == -1"
fail_msg: "Was expecting 'foo:bar' in base64, but received: {{ web_data }}"
- success_msg: "Expected Basic authentication even Bearer headers were sent"
+ success_msg: "Expected Basic authentication even Bearer headers were sent"
- name: test Url lookup with use_netrc=False
set_fact:
- web_data: "{{ lookup('ansible.builtin.url', 'https://' ~ httpbin_host ~ '/bearer', headers={'Authorization':'Bearer foobar'}, use_netrc='False') }}"
+ web_data: "{{ lookup('ansible.builtin.url', 'https://{{ httpbin_host }}/bearer', headers={'Authorization':'Bearer foobar'}, use_netrc='False') }}"
- name: assert test Url lookup with netrc=False used Bearer authentication
assert:
@@ -34,4 +34,4 @@
- name: Clean up. Removing ~/.netrc
file:
path: ~/.netrc
- state: absent
+ state: absent \ No newline at end of file
diff --git a/test/integration/targets/loop-connection/collections/ansible_collections/ns/name/meta/runtime.yml b/test/integration/targets/loop-connection/collections/ansible_collections/ns/name/meta/runtime.yml
index bd892de9..09322a9d 100644
--- a/test/integration/targets/loop-connection/collections/ansible_collections/ns/name/meta/runtime.yml
+++ b/test/integration/targets/loop-connection/collections/ansible_collections/ns/name/meta/runtime.yml
@@ -1,4 +1,4 @@
plugin_routing:
connection:
redirected_dummy:
- redirect: ns.name.dummy
+ redirect: ns.name.dummy \ No newline at end of file
diff --git a/test/integration/targets/loop-connection/main.yml b/test/integration/targets/loop-connection/main.yml
index ba60e649..fbffe309 100644
--- a/test/integration/targets/loop-connection/main.yml
+++ b/test/integration/targets/loop-connection/main.yml
@@ -30,4 +30,4 @@
- assert:
that:
- connected_test.results[0].stderr == "ran - 1"
- - connected_test.results[1].stderr == "ran - 2"
+ - connected_test.results[1].stderr == "ran - 2" \ No newline at end of file
diff --git a/test/integration/targets/missing_required_lib/library/missing_required_lib.py b/test/integration/targets/missing_required_lib/library/missing_required_lib.py
index 8c7ba884..480ea001 100644
--- a/test/integration/targets/missing_required_lib/library/missing_required_lib.py
+++ b/test/integration/targets/missing_required_lib/library/missing_required_lib.py
@@ -8,7 +8,7 @@ __metaclass__ = type
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
try:
- import ansible_missing_lib # pylint: disable=unused-import
+ import ansible_missing_lib
HAS_LIB = True
except ImportError as e:
HAS_LIB = False
diff --git a/test/integration/targets/module_defaults/action_plugins/debug.py b/test/integration/targets/module_defaults/action_plugins/debug.py
index 0c43201c..2584fd3d 100644
--- a/test/integration/targets/module_defaults/action_plugins/debug.py
+++ b/test/integration/targets/module_defaults/action_plugins/debug.py
@@ -20,7 +20,7 @@ __metaclass__ = type
from ansible.errors import AnsibleUndefinedVariable
from ansible.module_utils.six import string_types
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.plugins.action import ActionBase
diff --git a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/eos.py b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/eos.py
index 174f3725..0d39f26d 100644
--- a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/eos.py
+++ b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/eos.py
@@ -5,6 +5,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action.normal import ActionModule as ActionBase
+from ansible.utils.vars import merge_hash
class ActionModule(ActionBase):
diff --git a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/ios.py b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/ios.py
index 7ba24348..20284fd1 100644
--- a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/ios.py
+++ b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/ios.py
@@ -5,6 +5,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action.normal import ActionModule as ActionBase
+from ansible.utils.vars import merge_hash
class ActionModule(ActionBase):
diff --git a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/vyos.py b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/vyos.py
index 67050fbd..b0e1904b 100644
--- a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/vyos.py
+++ b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/action/vyos.py
@@ -5,6 +5,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action.normal import ActionModule as ActionBase
+from ansible.utils.vars import merge_hash
class ActionModule(ActionBase):
diff --git a/test/integration/targets/module_no_log/aliases b/test/integration/targets/module_no_log/aliases
index afa1c9c3..9e84f636 100644
--- a/test/integration/targets/module_no_log/aliases
+++ b/test/integration/targets/module_no_log/aliases
@@ -1,4 +1,5 @@
shippable/posix/group3
context/controller
skip/freebsd # not configured to log user.info to /var/log/syslog
+skip/osx # not configured to log user.info to /var/log/syslog
skip/macos # not configured to log user.info to /var/log/syslog
diff --git a/test/integration/targets/module_no_log/library/module_that_has_secret.py b/test/integration/targets/module_no_log/library/module_that_has_secret.py
deleted file mode 100644
index 035228c8..00000000
--- a/test/integration/targets/module_no_log/library/module_that_has_secret.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/python
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-def main():
- module = AnsibleModule(argument_spec=dict(
- secret=dict(no_log=True),
- notsecret=dict(no_log=False),
- ))
-
- msg = "My secret is: (%s), but don't tell %s" % (module.params['secret'], module.params['notsecret'])
- module.exit_json(msg=msg, changed=bool(module.params['secret'] == module.params['notsecret']))
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/integration/targets/module_no_log/tasks/main.yml b/test/integration/targets/module_no_log/tasks/main.yml
index bf024105..cf9e5802 100644
--- a/test/integration/targets/module_no_log/tasks/main.yml
+++ b/test/integration/targets/module_no_log/tasks/main.yml
@@ -59,41 +59,3 @@
# 2) the AnsibleModule.log method is not working
- good_message in grep.stdout
- bad_message not in grep.stdout
-
-- name: Ensure we do not obscure what we should not
- block:
- - module_that_has_secret:
- secret: u
- notsecret: u
- register: ouch
- ignore_errors: true
-
- - name: no log wont obscure booleans when True, but still hide in msg
- assert:
- that:
- - ouch['changed'] is boolean
- - "'*' in ouch['msg']"
-
- - module_that_has_secret:
- secret: a
- notsecret: b
- register: ouch
- ignore_errors: true
-
- - name: no log wont obscure booleans when False, but still hide in msg
- assert:
- that:
- - ouch['changed'] is boolean
- - "'*' in ouch['msg']"
-
- - module_that_has_secret:
- secret: True
- notsecret: False
- register: ouch
- ignore_errors: true
-
- - name: no log does not hide bool values
- assert:
- that:
- - ouch['changed'] is boolean
- - "'*' not in ouch['msg']"
diff --git a/test/integration/targets/module_utils/library/test.py b/test/integration/targets/module_utils/library/test.py
index 857d3d8e..fb6c8a81 100644
--- a/test/integration/targets/module_utils/library/test.py
+++ b/test/integration/targets/module_utils/library/test.py
@@ -11,8 +11,8 @@ import ansible.module_utils.foo0
results['foo0'] = ansible.module_utils.foo0.data
# Test depthful import with no from
-import ansible.module_utils.bar0.foo3
-results['bar0'] = ansible.module_utils.bar0.foo3.data
+import ansible.module_utils.bar0.foo
+results['bar0'] = ansible.module_utils.bar0.foo.data
# Test import of module_utils/foo1.py
from ansible.module_utils import foo1
@@ -72,12 +72,12 @@ from ansible.module_utils.spam8.ham import eggs
results['spam8'] = (bacon.data, eggs)
# Test that import of module_utils/qux1/quux.py using as works
-from ansible.module_utils.qux1 import quux as two
-results['qux1'] = two.data
+from ansible.module_utils.qux1 import quux as one
+results['qux1'] = one.data
# Test that importing qux2/quux.py and qux2/quuz.py using as works
-from ansible.module_utils.qux2 import quux as three, quuz as four
-results['qux2'] = (three.data, four.data)
+from ansible.module_utils.qux2 import quux as one, quuz as two
+results['qux2'] = (one.data, two.data)
# Test depth
from ansible.module_utils.a.b.c.d.e.f.g.h import data
diff --git a/test/integration/targets/module_utils/library/test_failure.py b/test/integration/targets/module_utils/library/test_failure.py
index ab80ceae..efb3ddae 100644
--- a/test/integration/targets/module_utils/library/test_failure.py
+++ b/test/integration/targets/module_utils/library/test_failure.py
@@ -6,9 +6,9 @@ results = {}
# Test that we are rooted correctly
# Following files:
# module_utils/yak/zebra/foo.py
-from ansible.module_utils.zebra import foo4
+from ansible.module_utils.zebra import foo
-results['zebra'] = foo4.data
+results['zebra'] = foo.data
from ansible.module_utils.basic import AnsibleModule
AnsibleModule(argument_spec=dict()).exit_json(**results)
diff --git a/test/integration/targets/module_utils/module_utils/bar0/foo3.py b/test/integration/targets/module_utils/module_utils/bar0/foo.py
index 1072dcc2..1072dcc2 100644
--- a/test/integration/targets/module_utils/module_utils/bar0/foo3.py
+++ b/test/integration/targets/module_utils/module_utils/bar0/foo.py
diff --git a/test/integration/targets/module_utils/module_utils/foo.py b/test/integration/targets/module_utils/module_utils/foo.py
new file mode 100644
index 00000000..20698f1f
--- /dev/null
+++ b/test/integration/targets/module_utils/module_utils/foo.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+
+foo = "FOO FROM foo.py"
diff --git a/test/units/module_utils/compat/__init__.py b/test/integration/targets/module_utils/module_utils/sub/bar/__init__.py
index e69de29b..e69de29b 100644
--- a/test/units/module_utils/compat/__init__.py
+++ b/test/integration/targets/module_utils/module_utils/sub/bar/__init__.py
diff --git a/test/integration/targets/module_utils/module_utils/sub/bar/bam.py b/test/integration/targets/module_utils/module_utils/sub/bar/bam.py
new file mode 100644
index 00000000..02fafd40
--- /dev/null
+++ b/test/integration/targets/module_utils/module_utils/sub/bar/bam.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+
+bam = "BAM FROM sub/bar/bam.py"
diff --git a/test/integration/targets/module_utils/module_utils/sub/bar/bar.py b/test/integration/targets/module_utils/module_utils/sub/bar/bar.py
new file mode 100644
index 00000000..8566901f
--- /dev/null
+++ b/test/integration/targets/module_utils/module_utils/sub/bar/bar.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+
+bar = "BAR FROM sub/bar/bar.py"
diff --git a/test/integration/targets/module_utils/module_utils/yak/zebra/foo4.py b/test/integration/targets/module_utils/module_utils/yak/zebra/foo.py
index 89b2bfe8..89b2bfe8 100644
--- a/test/integration/targets/module_utils/module_utils/yak/zebra/foo4.py
+++ b/test/integration/targets/module_utils/module_utils/yak/zebra/foo.py
diff --git a/test/integration/targets/module_utils/module_utils_test.yml b/test/integration/targets/module_utils/module_utils_test.yml
index 352bc582..4e948bd6 100644
--- a/test/integration/targets/module_utils/module_utils_test.yml
+++ b/test/integration/targets/module_utils/module_utils_test.yml
@@ -47,7 +47,7 @@
assert:
that:
- result is failed
- - result['msg'] == "Could not find imported module support code for ansible.modules.test_failure. Looked for (['ansible.module_utils.zebra.foo4', 'ansible.module_utils.zebra'])"
+ - result['msg'] == "Could not find imported module support code for ansible.modules.test_failure. Looked for (['ansible.module_utils.zebra.foo', 'ansible.module_utils.zebra'])"
- name: Test that alias deprecation works
test_alias_deprecation:
diff --git a/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1 b/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
index 9644df93..6170f046 100644
--- a/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
+++ b/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
@@ -87,7 +87,7 @@ Function Assert-DictionaryEqual {
}
Function Exit-Module {
- # Make sure Exit actually calls exit and not our overridden test behaviour
+ # Make sure Exit actually calls exit and not our overriden test behaviour
[Ansible.Basic.AnsibleModule]::Exit = { param([Int32]$rc) exit $rc }
Write-Output -InputObject (ConvertTo-Json -InputObject $module.Result -Compress -Depth 99)
$module.ExitJson()
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps1
index 5cb1a72d..d18c42d7 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps1
@@ -328,73 +328,5 @@ finally {
}
Assert-Equal -actual ([Namespace12.Class12]::GetString()) -expected "b"
-$unsafe_code_fail = @'
-using System;
-
-namespace Namespace13
-{
- public class Class13
- {
-
- public static int GetNumber()
- {
- int num = 2;
- int* numPtr = &num;
-
- DoubleNumber(numPtr);
-
- return num;
- }
-
- private unsafe static void DoubleNumber(int* num)
- {
- *num = *num * 3;
- }
- }
-}
-'@
-$failed = $false
-try {
- Add-CSharpType -Reference $unsafe_code_fail
-}
-catch {
- $failed = $true
- $actual = $_.Exception.Message.Contains("error CS0227: Unsafe code may only appear if compiling with /unsafe")
- Assert-Equal -actual $actual -expected $true
-}
-Assert-Equal -actual $failed -expected $true
-
-$unsafe_code = @'
-using System;
-
-//AllowUnsafe
-
-namespace Namespace13
-{
- public class Class13
- {
- public static int GetNumber()
- {
- int num = 2;
- unsafe
- {
- int* numPtr = &num;
-
- DoubleNumber(numPtr);
- }
-
- return num;
- }
-
- private unsafe static void DoubleNumber(int* num)
- {
- *num = *num * 2;
- }
- }
-}
-'@
-Add-CSharpType -Reference $unsafe_code
-Assert-Equal -actual ([Namespace13.Class13]::GetNumber()) -expected 4
-
$result.res = "success"
Exit-Json -obj $result
diff --git a/test/integration/targets/no_log/no_log_config.yml b/test/integration/targets/no_log/no_log_config.yml
deleted file mode 100644
index 8a508805..00000000
--- a/test/integration/targets/no_log/no_log_config.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-- hosts: testhost
- gather_facts: false
- tasks:
- - debug:
- no_log: true
-
- - debug:
- no_log: false
-
- - debug:
-
- - debug:
- loop: '{{ range(3) }}'
diff --git a/test/integration/targets/no_log/runme.sh b/test/integration/targets/no_log/runme.sh
index bf764bf9..bb5c048f 100755
--- a/test/integration/targets/no_log/runme.sh
+++ b/test/integration/targets/no_log/runme.sh
@@ -5,7 +5,7 @@ set -eux
# This test expects 7 loggable vars and 0 non-loggable ones.
# If either mismatches it fails, run the ansible-playbook command to debug.
[ "$(ansible-playbook no_log_local.yml -i ../../inventory -vvvvv "$@" | awk \
-'BEGIN { logme = 0; nolog = 0; } /LOG_ME/ { logme += 1;} /DO_NOT_LOG/ { nolog += 1;} END { printf "%d/%d", logme, nolog; }')" = "27/0" ]
+'BEGIN { logme = 0; nolog = 0; } /LOG_ME/ { logme += 1;} /DO_NOT_LOG/ { nolog += 1;} END { printf "%d/%d", logme, nolog; }')" = "26/0" ]
# deal with corner cases with no log and loops
# no log enabled, should produce 6 censored messages
@@ -19,8 +19,3 @@ set -eux
# test invalid data passed to a suboption
[ "$(ansible-playbook no_log_suboptions_invalid.yml -i ../../inventory -vvvvv "$@" | grep -Ec '(SUPREME|IDIOM|MOCKUP|EDUCATED|FOOTREST|CRAFTY|FELINE|CRYSTAL|EXPECTANT|AGROUND|GOLIATH|FREEFALL)')" = "0" ]
-
-# test variations on ANSIBLE_NO_LOG
-[ "$(ansible-playbook no_log_config.yml -i ../../inventory -vvvvv "$@" | grep -Ec 'the output has been hidden')" = "1" ]
-[ "$(ANSIBLE_NO_LOG=0 ansible-playbook no_log_config.yml -i ../../inventory -vvvvv "$@" | grep -Ec 'the output has been hidden')" = "1" ]
-[ "$(ANSIBLE_NO_LOG=1 ansible-playbook no_log_config.yml -i ../../inventory -vvvvv "$@" | grep -Ec 'the output has been hidden')" = "6" ]
diff --git a/test/integration/targets/old_style_cache_plugins/aliases b/test/integration/targets/old_style_cache_plugins/aliases
index 163129e2..37773831 100644
--- a/test/integration/targets/old_style_cache_plugins/aliases
+++ b/test/integration/targets/old_style_cache_plugins/aliases
@@ -2,4 +2,5 @@ destructive
needs/root
shippable/posix/group5
context/controller
+skip/osx
skip/macos
diff --git a/test/integration/targets/old_style_cache_plugins/plugins/cache/configurable_redis.py b/test/integration/targets/old_style_cache_plugins/plugins/cache/configurable_redis.py
index 23c7789b..44b6cf93 100644
--- a/test/integration/targets/old_style_cache_plugins/plugins/cache/configurable_redis.py
+++ b/test/integration/targets/old_style_cache_plugins/plugins/cache/configurable_redis.py
@@ -44,6 +44,7 @@ DOCUMENTATION = '''
import time
import json
+from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
from ansible.plugins.cache import BaseCacheModule
diff --git a/test/integration/targets/old_style_cache_plugins/setup_redis_cache.yml b/test/integration/targets/old_style_cache_plugins/setup_redis_cache.yml
index b7cd4831..8aad37a3 100644
--- a/test/integration/targets/old_style_cache_plugins/setup_redis_cache.yml
+++ b/test/integration/targets/old_style_cache_plugins/setup_redis_cache.yml
@@ -20,9 +20,8 @@
- name: get the latest stable redis server release
get_url:
- url: https://download.redis.io/redis-stable.tar.gz
+ url: http://download.redis.io/redis-stable.tar.gz
dest: ./
- timeout: 60
- name: unzip download
unarchive:
diff --git a/test/integration/targets/old_style_vars_plugins/deprecation_warning/v2_vars_plugin.py b/test/integration/targets/old_style_vars_plugins/deprecation_warning/v2_vars_plugin.py
deleted file mode 100644
index f342b698..00000000
--- a/test/integration/targets/old_style_vars_plugins/deprecation_warning/v2_vars_plugin.py
+++ /dev/null
@@ -1,6 +0,0 @@
-class VarsModule:
- def get_host_vars(self, entity):
- return {}
-
- def get_group_vars(self, entity):
- return {}
diff --git a/test/integration/targets/old_style_vars_plugins/deprecation_warning/vars.py b/test/integration/targets/old_style_vars_plugins/deprecation_warning/vars.py
index f554be04..d5c9a422 100644
--- a/test/integration/targets/old_style_vars_plugins/deprecation_warning/vars.py
+++ b/test/integration/targets/old_style_vars_plugins/deprecation_warning/vars.py
@@ -2,7 +2,7 @@ from ansible.plugins.vars import BaseVarsPlugin
class VarsModule(BaseVarsPlugin):
- REQUIRES_WHITELIST = True
+ REQUIRES_WHITELIST = False
def get_vars(self, loader, path, entities):
return {}
diff --git a/test/integration/targets/old_style_vars_plugins/roles/a/tasks/main.yml b/test/integration/targets/old_style_vars_plugins/roles/a/tasks/main.yml
deleted file mode 100644
index 8e0742a5..00000000
--- a/test/integration/targets/old_style_vars_plugins/roles/a/tasks/main.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- assert:
- that:
- - auto_role_var is defined
diff --git a/test/integration/targets/old_style_vars_plugins/roles/a/vars_plugins/auto_role_vars.py b/test/integration/targets/old_style_vars_plugins/roles/a/vars_plugins/auto_role_vars.py
deleted file mode 100644
index a1cd30d3..00000000
--- a/test/integration/targets/old_style_vars_plugins/roles/a/vars_plugins/auto_role_vars.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from __future__ import annotations
-
-from ansible.plugins.vars import BaseVarsPlugin
-
-
-class VarsModule(BaseVarsPlugin):
- # Implicitly
- # REQUIRES_ENABLED = False
-
- def get_vars(self, loader, path, entities):
- return {'auto_role_var': True}
diff --git a/test/integration/targets/old_style_vars_plugins/runme.sh b/test/integration/targets/old_style_vars_plugins/runme.sh
index 9f416235..4cd19168 100755
--- a/test/integration/targets/old_style_vars_plugins/runme.sh
+++ b/test/integration/targets/old_style_vars_plugins/runme.sh
@@ -12,39 +12,9 @@ export ANSIBLE_VARS_PLUGINS=./vars_plugins
export ANSIBLE_VARS_ENABLED=require_enabled
[ "$(ansible-inventory -i localhost, --list --yaml all "$@" | grep -c 'require_enabled')" = "1" ]
-# Test deprecated features
+# Test the deprecated class attribute
export ANSIBLE_VARS_PLUGINS=./deprecation_warning
-WARNING_1="The VarsModule class variable 'REQUIRES_WHITELIST' is deprecated. Use 'REQUIRES_ENABLED' instead."
-WARNING_2="The vars plugin v2_vars_plugin .* is relying on the deprecated entrypoints 'get_host_vars' and 'get_group_vars'"
+WARNING="The VarsModule class variable 'REQUIRES_WHITELIST' is deprecated. Use 'REQUIRES_ENABLED' instead."
ANSIBLE_DEPRECATION_WARNINGS=True ANSIBLE_NOCOLOR=True ANSIBLE_FORCE_COLOR=False \
- ansible-inventory -i localhost, --list all "$@" 2> err.txt
-for WARNING in "$WARNING_1" "$WARNING_2"; do
- ansible localhost -m debug -a "msg={{ lookup('file', 'err.txt') | regex_replace('\n', '') }}" | grep "$WARNING"
-done
-
-# Test how many times vars plugins are loaded for a simple play containing a task
-# host_group_vars is stateless, so we can load it once and reuse it, every other vars plugin should be instantiated before it runs
-cat << EOF > "test_task_vars.yml"
----
-- hosts: localhost
- connection: local
- gather_facts: no
- tasks:
- - debug:
-EOF
-
-# hide the debug noise by dumping to a file
-trap 'rm -rf -- "out.txt"' EXIT
-
-ANSIBLE_DEBUG=True ansible-playbook test_task_vars.yml > out.txt
-[ "$(grep -c "Loading VarsModule 'host_group_vars'" out.txt)" -eq 1 ]
-[ "$(grep -c "Loading VarsModule 'require_enabled'" out.txt)" -gt 50 ]
-[ "$(grep -c "Loading VarsModule 'auto_enabled'" out.txt)" -gt 50 ]
-
-export ANSIBLE_VARS_ENABLED=ansible.builtin.host_group_vars
-ANSIBLE_DEBUG=True ansible-playbook test_task_vars.yml > out.txt
-[ "$(grep -c "Loading VarsModule 'host_group_vars'" out.txt)" -eq 1 ]
-[ "$(grep -c "Loading VarsModule 'require_enabled'" out.txt)" -lt 3 ]
-[ "$(grep -c "Loading VarsModule 'auto_enabled'" out.txt)" -gt 50 ]
-
-ansible localhost -m include_role -a 'name=a' "$@"
+ ansible-inventory -i localhost, --list all 2> err.txt
+ansible localhost -m debug -a "msg={{ lookup('file', 'err.txt') | regex_replace('\n', '') }}" | grep "$WARNING"
diff --git a/test/integration/targets/omit/75692.yml b/test/integration/targets/omit/75692.yml
index 5ba8a2df..b4000c97 100644
--- a/test/integration/targets/omit/75692.yml
+++ b/test/integration/targets/omit/75692.yml
@@ -2,10 +2,10 @@
hosts: testhost
gather_facts: false
become: yes
- # become_user needed at play level for testing this behavior
become_user: nobody
roles:
- name: setup_test_user
+ become: yes
become_user: root
tasks:
- shell: whoami
diff --git a/test/integration/targets/package/tasks/main.yml b/test/integration/targets/package/tasks/main.yml
index 37267aa6..c17525d8 100644
--- a/test/integration/targets/package/tasks/main.yml
+++ b/test/integration/targets/package/tasks/main.yml
@@ -239,4 +239,4 @@
that:
- "result is changed"
- when: ansible_distribution == "Fedora"
+ when: ansible_distribution == "Fedora"
\ No newline at end of file
diff --git a/test/integration/targets/package_facts/aliases b/test/integration/targets/package_facts/aliases
index f5edf4b1..5a5e4646 100644
--- a/test/integration/targets/package_facts/aliases
+++ b/test/integration/targets/package_facts/aliases
@@ -1,2 +1,3 @@
shippable/posix/group2
+skip/osx
skip/macos
diff --git a/test/integration/targets/parsing/bad_parsing.yml b/test/integration/targets/parsing/bad_parsing.yml
new file mode 100644
index 00000000..953ec072
--- /dev/null
+++ b/test/integration/targets/parsing/bad_parsing.yml
@@ -0,0 +1,12 @@
+- hosts: testhost
+
+ # the following commands should all parse fine and execute fine
+ # and represent quoting scenarios that should be legit
+
+ gather_facts: False
+
+ roles:
+
+ # this one has a lot of things that should fail, see makefile for operation w/ tags
+
+ - { role: test_bad_parsing }
diff --git a/test/integration/targets/parsing/parsing.yml b/test/integration/targets/parsing/parsing.yml
deleted file mode 100644
index 9d5ff41a..00000000
--- a/test/integration/targets/parsing/parsing.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-- hosts: testhost
- gather_facts: no
- tasks:
- - name: test that a variable cannot inject raw arguments
- shell: echo hi {{ chdir }}
- vars:
- chdir: mom chdir=/tmp
- register: raw_injection
-
- - name: test that a variable cannot inject kvp arguments as a kvp
- file: path={{ test_file }} {{ test_input }}
- vars:
- test_file: "{{ output_dir }}/ansible_test_file"
- test_input: "owner=test"
- register: kvp_kvp_injection
- ignore_errors: yes
-
- - name: test that a variable cannot inject kvp arguments as a value
- file: state=absent path='{{ kvp_in_var }}'
- vars:
- kvp_in_var: "{{ output_dir }}' owner='test"
- register: kvp_value_injection
-
- - name: test that a missing filter fails
- debug:
- msg: "{{ output_dir | badfiltername }}"
- register: filter_missing
- ignore_errors: yes
-
- - assert:
- that:
- - raw_injection.stdout == 'hi mom chdir=/tmp'
- - kvp_kvp_injection is failed
- - kvp_value_injection.path.endswith("' owner='test")
- - filter_missing is failed
diff --git a/test/integration/targets/parsing/roles/test_bad_parsing/tasks/main.yml b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/main.yml
new file mode 100644
index 00000000..f1b2ec6a
--- /dev/null
+++ b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/main.yml
@@ -0,0 +1,60 @@
+# test code for the ping module
+# (c) 2014, Michael DeHaan <michael@ansible.com>
+
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# the following tests all raise errors, to use them in a Makefile, we run them with different flags, as
+# otherwise ansible stops at the first one and we want to ensure STOP conditions for each
+
+- set_fact:
+ test_file: "{{ output_dir }}/ansible_test_file" # FIXME, use set tempdir
+ test_input: "owner=test"
+ bad_var: "{{ output_dir }}' owner=test"
+ chdir: "mom chdir=/tmp"
+ tags: common
+
+- file: name={{test_file}} state=touch
+ tags: common
+
+- name: remove touched file
+ file: name={{test_file}} state=absent
+ tags: common
+
+- name: include test that we cannot insert arguments
+ include: scenario1.yml
+ tags: scenario1
+
+- name: include test that we cannot duplicate arguments
+ include: scenario2.yml
+ tags: scenario2
+
+- name: include test that we can't do this for the shell module
+ include: scenario3.yml
+ tags: scenario3
+
+- name: include test that we can't go all Little Bobby Droptables on a quoted var to add more
+ include: scenario4.yml
+ tags: scenario4
+
+- name: test that a missing/malformed jinja2 filter fails
+ debug: msg="{{output_dir|badfiltername}}"
+ tags: scenario5
+ register: filter_fail
+ ignore_errors: yes
+
+- assert:
+ that:
+ - filter_fail is failed
diff --git a/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario1.yml b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario1.yml
new file mode 100644
index 00000000..8a82fb95
--- /dev/null
+++ b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario1.yml
@@ -0,0 +1,4 @@
+- name: test that we cannot insert arguments
+ file: path={{ test_file }} {{ test_input }}
+ failed_when: False # ignore the module, just test the parser
+ tags: scenario1
diff --git a/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario2.yml b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario2.yml
new file mode 100644
index 00000000..c3b4b13c
--- /dev/null
+++ b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario2.yml
@@ -0,0 +1,4 @@
+- name: test that we cannot duplicate arguments
+ file: path={{ test_file }} owner=test2 {{ test_input }}
+ failed_when: False # ignore the module, just test the parser
+ tags: scenario2
diff --git a/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario3.yml b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario3.yml
new file mode 100644
index 00000000..a228f70e
--- /dev/null
+++ b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario3.yml
@@ -0,0 +1,4 @@
+- name: test that we can't do this for the shell module
+ shell: echo hi {{ chdir }}
+ failed_when: False
+ tags: scenario3
diff --git a/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario4.yml b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario4.yml
new file mode 100644
index 00000000..2845adca
--- /dev/null
+++ b/test/integration/targets/parsing/roles/test_bad_parsing/tasks/scenario4.yml
@@ -0,0 +1,4 @@
+- name: test that we can't go all Little Bobby Droptables on a quoted var to add more
+ file: "name={{ bad_var }}"
+ failed_when: False
+ tags: scenario4
diff --git a/test/integration/targets/parsing/roles/test_bad_parsing/vars/main.yml b/test/integration/targets/parsing/roles/test_bad_parsing/vars/main.yml
new file mode 100644
index 00000000..1aaeac77
--- /dev/null
+++ b/test/integration/targets/parsing/roles/test_bad_parsing/vars/main.yml
@@ -0,0 +1,2 @@
+---
+output_dir: .
diff --git a/test/integration/targets/parsing/roles/test_good_parsing/tasks/main.yml b/test/integration/targets/parsing/roles/test_good_parsing/tasks/main.yml
index 25e91f28..d225c0f9 100644
--- a/test/integration/targets/parsing/roles/test_good_parsing/tasks/main.yml
+++ b/test/integration/targets/parsing/roles/test_good_parsing/tasks/main.yml
@@ -121,10 +121,7 @@
- result.cmd == "echo foo --arg=a --arg=b"
- name: test includes with params
- include_tasks: test_include.yml
- vars:
- fact_name: include_params
- param: "{{ test_input }}"
+ include: test_include.yml fact_name=include_params param="{{ test_input }}"
- name: assert the include set the correct fact for the param
assert:
@@ -132,10 +129,7 @@
- include_params == test_input
- name: test includes with quoted params
- include_tasks: test_include.yml
- vars:
- fact_name: double_quoted_param
- param: "this is a param with double quotes"
+ include: test_include.yml fact_name=double_quoted_param param="this is a param with double quotes"
- name: assert the include set the correct fact for the double quoted param
assert:
@@ -143,10 +137,7 @@
- double_quoted_param == "this is a param with double quotes"
- name: test includes with single quoted params
- include_tasks: test_include.yml
- vars:
- fact_name: single_quoted_param
- param: 'this is a param with single quotes'
+ include: test_include.yml fact_name=single_quoted_param param='this is a param with single quotes'
- name: assert the include set the correct fact for the single quoted param
assert:
@@ -154,7 +145,7 @@
- single_quoted_param == "this is a param with single quotes"
- name: test includes with quoted params in complex args
- include_tasks: test_include.yml
+ include: test_include.yml
vars:
fact_name: complex_param
param: "this is a param in a complex arg with double quotes"
@@ -174,7 +165,7 @@
- result.msg == "this should be debugged"
- name: test conditional includes
- include_tasks: test_include_conditional.yml
+ include: test_include_conditional.yml
when: false
- name: assert the nested include from test_include_conditional was not set
diff --git a/test/integration/targets/parsing/roles/test_good_parsing/tasks/test_include_conditional.yml b/test/integration/targets/parsing/roles/test_good_parsing/tasks/test_include_conditional.yml
index a1d8b7ce..070888da 100644
--- a/test/integration/targets/parsing/roles/test_good_parsing/tasks/test_include_conditional.yml
+++ b/test/integration/targets/parsing/roles/test_good_parsing/tasks/test_include_conditional.yml
@@ -1 +1 @@
-- include_tasks: test_include_nested.yml
+- include: test_include_nested.yml
diff --git a/test/integration/targets/parsing/runme.sh b/test/integration/targets/parsing/runme.sh
index 2d550082..022ce4cf 100755
--- a/test/integration/targets/parsing/runme.sh
+++ b/test/integration/targets/parsing/runme.sh
@@ -2,5 +2,5 @@
set -eux
-ansible-playbook parsing.yml -i ../../inventory "$@" -e "output_dir=${OUTPUT_DIR}"
-ansible-playbook good_parsing.yml -i ../../inventory "$@"
+ansible-playbook bad_parsing.yml -i ../../inventory -vvv "$@" --tags prepare,common,scenario5
+ansible-playbook good_parsing.yml -i ../../inventory -v "$@"
diff --git a/test/integration/targets/path_lookups/roles/showfile/tasks/notmain.yml b/test/integration/targets/path_lookups/roles/showfile/tasks/main.yml
index 1b380579..1b380579 100644
--- a/test/integration/targets/path_lookups/roles/showfile/tasks/notmain.yml
+++ b/test/integration/targets/path_lookups/roles/showfile/tasks/main.yml
diff --git a/test/integration/targets/path_lookups/testplay.yml b/test/integration/targets/path_lookups/testplay.yml
index bc05c7e5..8bf45532 100644
--- a/test/integration/targets/path_lookups/testplay.yml
+++ b/test/integration/targets/path_lookups/testplay.yml
@@ -4,11 +4,9 @@
pre_tasks:
- name: remove {{ remove }}
file: path={{ playbook_dir }}/{{ remove }} state=absent
- tasks:
- - import_role:
- name: showfile
- tasks_from: notmain.yml
-
+ roles:
+ - showfile
+ post_tasks:
- name: from play
set_fact: play_result="{{lookup('file', 'testfile')}}"
diff --git a/test/integration/targets/pause/pause-6.yml b/test/integration/targets/pause/pause-6.yml
deleted file mode 100644
index f7315bbc..00000000
--- a/test/integration/targets/pause/pause-6.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-- name: Test pause module input isn't captured with a timeout
- hosts: localhost
- become: no
- gather_facts: no
-
- tasks:
- - name: pause with the default message
- pause:
- seconds: 3
- register: default_msg_pause
-
- - name: pause with a custom message
- pause:
- prompt: Wait for three seconds
- seconds: 3
- register: custom_msg_pause
-
- - name: Ensure that input was not captured
- assert:
- that:
- - default_msg_pause.user_input == ''
- - custom_msg_pause.user_input == ''
-
- - debug:
- msg: Task after pause
diff --git a/test/integration/targets/pause/test-pause.py b/test/integration/targets/pause/test-pause.py
index ab771fa0..3703470d 100755
--- a/test/integration/targets/pause/test-pause.py
+++ b/test/integration/targets/pause/test-pause.py
@@ -168,9 +168,7 @@ pause_test = pexpect.spawn(
pause_test.logfile = log_buffer
pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
-pause_test.send('\n') # test newline does not stop the prompt - waiting for a timeout or ctrl+C
pause_test.send('\x03')
-pause_test.expect("Press 'C' to continue the play or 'A' to abort")
pause_test.send('C')
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
@@ -189,7 +187,6 @@ pause_test.logfile = log_buffer
pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
pause_test.send('\x03')
-pause_test.expect("Press 'C' to continue the play or 'A' to abort")
pause_test.send('A')
pause_test.expect('user requested abort!')
pause_test.expect(pexpect.EOF)
@@ -228,7 +225,6 @@ pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
pause_test.expect(r"Waiting for two seconds:")
pause_test.send('\x03')
-pause_test.expect("Press 'C' to continue the play or 'A' to abort")
pause_test.send('C')
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
@@ -248,7 +244,6 @@ pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
pause_test.expect(r"Waiting for two seconds:")
pause_test.send('\x03')
-pause_test.expect("Press 'C' to continue the play or 'A' to abort")
pause_test.send('A')
pause_test.expect('user requested abort!')
pause_test.expect(pexpect.EOF)
@@ -280,24 +275,6 @@ pause_test.send('\r')
pause_test.expect(pexpect.EOF)
pause_test.close()
-# Test input is not returned if a timeout is given
-
-playbook = 'pause-6.yml'
-
-pause_test = pexpect.spawn(
- 'ansible-playbook',
- args=[playbook] + args,
- timeout=10,
- env=os.environ
-)
-
-pause_test.logfile = log_buffer
-pause_test.expect(r'Wait for three seconds:')
-pause_test.send('ignored user input')
-pause_test.expect('Task after pause')
-pause_test.expect(pexpect.EOF)
-pause_test.close()
-
# Test that enter presses may not continue the play when a timeout is set.
diff --git a/test/integration/targets/pip/tasks/main.yml b/test/integration/targets/pip/tasks/main.yml
index a3770702..66992fd0 100644
--- a/test/integration/targets/pip/tasks/main.yml
+++ b/test/integration/targets/pip/tasks/main.yml
@@ -40,9 +40,6 @@
extra_args: "-c {{ remote_constraints }}"
- include_tasks: pip.yml
-
- - include_tasks: no_setuptools.yml
- when: ansible_python.version_info[:2] >= [3, 8]
always:
- name: platform specific cleanup
include_tasks: "{{ cleanup_filename }}"
diff --git a/test/integration/targets/pip/tasks/no_setuptools.yml b/test/integration/targets/pip/tasks/no_setuptools.yml
deleted file mode 100644
index 695605e8..00000000
--- a/test/integration/targets/pip/tasks/no_setuptools.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-- name: Get coverage version
- pip:
- name: coverage
- check_mode: true
- register: pip_coverage
-
-- name: create a virtualenv for use without setuptools
- pip:
- name:
- - packaging
- # coverage is needed when ansible-test is invoked with --coverage
- # and using a custom ansible_python_interpreter below
- - '{{ pip_coverage.stdout_lines|select("match", "coverage==")|first }}'
- virtualenv: "{{ remote_tmp_dir }}/no_setuptools"
-
-- name: Remove setuptools
- pip:
- name:
- - setuptools
- - pkg_resources # This shouldn't be a thing, but ubuntu 20.04...
- virtualenv: "{{ remote_tmp_dir }}/no_setuptools"
- state: absent
-
-- name: Ensure pkg_resources is gone
- command: "{{ remote_tmp_dir }}/no_setuptools/bin/python -c 'import pkg_resources'"
- register: result
- failed_when: result.rc == 0
-
-- vars:
- ansible_python_interpreter: "{{ remote_tmp_dir }}/no_setuptools/bin/python"
- block:
- - name: Checkmode install pip
- pip:
- name: pip
- virtualenv: "{{ remote_tmp_dir }}/no_setuptools"
- check_mode: true
- register: pip_check_mode
-
- - assert:
- that:
- - pip_check_mode.stdout is contains "pip=="
- - pip_check_mode.stdout is not contains "setuptools=="
-
- - name: Install fallible
- pip:
- name: fallible==0.0.1a2
- virtualenv: "{{ remote_tmp_dir }}/no_setuptools"
- register: fallible_install
diff --git a/test/integration/targets/pip/tasks/pip.yml b/test/integration/targets/pip/tasks/pip.yml
index 9f1034d2..39480614 100644
--- a/test/integration/targets/pip/tasks/pip.yml
+++ b/test/integration/targets/pip/tasks/pip.yml
@@ -568,28 +568,6 @@
that:
- "version13 is success"
-- name: Test virtualenv command with venv formatting
- when: ansible_python.version.major > 2
- block:
- - name: Clean up the virtualenv
- file:
- state: absent
- name: "{{ remote_tmp_dir }}/pipenv"
-
- # ref: https://github.com/ansible/ansible/issues/76372
- - name: install using different venv formatting
- pip:
- name: "{{ pip_test_package }}"
- virtualenv: "{{ remote_tmp_dir }}/pipenv"
- virtualenv_command: "{{ ansible_python_interpreter ~ ' -mvenv' }}"
- state: present
- register: version14
-
- - name: ensure install using virtualenv_command with venv formatting
- assert:
- that:
- - "version14 is changed"
-
### test virtualenv_command end ###
# https://github.com/ansible/ansible/issues/68592
diff --git a/test/integration/targets/pkg_resources/lookup_plugins/check_pkg_resources.py b/test/integration/targets/pkg_resources/lookup_plugins/check_pkg_resources.py
index 44412f22..9f1c5c0b 100644
--- a/test/integration/targets/pkg_resources/lookup_plugins/check_pkg_resources.py
+++ b/test/integration/targets/pkg_resources/lookup_plugins/check_pkg_resources.py
@@ -11,7 +11,7 @@ __metaclass__ = type
# noinspection PyUnresolvedReferences
try:
- from pkg_resources import Requirement # pylint: disable=unused-import
+ from pkg_resources import Requirement
except ImportError:
Requirement = None
diff --git a/test/integration/targets/plugin_filtering/filter_lookup.yml b/test/integration/targets/plugin_filtering/filter_lookup.yml
index 5f183e9f..694ebfcb 100644
--- a/test/integration/targets/plugin_filtering/filter_lookup.yml
+++ b/test/integration/targets/plugin_filtering/filter_lookup.yml
@@ -1,6 +1,6 @@
---
filter_version: 1.0
-module_rejectlist:
+module_blacklist:
# Specify the name of a lookup plugin here. This should have no effect as
# this is only for filtering modules
- list
diff --git a/test/integration/targets/plugin_filtering/filter_modules.yml b/test/integration/targets/plugin_filtering/filter_modules.yml
index bef7d6d8..6cffa676 100644
--- a/test/integration/targets/plugin_filtering/filter_modules.yml
+++ b/test/integration/targets/plugin_filtering/filter_modules.yml
@@ -1,6 +1,6 @@
---
filter_version: 1.0
-module_rejectlist:
+module_blacklist:
# A pure action plugin
- pause
# A hybrid action plugin with module
diff --git a/test/integration/targets/plugin_filtering/filter_ping.yml b/test/integration/targets/plugin_filtering/filter_ping.yml
index 8604716e..08e56f24 100644
--- a/test/integration/targets/plugin_filtering/filter_ping.yml
+++ b/test/integration/targets/plugin_filtering/filter_ping.yml
@@ -1,5 +1,5 @@
---
filter_version: 1.0
-module_rejectlist:
+module_blacklist:
# Ping is special
- ping
diff --git a/test/integration/targets/plugin_filtering/filter_stat.yml b/test/integration/targets/plugin_filtering/filter_stat.yml
index 132bf03f..c1ce42ef 100644
--- a/test/integration/targets/plugin_filtering/filter_stat.yml
+++ b/test/integration/targets/plugin_filtering/filter_stat.yml
@@ -1,5 +1,5 @@
---
filter_version: 1.0
-module_rejectlist:
+module_blacklist:
# Stat is special
- stat
diff --git a/test/integration/targets/plugin_filtering/no_blacklist_module.ini b/test/integration/targets/plugin_filtering/no_blacklist_module.ini
new file mode 100644
index 00000000..65b51d67
--- /dev/null
+++ b/test/integration/targets/plugin_filtering/no_blacklist_module.ini
@@ -0,0 +1,3 @@
+[defaults]
+retry_files_enabled = False
+plugin_filters_cfg = ./no_blacklist_module.yml
diff --git a/test/integration/targets/plugin_filtering/no_rejectlist_module.yml b/test/integration/targets/plugin_filtering/no_blacklist_module.yml
index 91e60a1f..52a55dff 100644
--- a/test/integration/targets/plugin_filtering/no_rejectlist_module.yml
+++ b/test/integration/targets/plugin_filtering/no_blacklist_module.yml
@@ -1,3 +1,3 @@
---
filter_version: 1.0
-module_rejectlist:
+module_blacklist:
diff --git a/test/integration/targets/plugin_filtering/runme.sh b/test/integration/targets/plugin_filtering/runme.sh
index 03d78abc..aa0e2b0c 100755
--- a/test/integration/targets/plugin_filtering/runme.sh
+++ b/test/integration/targets/plugin_filtering/runme.sh
@@ -22,11 +22,11 @@ if test $? != 0 ; then
fi
#
-# Check that if no modules are rejected then Ansible should not through traceback
+# Check that if no modules are blacklisted then Ansible should not throw a traceback
#
-ANSIBLE_CONFIG=no_rejectlist_module.ini ansible-playbook tempfile.yml -i ../../inventory -vvv "$@"
+ANSIBLE_CONFIG=no_blacklist_module.ini ansible-playbook tempfile.yml -i ../../inventory -vvv "$@"
if test $? != 0 ; then
- echo "### Failed to run tempfile with no modules rejected"
+ echo "### Failed to run tempfile with no modules blacklisted"
exit 1
fi
@@ -87,7 +87,7 @@ fi
ANSIBLE_CONFIG=filter_lookup.ini ansible-playbook lookup.yml -i ../../inventory -vvv "$@"
if test $? != 0 ; then
- echo "### Failed to use a lookup plugin when it is incorrectly specified in the *module* reject list"
+ echo "### Failed to use a lookup plugin when it is incorrectly specified in the *module* blacklist"
exit 1
fi
@@ -107,10 +107,10 @@ ANSIBLE_CONFIG=filter_stat.ini
export ANSIBLE_CONFIG
CAPTURE=$(ansible-playbook copy.yml -i ../../inventory -vvv "$@" 2>&1)
if test $? = 0 ; then
- echo "### Copy ran even though stat is in the module reject list"
+ echo "### Copy ran even though stat is in the module blacklist"
exit 1
else
- echo "$CAPTURE" | grep 'The stat module was specified in the module reject list file,.*, but Ansible will not function without the stat module. Please remove stat from the reject list.'
+ echo "$CAPTURE" | grep 'The stat module was specified in the module blacklist file,.*, but Ansible will not function without the stat module. Please remove stat from the blacklist.'
if test $? != 0 ; then
echo "### Stat did not give us our custom error message"
exit 1
@@ -124,10 +124,10 @@ ANSIBLE_CONFIG=filter_stat.ini
export ANSIBLE_CONFIG
CAPTURE=$(ansible-playbook stat.yml -i ../../inventory -vvv "$@" 2>&1)
if test $? = 0 ; then
- echo "### Stat ran even though it is in the module reject list"
+ echo "### Stat ran even though it is in the module blacklist"
exit 1
else
- echo "$CAPTURE" | grep 'The stat module was specified in the module reject list file,.*, but Ansible will not function without the stat module. Please remove stat from the reject list.'
+ echo "$CAPTURE" | grep 'The stat module was specified in the module blacklist file,.*, but Ansible will not function without the stat module. Please remove stat from the blacklist.'
if test $? != 0 ; then
echo "### Stat did not give us our custom error message"
exit 1
diff --git a/test/integration/targets/plugin_loader/collections/ansible_collections/n/c/plugins/action/a.py b/test/integration/targets/plugin_loader/collections/ansible_collections/n/c/plugins/action/a.py
deleted file mode 100644
index 685b1597..00000000
--- a/test/integration/targets/plugin_loader/collections/ansible_collections/n/c/plugins/action/a.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from ansible.plugins.action import ActionBase
-
-
-class ActionModule(ActionBase):
- def run(self, tmp=None, task_vars=None):
- return {"nca_executed": True}
diff --git a/test/integration/targets/plugin_loader/file_collision/play.yml b/test/integration/targets/plugin_loader/file_collision/play.yml
deleted file mode 100644
index cc55800c..00000000
--- a/test/integration/targets/plugin_loader/file_collision/play.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-- hosts: localhost
- gather_facts: false
- roles:
- - r1
- - r2
- tasks:
- - debug: msg={{'a'|filter1|filter2|filter3}}
diff --git a/test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/custom.py b/test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/custom.py
deleted file mode 100644
index 7adbf7dc..00000000
--- a/test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/custom.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from __future__ import annotations
-
-
-def do_nothing(myval):
- return myval
-
-
-class FilterModule(object):
- ''' Ansible core jinja2 filters '''
-
- def filters(self):
- return {
- 'filter1': do_nothing,
- 'filter3': do_nothing,
- }
diff --git a/test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/filter1.yml b/test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/filter1.yml
deleted file mode 100644
index 5bb3e345..00000000
--- a/test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/filter1.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-DOCUMENTATION:
- name: filter1
- version_added: "1.9"
- short_description: Does nothing
- description:
- - Really, does nothing
- notes:
- - This is a test filter
- positional: _input
- options:
- _input:
- description: the input
- required: true
-
-EXAMPLES: ''
-RETURN:
- _value:
- description: The input
diff --git a/test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/filter3.yml b/test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/filter3.yml
deleted file mode 100644
index 4270b32c..00000000
--- a/test/integration/targets/plugin_loader/file_collision/roles/r1/filter_plugins/filter3.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-DOCUMENTATION:
- name: filter3
- version_added: "1.9"
- short_description: Does nothing
- description:
- - Really, does nothing
- notes:
- - This is a test filter
- positional: _input
- options:
- _input:
- description: the input
- required: true
-
-EXAMPLES: ''
-RETURN:
- _value:
- description: The input
diff --git a/test/integration/targets/plugin_loader/file_collision/roles/r2/filter_plugins/custom.py b/test/integration/targets/plugin_loader/file_collision/roles/r2/filter_plugins/custom.py
deleted file mode 100644
index 8a7a4f52..00000000
--- a/test/integration/targets/plugin_loader/file_collision/roles/r2/filter_plugins/custom.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from __future__ import annotations
-
-
-def do_nothing(myval):
- return myval
-
-
-class FilterModule(object):
- ''' Ansible core jinja2 filters '''
-
- def filters(self):
- return {
- 'filter2': do_nothing,
- }
diff --git a/test/integration/targets/plugin_loader/file_collision/roles/r2/filter_plugins/filter2.yml b/test/integration/targets/plugin_loader/file_collision/roles/r2/filter_plugins/filter2.yml
deleted file mode 100644
index de9195e6..00000000
--- a/test/integration/targets/plugin_loader/file_collision/roles/r2/filter_plugins/filter2.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-DOCUMENTATION:
- name: filter2
- version_added: "1.9"
- short_description: Does nothing
- description:
- - Really, does nothing
- notes:
- - This is a test filter
- positional: _input
- options:
- _input:
- description: the input
- required: true
-
-EXAMPLES: ''
-RETURN:
- _value:
- description: The input
diff --git a/test/integration/targets/plugin_loader/override/filters.yml b/test/integration/targets/plugin_loader/override/filters.yml
index 569a4479..e51ab4e9 100644
--- a/test/integration/targets/plugin_loader/override/filters.yml
+++ b/test/integration/targets/plugin_loader/override/filters.yml
@@ -1,7 +1,7 @@
- hosts: testhost
gather_facts: false
tasks:
- - name: ensure local 'flag' filter works, 'flatten' is overridden and 'ternary' is still from core
+ - name: ensure local 'flag' filter works, 'flatten' is overridden and 'ternary' is still from core
assert:
that:
- a|flag == 'flagged'
diff --git a/test/integration/targets/plugin_loader/runme.sh b/test/integration/targets/plugin_loader/runme.sh
index e68f06ad..e30f6241 100755
--- a/test/integration/targets/plugin_loader/runme.sh
+++ b/test/integration/targets/plugin_loader/runme.sh
@@ -34,8 +34,3 @@ done
# test config loading
ansible-playbook use_coll_name.yml -i ../../inventory -e 'ansible_connection=ansible.builtin.ssh' "$@"
-
-# test filter loading ignoring duplicate file basename
-ansible-playbook file_collision/play.yml "$@"
-
-ANSIBLE_COLLECTIONS_PATH=$PWD/collections ansible-playbook unsafe_plugin_name.yml "$@"
diff --git a/test/integration/targets/plugin_loader/unsafe_plugin_name.yml b/test/integration/targets/plugin_loader/unsafe_plugin_name.yml
deleted file mode 100644
index 73cd4399..00000000
--- a/test/integration/targets/plugin_loader/unsafe_plugin_name.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-- hosts: localhost
- gather_facts: false
- tasks:
- - action: !unsafe n.c.a
- register: r
-
- - assert:
- that:
- - r.nca_executed
diff --git a/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py b/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py
index 41a76d9b..e542913d 100644
--- a/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py
+++ b/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py
@@ -64,7 +64,7 @@ from collections.abc import MutableMapping
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.module_utils.six import string_types
-from ansible.module_utils.common.text.converters import to_native, to_text
+from ansible.module_utils._text import to_native, to_text
from ansible.plugins.inventory import BaseFileInventoryPlugin
NoneType = type(None)
diff --git a/test/integration/targets/remote_tmp/playbook.yml b/test/integration/targets/remote_tmp/playbook.yml
index 2d0db4e8..5adef626 100644
--- a/test/integration/targets/remote_tmp/playbook.yml
+++ b/test/integration/targets/remote_tmp/playbook.yml
@@ -30,43 +30,30 @@
- name: Test tempdir is removed
hosts: testhost
gather_facts: false
- vars:
- # These tests cannot be run with pipelining as it defeats the purpose of
- # ensuring remote_tmp is cleaned up. Pipelining is enabled in the test
- # inventory
- ansible_pipelining: false
- # Ensure that the remote_tmp_dir we create allows the unpriv connection user
- # to create the remote_tmp
- ansible_become: false
tasks:
- import_role:
name: ../setup_remote_tmp_dir
- - vars:
- # Isolate the remote_tmp used by these tests
- ansible_remote_tmp: "{{ remote_tmp_dir }}/remote_tmp"
- block:
- - file:
- state: touch
- path: "{{ remote_tmp_dir }}/65393"
+ - file:
+ state: touch
+ path: "{{ remote_tmp_dir }}/65393"
- - copy:
- src: "{{ remote_tmp_dir }}/65393"
- dest: "{{ remote_tmp_dir }}/65393.2"
- remote_src: true
+ - copy:
+ src: "{{ remote_tmp_dir }}/65393"
+ dest: "{{ remote_tmp_dir }}/65393.2"
+ remote_src: true
- - find:
- path: "{{ ansible_remote_tmp }}"
- use_regex: yes
- patterns: 'AnsiballZ_.+\.py'
- recurse: true
- register: result
+ - find:
+ path: "~/.ansible/tmp"
+ use_regex: yes
+ patterns: 'AnsiballZ_.+\.py'
+ recurse: true
+ register: result
- debug:
var: result
- assert:
that:
- # Should only be AnsiballZ_find.py because find is actively running
- - result.files|length == 1
- - result.files[0].path.endswith('/AnsiballZ_find.py')
+ # Should find nothing since pipelining is used
+ - result.files|length == 0
diff --git a/test/integration/targets/replace/tasks/main.yml b/test/integration/targets/replace/tasks/main.yml
index ca8b4ec1..d267b783 100644
--- a/test/integration/targets/replace/tasks/main.yml
+++ b/test/integration/targets/replace/tasks/main.yml
@@ -263,22 +263,3 @@
- replace_cat8.stdout_lines[1] == "9.9.9.9"
- replace_cat8.stdout_lines[7] == "0.0.0.0"
- replace_cat8.stdout_lines[13] == "0.0.0.0"
-
-# For Python 3.6 or greater - https://github.com/ansible/ansible/issues/79364
-- name: Handle bad escape character in regular expression
- replace:
- path: /dev/null
- after: ^
- before: $
- regexp: \.
- replace: '\D'
- ignore_errors: true
- register: replace_test9
- when: ansible_python.version.major == 3 and ansible_python.version.minor > 6
-
-- name: Validate the failure
- assert:
- that:
- - replace_test9 is failure
- - replace_test9.msg.startswith("Unable to process replace")
- when: ansible_python.version.major == 3 and ansible_python.version.minor > 6
diff --git a/test/integration/targets/result_pickle_error/action_plugins/result_pickle_error.py b/test/integration/targets/result_pickle_error/action_plugins/result_pickle_error.py
deleted file mode 100644
index e8d712a3..00000000
--- a/test/integration/targets/result_pickle_error/action_plugins/result_pickle_error.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright: Contributors to the Ansible project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible.plugins.action import ActionBase
-from jinja2 import Undefined
-
-
-class ActionModule(ActionBase):
-
- def run(self, tmp=None, task_vars=None):
- return {'obj': Undefined('obj')}
diff --git a/test/integration/targets/result_pickle_error/aliases b/test/integration/targets/result_pickle_error/aliases
deleted file mode 100644
index 70fbe57e..00000000
--- a/test/integration/targets/result_pickle_error/aliases
+++ /dev/null
@@ -1,3 +0,0 @@
-shippable/posix/group5
-context/controller
-needs/target/test_utils
diff --git a/test/integration/targets/result_pickle_error/runme.sh b/test/integration/targets/result_pickle_error/runme.sh
deleted file mode 100755
index e2ec37b8..00000000
--- a/test/integration/targets/result_pickle_error/runme.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env bash
-
-set -ux
-export ANSIBLE_ROLES_PATH=../
-
-is_timeout() {
- rv=$?
- if [ "$rv" == "124" ]; then
- echo "***hang detected, this likely means the strategy never received a result for the task***"
- fi
- exit $rv
-}
-
-trap "is_timeout" EXIT
-
-../test_utils/scripts/timeout.py -- 10 ansible-playbook -i ../../inventory runme.yml -v "$@"
diff --git a/test/integration/targets/result_pickle_error/runme.yml b/test/integration/targets/result_pickle_error/runme.yml
deleted file mode 100644
index 60508498..00000000
--- a/test/integration/targets/result_pickle_error/runme.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-- hosts: all
- gather_facts: false
- tasks:
- - include_role:
- name: result_pickle_error
- # Just for caution loop 3 times to ensure no issues
- loop: '{{ range(3) }}'
diff --git a/test/integration/targets/result_pickle_error/tasks/main.yml b/test/integration/targets/result_pickle_error/tasks/main.yml
deleted file mode 100644
index 895475dd..00000000
--- a/test/integration/targets/result_pickle_error/tasks/main.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-- name: Ensure pickling error doesn't cause a hang
- result_pickle_error:
- ignore_errors: true
- register: result
-
-- assert:
- that:
- - result.msg == expected_msg
- - result is failed
- vars:
- expected_msg: "cannot pickle 'Undefined' object"
-
-- debug:
- msg: Success, no hang
diff --git a/test/integration/targets/roles/47023.yml b/test/integration/targets/roles/47023.yml
deleted file mode 100644
index 6b41b52f..00000000
--- a/test/integration/targets/roles/47023.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-- hosts: all
- gather_facts: no
- tasks:
- - include_role: name=47023_role1
diff --git a/test/integration/targets/roles/dupe_inheritance.yml b/test/integration/targets/roles/dupe_inheritance.yml
deleted file mode 100644
index 6fda5baf..00000000
--- a/test/integration/targets/roles/dupe_inheritance.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-- name: Test
- hosts: testhost
- gather_facts: false
- roles:
- - role: top
- info: First definition
- testvar: abc
-
- - role: top
- info: Second definition
diff --git a/test/integration/targets/roles/privacy.yml b/test/integration/targets/roles/privacy.yml
deleted file mode 100644
index 2f671c07..00000000
--- a/test/integration/targets/roles/privacy.yml
+++ /dev/null
@@ -1,60 +0,0 @@
-# use this to debug issues
-#- debug: msg={{ is_private ~ ', ' ~ is_default ~ ', ' ~ privacy|default('nope')}}
-
-- hosts: localhost
- name: test global privacy setting
- gather_facts: false
- roles:
- - a
- pre_tasks:
-
- - name: 'test roles: privacy'
- assert:
- that:
- - is_private and privacy is undefined or not is_private and privacy is defined
- - not is_default or is_default and privacy is defined
-
-- hosts: localhost
- name: test import_role privacy
- gather_facts: false
- tasks:
- - import_role: name=a
-
- - name: role is private, var should be undefined
- assert:
- that:
- - is_private and privacy is undefined or not is_private and privacy is defined
- - not is_default or is_default and privacy is defined
-
-- hosts: localhost
- name: test global privacy setting on includes
- gather_facts: false
- tasks:
- - include_role: name=a
-
- - name: test include_role privacy
- assert:
- that:
- - not is_default and (is_private and privacy is undefined or not is_private and privacy is defined) or is_default and privacy is undefined
-
-- hosts: localhost
- name: test public yes always overrides global privacy setting on includes
- gather_facts: false
- tasks:
- - include_role: name=a public=yes
-
- - name: test include_role privacy
- assert:
- that:
- - privacy is defined
-
-- hosts: localhost
- name: test public no always overrides global privacy setting on includes
- gather_facts: false
- tasks:
- - include_role: name=a public=no
-
- - name: test include_role privacy
- assert:
- that:
- - privacy is undefined
diff --git a/test/integration/targets/roles/role_complete.yml b/test/integration/targets/roles/role_complete.yml
deleted file mode 100644
index 86cae772..00000000
--- a/test/integration/targets/roles/role_complete.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-- name: test deduping allows for 1 successful execution of role after it is skipped
- hosts: testhost
- gather_facts: false
- tags: [ 'conditional_skipped' ]
- roles:
- # Skipped the first time it executes
- - role: a
- when: role_set_var is defined
-
- - role: set_var
-
- # No longer skipped
- - role: a
- when: role_set_var is defined
- # Deduplicated with the previous success
- - role: a
- when: role_set_var is defined
-
-- name: test deduping allows for successful execution of role after host is unreachable
- hosts: fake,testhost
- gather_facts: false
- tags: [ 'unreachable' ]
- ignore_unreachable: yes
- roles:
- # unreachable by the first host
- - role: test_connectivity
-
- # unreachable host will try again,
- # the successful host will not because it's deduplicated
- - role: test_connectivity
-
-- name: test deduping role for failed host
- hosts: testhost,localhost
- gather_facts: false
- tags: [ 'conditional_failed' ]
- ignore_errors: yes
- roles:
- # Uses run_once to fail on the first host the first time it executes
- - role: failed_when
-
- - role: set_var
- - role: recover
-
- # Deduplicated after the failure, ONLY runs for localhost
- - role: failed_when
- # Deduplicated with the previous success
- - role: failed_when
diff --git a/test/integration/targets/roles/role_dep_chain.yml b/test/integration/targets/roles/role_dep_chain.yml
deleted file mode 100644
index cf99a25a..00000000
--- a/test/integration/targets/roles/role_dep_chain.yml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- hosts: all
- tasks:
- - name: static import inside dynamic include inherits defaults/vars
- include_role:
- name: include_import_dep_chain
diff --git a/test/integration/targets/roles/roles/47023_role1/defaults/main.yml b/test/integration/targets/roles/roles/47023_role1/defaults/main.yml
deleted file mode 100644
index 166caa33..00000000
--- a/test/integration/targets/roles/roles/47023_role1/defaults/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-my_default: defined
diff --git a/test/integration/targets/roles/roles/47023_role1/tasks/main.yml b/test/integration/targets/roles/roles/47023_role1/tasks/main.yml
deleted file mode 100644
index 9c408ba2..00000000
--- a/test/integration/targets/roles/roles/47023_role1/tasks/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-- include_role: name=47023_role2
diff --git a/test/integration/targets/roles/roles/47023_role1/vars/main.yml b/test/integration/targets/roles/roles/47023_role1/vars/main.yml
deleted file mode 100644
index bfda56b9..00000000
--- a/test/integration/targets/roles/roles/47023_role1/vars/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-my_var: defined
diff --git a/test/integration/targets/roles/roles/47023_role2/tasks/main.yml b/test/integration/targets/roles/roles/47023_role2/tasks/main.yml
deleted file mode 100644
index 4544215f..00000000
--- a/test/integration/targets/roles/roles/47023_role2/tasks/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-- include_role: name=47023_role3
diff --git a/test/integration/targets/roles/roles/47023_role3/tasks/main.yml b/test/integration/targets/roles/roles/47023_role3/tasks/main.yml
deleted file mode 100644
index 9479fe3f..00000000
--- a/test/integration/targets/roles/roles/47023_role3/tasks/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-- include_role: name=47023_role4
diff --git a/test/integration/targets/roles/roles/47023_role4/tasks/main.yml b/test/integration/targets/roles/roles/47023_role4/tasks/main.yml
deleted file mode 100644
index 64c96e97..00000000
--- a/test/integration/targets/roles/roles/47023_role4/tasks/main.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-- debug:
- msg: "Var is {{ my_var | default('undefined') }}"
-
-- debug:
- msg: "Default is {{ my_default | default('undefined') }}"
diff --git a/test/integration/targets/roles/roles/a/vars/main.yml b/test/integration/targets/roles/roles/a/vars/main.yml
deleted file mode 100644
index 7812aa78..00000000
--- a/test/integration/targets/roles/roles/a/vars/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-privacy: in role a
diff --git a/test/integration/targets/roles/roles/bottom/tasks/main.yml b/test/integration/targets/roles/roles/bottom/tasks/main.yml
deleted file mode 100644
index 3f375973..00000000
--- a/test/integration/targets/roles/roles/bottom/tasks/main.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: "{{ info }} - {{ role_name }}: testvar content"
- debug:
- msg: '{{ testvar | default("Not specified") }}'
diff --git a/test/integration/targets/roles/roles/failed_when/tasks/main.yml b/test/integration/targets/roles/roles/failed_when/tasks/main.yml
deleted file mode 100644
index 6ca4d8cf..00000000
--- a/test/integration/targets/roles/roles/failed_when/tasks/main.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- debug:
- msg: "{{ role_set_var is undefined | ternary('failed_when task failed', 'failed_when task succeeded') }}"
- failed_when: role_set_var is undefined
- run_once: true
diff --git a/test/integration/targets/roles/roles/imported_from_include/tasks/main.yml b/test/integration/targets/roles/roles/imported_from_include/tasks/main.yml
deleted file mode 100644
index 32126f87..00000000
--- a/test/integration/targets/roles/roles/imported_from_include/tasks/main.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- assert:
- that:
- - inherit_var is defined
- - inherit_default is defined
diff --git a/test/integration/targets/roles/roles/include_import_dep_chain/defaults/main.yml b/test/integration/targets/roles/roles/include_import_dep_chain/defaults/main.yml
deleted file mode 100644
index 5b8a643d..00000000
--- a/test/integration/targets/roles/roles/include_import_dep_chain/defaults/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-inherit_default: default
diff --git a/test/integration/targets/roles/roles/include_import_dep_chain/tasks/main.yml b/test/integration/targets/roles/roles/include_import_dep_chain/tasks/main.yml
deleted file mode 100644
index 84884a8d..00000000
--- a/test/integration/targets/roles/roles/include_import_dep_chain/tasks/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- import_role:
- name: imported_from_include
diff --git a/test/integration/targets/roles/roles/include_import_dep_chain/vars/main.yml b/test/integration/targets/roles/roles/include_import_dep_chain/vars/main.yml
deleted file mode 100644
index 0d4aaa94..00000000
--- a/test/integration/targets/roles/roles/include_import_dep_chain/vars/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-inherit_var: var
diff --git a/test/integration/targets/roles/roles/middle/tasks/main.yml b/test/integration/targets/roles/roles/middle/tasks/main.yml
deleted file mode 100644
index bd2b5294..00000000
--- a/test/integration/targets/roles/roles/middle/tasks/main.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-- name: "{{ info }} - {{ role_name }}: testvar content"
- debug:
- msg: '{{ testvar | default("Not specified") }}'
-
-- include_role:
- name: bottom
diff --git a/test/integration/targets/roles/roles/recover/tasks/main.yml b/test/integration/targets/roles/roles/recover/tasks/main.yml
deleted file mode 100644
index 72ea3ac1..00000000
--- a/test/integration/targets/roles/roles/recover/tasks/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-- meta: clear_host_errors
diff --git a/test/integration/targets/roles/roles/set_var/tasks/main.yml b/test/integration/targets/roles/roles/set_var/tasks/main.yml
deleted file mode 100644
index 45f83eb0..00000000
--- a/test/integration/targets/roles/roles/set_var/tasks/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- set_fact:
- role_set_var: true
diff --git a/test/integration/targets/roles/roles/test_connectivity/tasks/main.yml b/test/integration/targets/roles/roles/test_connectivity/tasks/main.yml
deleted file mode 100644
index 22fac6ed..00000000
--- a/test/integration/targets/roles/roles/test_connectivity/tasks/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- ping:
- data: 'reachable'
diff --git a/test/integration/targets/roles/roles/top/tasks/main.yml b/test/integration/targets/roles/roles/top/tasks/main.yml
deleted file mode 100644
index a7a5b529..00000000
--- a/test/integration/targets/roles/roles/top/tasks/main.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-- name: "{{ info }} - {{ role_name }}: testvar content"
- debug:
- msg: '{{ testvar | default("Not specified") }}'
-
-- include_role:
- name: middle
diff --git a/test/integration/targets/roles/roles/vars_scope/defaults/main.yml b/test/integration/targets/roles/roles/vars_scope/defaults/main.yml
deleted file mode 100644
index 27f3e916..00000000
--- a/test/integration/targets/roles/roles/vars_scope/defaults/main.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-default_only: default
-role_vars_only: default
-play_and_role_vars: default
-play_and_roles_and_role_vars: default
-play_and_import_and_role_vars: default
-play_and_include_and_role_vars: default
-play_and_role_vars_and_role_vars: default
-roles_and_role_vars: default
-import_and_role_vars: default
-include_and_role_vars: default
diff --git a/test/integration/targets/roles/roles/vars_scope/tasks/check_vars.yml b/test/integration/targets/roles/roles/vars_scope/tasks/check_vars.yml
deleted file mode 100644
index 083415d1..00000000
--- a/test/integration/targets/roles/roles/vars_scope/tasks/check_vars.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-- debug: var={{item}}
- loop: '{{possible_vars}}'
-
-- assert:
- that:
- - (item in vars and item in defined and vars[item] == defined[item]) or (item not in vars and item not in defined)
- loop: '{{possible_vars}}'
diff --git a/test/integration/targets/roles/roles/vars_scope/tasks/main.yml b/test/integration/targets/roles/roles/vars_scope/tasks/main.yml
deleted file mode 100644
index 155f3629..00000000
--- a/test/integration/targets/roles/roles/vars_scope/tasks/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-- include_tasks: check_vars.yml
diff --git a/test/integration/targets/roles/roles/vars_scope/vars/main.yml b/test/integration/targets/roles/roles/vars_scope/vars/main.yml
deleted file mode 100644
index 079353f8..00000000
--- a/test/integration/targets/roles/roles/vars_scope/vars/main.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-role_vars_only: role_vars
-play_and_role_vars: role_vars
-play_and_roles_and_role_vars: role_vars
-play_and_import_and_role_vars: role_vars
-play_and_include_and_role_vars: role_vars
-play_and_role_vars_and_role_vars: role_vars
-roles_and_role_vars: role_vars
-import_and_role_vars: role_vars
-include_and_role_vars: role_vars
diff --git a/test/integration/targets/roles/runme.sh b/test/integration/targets/roles/runme.sh
index bf3aaf58..bb98a932 100755
--- a/test/integration/targets/roles/runme.sh
+++ b/test/integration/targets/roles/runme.sh
@@ -3,47 +3,26 @@
set -eux
# test no dupes when dependencies in b and c point to a in roles:
-[ "$(ansible-playbook no_dupes.yml -i ../../inventory --tags inroles | grep -c '"msg": "A"')" = "1" ]
-[ "$(ansible-playbook no_dupes.yml -i ../../inventory --tags acrossroles | grep -c '"msg": "A"')" = "1" ]
-[ "$(ansible-playbook no_dupes.yml -i ../../inventory --tags intasks | grep -c '"msg": "A"')" = "1" ]
+[ "$(ansible-playbook no_dupes.yml -i ../../inventory --tags inroles "$@" | grep -c '"msg": "A"')" = "1" ]
+[ "$(ansible-playbook no_dupes.yml -i ../../inventory --tags acrossroles "$@" | grep -c '"msg": "A"')" = "1" ]
+[ "$(ansible-playbook no_dupes.yml -i ../../inventory --tags intasks "$@" | grep -c '"msg": "A"')" = "1" ]
# but still dupe across plays
-[ "$(ansible-playbook no_dupes.yml -i ../../inventory | grep -c '"msg": "A"')" = "3" ]
-
-# and don't dedupe before the role successfully completes
-[ "$(ansible-playbook role_complete.yml -i ../../inventory -i fake, --tags conditional_skipped | grep -c '"msg": "A"')" = "1" ]
-[ "$(ansible-playbook role_complete.yml -i ../../inventory -i fake, --tags conditional_failed | grep -c '"msg": "failed_when task succeeded"')" = "1" ]
-[ "$(ansible-playbook role_complete.yml -i ../../inventory -i fake, --tags unreachable -vvv | grep -c '"data": "reachable"')" = "1" ]
-ansible-playbook role_complete.yml -i ../../inventory -i fake, --tags unreachable | grep -e 'ignored=2'
+[ "$(ansible-playbook no_dupes.yml -i ../../inventory "$@" | grep -c '"msg": "A"')" = "3" ]
# include/import can execute another instance of role
-[ "$(ansible-playbook allowed_dupes.yml -i ../../inventory --tags importrole | grep -c '"msg": "A"')" = "2" ]
-[ "$(ansible-playbook allowed_dupes.yml -i ../../inventory --tags includerole | grep -c '"msg": "A"')" = "2" ]
+[ "$(ansible-playbook allowed_dupes.yml -i ../../inventory --tags importrole "$@" | grep -c '"msg": "A"')" = "2" ]
+[ "$(ansible-playbook allowed_dupes.yml -i ../../inventory --tags includerole "$@" | grep -c '"msg": "A"')" = "2" ]
-[ "$(ansible-playbook dupe_inheritance.yml -i ../../inventory | grep -c '"msg": "abc"')" = "3" ]
# ensure role data is merged correctly
ansible-playbook data_integrity.yml -i ../../inventory "$@"
# ensure role fails when trying to load 'non role' in _from
-ansible-playbook no_outside.yml -i ../../inventory > role_outside_output.log 2>&1 || true
+ansible-playbook no_outside.yml -i ../../inventory "$@" > role_outside_output.log 2>&1 || true
if grep "as it is not inside the expected role path" role_outside_output.log >/dev/null; then
echo "Test passed (playbook failed with expected output, output not shown)."
else
 echo "Test failed, expected output from playbook failure is missing (output not shown)."
exit 1
fi
-
-# ensure vars scope is correct
-ansible-playbook vars_scope.yml -i ../../inventory "$@"
-
-# test nested includes get parent roles greater than a depth of 3
-[ "$(ansible-playbook 47023.yml -i ../../inventory | grep '\<\(Default\|Var\)\>' | grep -c 'is defined')" = "2" ]
-
-# ensure import_role called from include_role has the include_role in the dep chain
-ansible-playbook role_dep_chain.yml -i ../../inventory "$@"
-
-# global role privacy setting test, set to private, set to not private, default
-ANSIBLE_PRIVATE_ROLE_VARS=1 ansible-playbook privacy.yml -e @vars/privacy_vars.yml "$@"
-ANSIBLE_PRIVATE_ROLE_VARS=0 ansible-playbook privacy.yml -e @vars/privacy_vars.yml "$@"
-ansible-playbook privacy.yml -e @vars/privacy_vars.yml "$@"
diff --git a/test/integration/targets/roles/tasks/check_vars.yml b/test/integration/targets/roles/tasks/check_vars.yml
deleted file mode 100644
index 083415d1..00000000
--- a/test/integration/targets/roles/tasks/check_vars.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-- debug: var={{item}}
- loop: '{{possible_vars}}'
-
-- assert:
- that:
- - (item in vars and item in defined and vars[item] == defined[item]) or (item not in vars and item not in defined)
- loop: '{{possible_vars}}'
diff --git a/test/integration/targets/roles/vars/play.yml b/test/integration/targets/roles/vars/play.yml
deleted file mode 100644
index dd84ae22..00000000
--- a/test/integration/targets/roles/vars/play.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-play_only: play
-play_and_roles: play
-play_and_import: play
-play_and_include: play
-play_and_role_vars: play
-play_and_roles_and_role_vars: play
-play_and_import_and_role_vars: play
-play_and_include_and_role_vars: play
-possible_vars:
- - default_only
- - import_and_role_vars
- - import_only
- - include_and_role_vars
- - include_only
- - play_and_import
- - play_and_import_and_role_vars
- - play_and_include
- - play_and_include_and_role_vars
- - play_and_roles
- - play_and_roles_and_role_vars
- - play_and_role_vars
- - play_and_role_vars_and_role_vars
- - play_only
- - roles_and_role_vars
- - roles_only
- - role_vars_only
diff --git a/test/integration/targets/roles/vars/privacy_vars.yml b/test/integration/targets/roles/vars/privacy_vars.yml
deleted file mode 100644
index 9539ed04..00000000
--- a/test/integration/targets/roles/vars/privacy_vars.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-is_private: "{{lookup('config', 'DEFAULT_PRIVATE_ROLE_VARS')}}"
-is_default: "{{lookup('env', 'ANSIBLE_PRIVATE_ROLE_VARS') == ''}}"
diff --git a/test/integration/targets/roles/vars_scope.yml b/test/integration/targets/roles/vars_scope.yml
deleted file mode 100644
index 3e6b16a3..00000000
--- a/test/integration/targets/roles/vars_scope.yml
+++ /dev/null
@@ -1,358 +0,0 @@
-- name: play and roles
- hosts: localhost
- gather_facts: false
- vars_files:
- - vars/play.yml
- roles:
- - name: vars_scope
- vars:
- roles_only: roles
- roles_and_role_vars: roles
- play_and_roles: roles
- play_and_roles_and_role_vars: roles
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: roles
- play_and_roles_and_role_vars: roles
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: roles
- roles_only: roles
- role_vars_only: role_vars
-
- pre_tasks:
- - include_tasks: tasks/check_vars.yml
- vars:
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
- tasks:
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-- name: play baseline (no roles)
- hosts: localhost
- gather_facts: false
- vars_files:
- - vars/play.yml
- tasks:
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- play_and_import: play
- play_and_import_and_role_vars: play
- play_and_include: play
- play_and_include_and_role_vars: play
- play_and_roles: play
- play_and_roles_and_role_vars: play
- play_and_role_vars: play
- play_only: play
-
-- name: play and import
- hosts: localhost
- gather_facts: false
- vars_files:
- - vars/play.yml
- tasks:
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- play_and_import: play
- play_and_include: play
- play_and_roles: play
- play_only: play
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_and_include_and_role_vars: role_vars
- play_and_roles_and_role_vars: role_vars
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-
- - name: static import
- import_role:
- name: vars_scope
- vars:
- import_only: import
- import_and_role_vars: import
- play_and_import: import
- play_and_import_and_role_vars: import
- defined:
- default_only: default
- import_and_role_vars: import
- import_only: import
- include_and_role_vars: role_vars
- play_and_import: import
- play_and_import_and_role_vars: import
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-
-- name: play and include
- hosts: localhost
- gather_facts: false
- vars_files:
- - vars/play.yml
- tasks:
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- play_and_import: play
- play_and_import_and_role_vars: play
- play_and_include: play
- play_and_include_and_role_vars: play
- play_and_roles: play
- play_and_roles_and_role_vars: play
- play_and_role_vars: play
- play_only: play
-
- - name: dynamic include
- include_role:
- name: vars_scope
- vars:
- include_only: include
- include_and_role_vars: include
- play_and_include: include
- play_and_include_and_role_vars: include
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: include
- include_only: include
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: include
- play_and_include_and_role_vars: include
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- play_and_import: play
- play_and_import_and_role_vars: play
- play_and_include: play
- play_and_include_and_role_vars: play
- play_and_roles: play
- play_and_roles_and_role_vars: play
- play_and_role_vars: play
- play_only: play
-
-- name: play and roles and import and include
- hosts: localhost
- gather_facts: false
- vars:
- vars_files:
- - vars/play.yml
- roles:
- - name: vars_scope
- vars:
- roles_only: roles
- roles_and_role_vars: roles
- play_and_roles: roles
- play_and_roles_and_role_vars: roles
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: roles
- play_and_roles_and_role_vars: roles
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: roles
- roles_only: roles
- role_vars_only: role_vars
-
- pre_tasks:
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-
- tasks:
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-
- - name: static import
- import_role:
- name: vars_scope
- vars:
- import_only: import
- import_and_role_vars: import
- play_and_import: import
- play_and_import_and_role_vars: import
- defined:
- default_only: default
- import_and_role_vars: import
- import_only: import
- include_and_role_vars: role_vars
- play_and_import: import
- play_and_import_and_role_vars: import
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-
- - name: dynamic include
- include_role:
- name: vars_scope
- vars:
- include_only: include
- include_and_role_vars: include
- play_and_include: include
- play_and_include_and_role_vars: include
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: include
- include_only: include
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: include
- play_and_include_and_role_vars: include
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
-
- - include_tasks: roles/vars_scope/tasks/check_vars.yml
- vars:
- defined:
- default_only: default
- import_and_role_vars: role_vars
- include_and_role_vars: role_vars
- play_and_import: play
- play_and_import_and_role_vars: role_vars
- play_and_include: play
- play_and_include_and_role_vars: role_vars
- play_and_roles: play
- play_and_roles_and_role_vars: role_vars
- play_and_role_vars: role_vars
- play_and_role_vars_and_role_vars: role_vars
- play_only: play
- roles_and_role_vars: role_vars
- role_vars_only: role_vars
diff --git a/test/integration/targets/roles_arg_spec/roles/c/meta/main.yml b/test/integration/targets/roles_arg_spec/roles/c/meta/main.yml
index 10dce6d2..1a1ccbe4 100644
--- a/test/integration/targets/roles_arg_spec/roles/c/meta/main.yml
+++ b/test/integration/targets/roles_arg_spec/roles/c/meta/main.yml
@@ -2,15 +2,6 @@ argument_specs:
main:
short_description: Main entry point for role C.
options:
- c_dict:
- type: "dict"
- required: true
c_int:
type: "int"
required: true
- c_list:
- type: "list"
- required: true
- c_raw:
- type: "raw"
- required: true
diff --git a/test/integration/targets/roles_arg_spec/test.yml b/test/integration/targets/roles_arg_spec/test.yml
index b88d2e18..5eca7c73 100644
--- a/test/integration/targets/roles_arg_spec/test.yml
+++ b/test/integration/targets/roles_arg_spec/test.yml
@@ -48,7 +48,6 @@
name: a
vars:
a_int: "{{ INT_VALUE }}"
- a_str: "import_role"
- name: "Call role entry point that is defined, but has no spec data"
import_role:
@@ -145,10 +144,7 @@
hosts: localhost
gather_facts: false
vars:
- c_dict: {}
c_int: 1
- c_list: []
- c_raw: ~
a_str: "some string"
a_int: 42
tasks:
@@ -160,125 +156,6 @@
include_role:
name: c
-- name: "New play to reset vars: Test nested role including/importing role fails with null required options"
- hosts: localhost
- gather_facts: false
- vars:
- a_main_spec:
- a_str:
- required: true
- type: "str"
- c_main_spec:
- c_int:
- required: true
- type: "int"
- c_list:
- required: true
- type: "list"
- c_dict:
- required: true
- type: "dict"
- c_raw:
- required: true
- type: "raw"
- # role c calls a's main and alternate entrypoints
- a_str: ''
- c_dict: {}
- c_int: 0
- c_list: []
- c_raw: ~
- tasks:
- - name: test type coercion fails on None for required str
- block:
- - name: "Test import_role of role C (missing a_str)"
- import_role:
- name: c
- vars:
- a_str: ~
- - fail:
- msg: "Should not get here"
- rescue:
- - debug:
- var: ansible_failed_result
- - name: "Validate import_role failure"
- assert:
- that:
- # NOTE: a bug here that prevents us from getting ansible_failed_task
- - ansible_failed_result.argument_errors == [error]
- - ansible_failed_result.argument_spec_data == a_main_spec
- vars:
- error: >-
- argument 'a_str' is of type <class 'NoneType'> and we were unable to convert to str:
- 'None' is not a string and conversion is not allowed
-
- - name: test type coercion fails on None for required int
- block:
- - name: "Test import_role of role C (missing c_int)"
- import_role:
- name: c
- vars:
- c_int: ~
- - fail:
- msg: "Should not get here"
- rescue:
- - debug:
- var: ansible_failed_result
- - name: "Validate import_role failure"
- assert:
- that:
- # NOTE: a bug here that prevents us from getting ansible_failed_task
- - ansible_failed_result.argument_errors == [error]
- - ansible_failed_result.argument_spec_data == c_main_spec
- vars:
- error: >-
- argument 'c_int' is of type <class 'NoneType'> and we were unable to convert to int:
- <class 'NoneType'> cannot be converted to an int
-
- - name: test type coercion fails on None for required list
- block:
- - name: "Test import_role of role C (missing c_list)"
- import_role:
- name: c
- vars:
- c_list: ~
- - fail:
- msg: "Should not get here"
- rescue:
- - debug:
- var: ansible_failed_result
- - name: "Validate import_role failure"
- assert:
- that:
- # NOTE: a bug here that prevents us from getting ansible_failed_task
- - ansible_failed_result.argument_errors == [error]
- - ansible_failed_result.argument_spec_data == c_main_spec
- vars:
- error: >-
- argument 'c_list' is of type <class 'NoneType'> and we were unable to convert to list:
- <class 'NoneType'> cannot be converted to a list
-
- - name: test type coercion fails on None for required dict
- block:
- - name: "Test import_role of role C (missing c_dict)"
- import_role:
- name: c
- vars:
- c_dict: ~
- - fail:
- msg: "Should not get here"
- rescue:
- - debug:
- var: ansible_failed_result
- - name: "Validate import_role failure"
- assert:
- that:
- # NOTE: a bug here that prevents us from getting ansible_failed_task
- - ansible_failed_result.argument_errors == [error]
- - ansible_failed_result.argument_spec_data == c_main_spec
- vars:
- error: >-
- argument 'c_dict' is of type <class 'NoneType'> and we were unable to convert to dict:
- <class 'NoneType'> cannot be converted to a dict
- name: "New play to reset vars: Test nested role including/importing role fails"
hosts: localhost
@@ -293,15 +170,13 @@
required: true
type: "int"
- c_int: 100
- c_list: []
- c_dict: {}
- c_raw: ~
tasks:
- block:
- name: "Test import_role of role C (missing a_str)"
import_role:
name: c
+ vars:
+ c_int: 100
- fail:
msg: "Should not get here"
@@ -326,6 +201,7 @@
include_role:
name: c
vars:
+ c_int: 200
a_str: "some string"
- fail:
diff --git a/test/integration/targets/rpm_key/tasks/rpm_key.yaml b/test/integration/targets/rpm_key/tasks/rpm_key.yaml
index 204b42ac..89ed2361 100644
--- a/test/integration/targets/rpm_key/tasks/rpm_key.yaml
+++ b/test/integration/targets/rpm_key/tasks/rpm_key.yaml
@@ -123,32 +123,6 @@
assert:
that: "'rsa sha1 (md5) pgp md5 OK' in sl_check.stdout or 'digests signatures OK' in sl_check.stdout"
-- name: get keyid
- shell: "rpm -q gpg-pubkey | head -n 1 | xargs rpm -q --qf %{version}"
- register: key_id
-
-- name: remove GPG key using keyid
- rpm_key:
- state: absent
- key: "{{ key_id.stdout }}"
- register: remove_keyid
- failed_when: remove_keyid.changed == false
-
-- name: remove GPG key using keyid (idempotent)
- rpm_key:
- state: absent
- key: "{{ key_id.stdout }}"
- register: key_id_idempotence
-
-- name: verify idempotent (key_id)
- assert:
- that: "not key_id_idempotence.changed"
-
-- name: add very first key on system again
- rpm_key:
- state: present
- key: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/RPM-GPG-KEY-EPEL-7
-
- name: Issue 20325 - Verify fingerprint of key, invalid fingerprint - EXPECTED FAILURE
rpm_key:
key: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/RPM-GPG-KEY.dag
diff --git a/test/integration/targets/script/tasks/main.yml b/test/integration/targets/script/tasks/main.yml
index 668ec48e..74189f81 100644
--- a/test/integration/targets/script/tasks/main.yml
+++ b/test/integration/targets/script/tasks/main.yml
@@ -37,17 +37,6 @@
## script
##
-- name: Required one of free-form and cmd
- script:
- ignore_errors: yes
- register: script_required
-
-- name: assert that the script fails if neither free-form nor cmd is given
- assert:
- that:
- - script_required.failed
- - "'one of the following' in script_required.msg"
-
- name: execute the test.sh script via command
script: test.sh
register: script_result0
diff --git a/test/integration/targets/service/aliases b/test/integration/targets/service/aliases
index f3703f85..f2f9ac9d 100644
--- a/test/integration/targets/service/aliases
+++ b/test/integration/targets/service/aliases
@@ -1,3 +1,4 @@
destructive
shippable/posix/group1
+skip/osx
skip/macos
diff --git a/test/integration/targets/service/files/ansible_test_service.py b/test/integration/targets/service/files/ansible_test_service.py
index 6292272e..522493fc 100644
--- a/test/integration/targets/service/files/ansible_test_service.py
+++ b/test/integration/targets/service/files/ansible_test_service.py
@@ -9,6 +9,7 @@ __metaclass__ = type
import os
import resource
import signal
+import sys
import time
UMASK = 0
diff --git a/test/integration/targets/service_facts/aliases b/test/integration/targets/service_facts/aliases
index 32e10b03..17d3eb75 100644
--- a/test/integration/targets/service_facts/aliases
+++ b/test/integration/targets/service_facts/aliases
@@ -1,3 +1,4 @@
shippable/posix/group2
skip/freebsd
+skip/osx
skip/macos
diff --git a/test/integration/targets/setup_deb_repo/tasks/main.yml b/test/integration/targets/setup_deb_repo/tasks/main.yml
index 3e640f69..471fb2a2 100644
--- a/test/integration/targets/setup_deb_repo/tasks/main.yml
+++ b/test/integration/targets/setup_deb_repo/tasks/main.yml
@@ -59,7 +59,6 @@
loop:
- stable
- testing
- when: install_repo|default(True)|bool is true
# Need to uncomment the deb-src for the universe component for build-dep state
- name: Ensure deb-src for the universe component
diff --git a/test/integration/targets/setup_paramiko/install-Alpine-3-python-3.yml b/test/integration/targets/setup_paramiko/install-Alpine-3-python-3.yml
index 8c0b28bf..f16d9b53 100644
--- a/test/integration/targets/setup_paramiko/install-Alpine-3-python-3.yml
+++ b/test/integration/targets/setup_paramiko/install-Alpine-3-python-3.yml
@@ -1,2 +1,9 @@
+- name: Setup remote constraints
+ include_tasks: setup-remote-constraints.yml
- name: Install Paramiko for Python 3 on Alpine
- command: apk add py3-paramiko
+ pip: # no apk package manager in core, just use pip
+ name: paramiko
+ extra_args: "-c {{ remote_constraints }}"
+ environment:
+ # Not sure why this fixes the test, but it does.
+ SETUPTOOLS_USE_DISTUTILS: stdlib
diff --git a/test/integration/targets/setup_paramiko/install-CentOS-6-python-2.yml b/test/integration/targets/setup_paramiko/install-CentOS-6-python-2.yml
new file mode 100644
index 00000000..0c7b9e82
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/install-CentOS-6-python-2.yml
@@ -0,0 +1,3 @@
+- name: Install Paramiko for Python 2 on CentOS 6
+ yum:
+ name: python-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-Fedora-35-python-3.yml b/test/integration/targets/setup_paramiko/install-Fedora-35-python-3.yml
new file mode 100644
index 00000000..bbe97a96
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/install-Fedora-35-python-3.yml
@@ -0,0 +1,9 @@
+- name: Install Paramiko and crypto policies scripts
+ dnf:
+ name:
+ - crypto-policies-scripts
+ - python3-paramiko
+ install_weak_deps: no
+
+- name: Drop the crypto-policy to LEGACY for these tests
+ command: update-crypto-policies --set LEGACY
diff --git a/test/integration/targets/setup_paramiko/install-Ubuntu-16-python-2.yml b/test/integration/targets/setup_paramiko/install-Ubuntu-16-python-2.yml
new file mode 100644
index 00000000..8f760740
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/install-Ubuntu-16-python-2.yml
@@ -0,0 +1,3 @@
+- name: Install Paramiko for Python 2 on Ubuntu 16
+ apt:
+ name: python-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-python-2.yml b/test/integration/targets/setup_paramiko/install-python-2.yml
new file mode 100644
index 00000000..be337a16
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/install-python-2.yml
@@ -0,0 +1,3 @@
+- name: Install Paramiko for Python 2
+ package:
+ name: python2-paramiko
diff --git a/test/integration/targets/setup_paramiko/uninstall-Alpine-3-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-Alpine-3-python-3.yml
index edb504ff..e9dcc62c 100644
--- a/test/integration/targets/setup_paramiko/uninstall-Alpine-3-python-3.yml
+++ b/test/integration/targets/setup_paramiko/uninstall-Alpine-3-python-3.yml
@@ -1,2 +1,4 @@
- name: Uninstall Paramiko for Python 3 on Alpine
- command: apk del py3-paramiko
+ pip:
+ name: paramiko
+ state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-Fedora-35-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-Fedora-35-python-3.yml
new file mode 100644
index 00000000..6d0e9a19
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/uninstall-Fedora-35-python-3.yml
@@ -0,0 +1,5 @@
+- name: Revert the crypto-policy back to DEFAULT
+ command: update-crypto-policies --set DEFAULT
+
+- name: Uninstall Paramiko and crypto policies scripts using dnf history undo
+ command: dnf history undo last --assumeyes
diff --git a/test/integration/targets/setup_paramiko/uninstall-apt-python-2.yml b/test/integration/targets/setup_paramiko/uninstall-apt-python-2.yml
new file mode 100644
index 00000000..507d94cc
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/uninstall-apt-python-2.yml
@@ -0,0 +1,5 @@
+- name: Uninstall Paramiko for Python 2 using apt
+ apt:
+ name: python-paramiko
+ state: absent
+ autoremove: yes
diff --git a/test/integration/targets/setup_paramiko/uninstall-zypper-python-2.yml b/test/integration/targets/setup_paramiko/uninstall-zypper-python-2.yml
new file mode 100644
index 00000000..adb26e5c
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/uninstall-zypper-python-2.yml
@@ -0,0 +1,2 @@
+- name: Uninstall Paramiko for Python 2 using zypper
+ command: zypper --quiet --non-interactive remove --clean-deps python2-paramiko
diff --git a/test/integration/targets/setup_rpm_repo/tasks/main.yml b/test/integration/targets/setup_rpm_repo/tasks/main.yml
index bf5af101..be20078f 100644
--- a/test/integration/targets/setup_rpm_repo/tasks/main.yml
+++ b/test/integration/targets/setup_rpm_repo/tasks/main.yml
@@ -24,18 +24,9 @@
args:
name: "{{ rpm_repo_packages }}"
- - name: Install rpmfluff via pip, ensure it is installed with default python as python3-rpm may not exist for other versions
- block:
- - action: "{{ ansible_facts.pkg_mgr }}"
- args:
- name:
- - python3-pip
- - python3
- state: latest
-
- - pip:
- name: rpmfluff
- executable: pip3
+ - name: Install rpmfluff via pip
+ pip:
+ name: rpmfluff
when: ansible_facts.os_family == 'RedHat' and ansible_distribution_major_version is version('9', '==')
- set_fact:
diff --git a/test/integration/targets/strategy_linear/runme.sh b/test/integration/targets/strategy_linear/runme.sh
index a2734f97..cbb6aea3 100755
--- a/test/integration/targets/strategy_linear/runme.sh
+++ b/test/integration/targets/strategy_linear/runme.sh
@@ -5,5 +5,3 @@ set -eux
ansible-playbook test_include_file_noop.yml -i inventory "$@"
ansible-playbook task_action_templating.yml -i inventory "$@"
-
-ansible-playbook task_templated_run_once.yml -i inventory "$@"
diff --git a/test/integration/targets/strategy_linear/task_templated_run_once.yml b/test/integration/targets/strategy_linear/task_templated_run_once.yml
deleted file mode 100644
index bacf06a9..00000000
--- a/test/integration/targets/strategy_linear/task_templated_run_once.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-- hosts: testhost,testhost2
- gather_facts: no
- vars:
- run_once_flag: false
- tasks:
- - debug:
- msg: "I am {{ item }}"
- run_once: "{{ run_once_flag }}"
- register: reg1
- loop:
- - "{{ inventory_hostname }}"
-
- - assert:
- that:
- - "reg1.results[0].msg == 'I am testhost'"
- when: inventory_hostname == 'testhost'
- - assert:
- that:
- - "reg1.results[0].msg == 'I am testhost2'"
- when: inventory_hostname == 'testhost2'
diff --git a/test/integration/targets/subversion/aliases b/test/integration/targets/subversion/aliases
index 03b96434..3cc41e4c 100644
--- a/test/integration/targets/subversion/aliases
+++ b/test/integration/targets/subversion/aliases
@@ -1,4 +1,6 @@
shippable/posix/group2
+skip/osx
skip/macos
+skip/rhel/9.0b # svn checkout hangs
destructive
needs/root
diff --git a/test/integration/targets/support-callback_plugins/aliases b/test/integration/targets/support-callback_plugins/aliases
deleted file mode 100644
index 136c05e0..00000000
--- a/test/integration/targets/support-callback_plugins/aliases
+++ /dev/null
@@ -1 +0,0 @@
-hidden
diff --git a/test/integration/targets/systemd/tasks/test_indirect_service.yml b/test/integration/targets/systemd/tasks/test_indirect_service.yml
index 0df60486..fc11343e 100644
--- a/test/integration/targets/systemd/tasks/test_indirect_service.yml
+++ b/test/integration/targets/systemd/tasks/test_indirect_service.yml
@@ -34,4 +34,4 @@
- assert:
that:
- systemd_enable_dummy_indirect_1 is changed
- - systemd_enable_dummy_indirect_2 is not changed
+ - systemd_enable_dummy_indirect_2 is not changed \ No newline at end of file
diff --git a/test/integration/targets/systemd/vars/Debian.yml b/test/integration/targets/systemd/vars/Debian.yml
index 2dd0affb..613410f0 100644
--- a/test/integration/targets/systemd/vars/Debian.yml
+++ b/test/integration/targets/systemd/vars/Debian.yml
@@ -1,3 +1,3 @@
ssh_service: ssh
sleep_bin_path: /bin/sleep
-indirect_service: dummy
+indirect_service: dummy \ No newline at end of file
diff --git a/test/integration/targets/tags/runme.sh b/test/integration/targets/tags/runme.sh
index 7dcb9985..9da0b301 100755
--- a/test/integration/targets/tags/runme.sh
+++ b/test/integration/targets/tags/runme.sh
@@ -73,12 +73,3 @@ ansible-playbook -i ../../inventory ansible_run_tags.yml -e expect=list --tags t
ansible-playbook -i ../../inventory ansible_run_tags.yml -e expect=untagged --tags untagged "$@"
ansible-playbook -i ../../inventory ansible_run_tags.yml -e expect=untagged_list --tags untagged,tag3 "$@"
ansible-playbook -i ../../inventory ansible_run_tags.yml -e expect=tagged --tags tagged "$@"
-
-ansible-playbook test_template_parent_tags.yml "$@" 2>&1 | tee out.txt
-[ "$(grep out.txt -ce 'Tagged_task')" = "1" ]; rm out.txt
-
-ansible-playbook test_template_parent_tags.yml --tags tag1 "$@" 2>&1 | tee out.txt
-[ "$(grep out.txt -ce 'Tagged_task')" = "1" ]; rm out.txt
-
-ansible-playbook test_template_parent_tags.yml --skip-tags tag1 "$@" 2>&1 | tee out.txt
-[ "$(grep out.txt -ce 'Tagged_task')" = "0" ]; rm out.txt
diff --git a/test/integration/targets/tags/test_template_parent_tags.yml b/test/integration/targets/tags/test_template_parent_tags.yml
deleted file mode 100644
index ea1c8289..00000000
--- a/test/integration/targets/tags/test_template_parent_tags.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-- hosts: localhost
- gather_facts: false
- vars:
- tags_in_var:
- - tag1
- tasks:
- - block:
- - name: Tagged_task
- debug:
- tags: "{{ tags_in_var }}"
diff --git a/test/integration/targets/tasks/playbook.yml b/test/integration/targets/tasks/playbook.yml
index 10bd8591..80d9f8b1 100644
--- a/test/integration/targets/tasks/playbook.yml
+++ b/test/integration/targets/tasks/playbook.yml
@@ -6,11 +6,6 @@
debug:
msg: Hello
- # ensure we properly test for an action name, not a task name when cheking for a meta task
- - name: "meta"
- debug:
- msg: Hello
-
- name: ensure malformed raw_params on arbitrary actions are not ignored
debug:
garbage {{"with a template"}}
diff --git a/test/integration/targets/tasks/runme.sh b/test/integration/targets/tasks/runme.sh
index 57cbf28a..594447bd 100755
--- a/test/integration/targets/tasks/runme.sh
+++ b/test/integration/targets/tasks/runme.sh
@@ -1,3 +1,3 @@
#!/usr/bin/env bash
-ansible-playbook playbook.yml \ No newline at end of file
+ansible-playbook playbook.yml "$@"
diff --git a/test/integration/targets/template/ansible_managed_79129.yml b/test/integration/targets/template/ansible_managed_79129.yml
deleted file mode 100644
index e00ada8c..00000000
--- a/test/integration/targets/template/ansible_managed_79129.yml
+++ /dev/null
@@ -1,29 +0,0 @@
----
-- hosts: testhost
- gather_facts: false
- tasks:
- - set_fact:
- output_dir: "{{ lookup('env', 'OUTPUT_DIR') }}"
-
- - name: check strftime
- block:
- - template:
- src: "templates/%necho Onii-chan help Im stuck;exit 1%n.j2"
- dest: "{{ output_dir }}/79129-strftime.sh"
- mode: '0755'
-
- - shell: "exec {{ output_dir | quote }}/79129-strftime.sh"
-
- - name: check jinja template
- block:
- - template:
- src: !unsafe "templates/completely{{ 1 % 0 }} safe template.j2"
- dest: "{{ output_dir }}/79129-jinja.sh"
- mode: '0755'
-
- - shell: "exec {{ output_dir | quote }}/79129-jinja.sh"
- register: result
-
- - assert:
- that:
- - "'Hello' in result.stdout"
diff --git a/test/integration/targets/template/arg_template_overrides.j2 b/test/integration/targets/template/arg_template_overrides.j2
deleted file mode 100644
index 17a79b91..00000000
--- a/test/integration/targets/template/arg_template_overrides.j2
+++ /dev/null
@@ -1,4 +0,0 @@
-var_a: << var_a >>
-var_b: << var_b >>
-var_c: << var_c >>
-var_d: << var_d >>
diff --git a/test/integration/targets/template/in_template_overrides.yml b/test/integration/targets/template/in_template_overrides.yml
new file mode 100644
index 00000000..3c2d4d99
--- /dev/null
+++ b/test/integration/targets/template/in_template_overrides.yml
@@ -0,0 +1,28 @@
+- hosts: localhost
+ gather_facts: false
+ vars:
+ var_a: "value"
+ var_b: "{{ var_a }}"
+ var_c: "<< var_a >>"
+ tasks:
+ - set_fact:
+ var_d: "{{ var_a }}"
+
+ - block:
+ - template:
+ src: in_template_overrides.j2
+ dest: out.txt
+
+ - command: cat out.txt
+ register: out
+
+ - assert:
+ that:
+ - "'var_a: value' in out.stdout"
+ - "'var_b: value' in out.stdout"
+ - "'var_c: << var_a >>' in out.stdout"
+ - "'var_d: value' in out.stdout"
+ always:
+ - file:
+ path: out.txt
+ state: absent
diff --git a/test/integration/targets/template/runme.sh b/test/integration/targets/template/runme.sh
index d3913d97..30163af7 100755
--- a/test/integration/targets/template/runme.sh
+++ b/test/integration/targets/template/runme.sh
@@ -8,10 +8,7 @@ ANSIBLE_ROLES_PATH=../ ansible-playbook template.yml -i ../../inventory -v "$@"
ansible testhost -i testhost, -m debug -a 'msg={{ hostvars["localhost"] }}' -e "vars1={{ undef() }}" -e "vars2={{ vars1 }}"
# Test for https://github.com/ansible/ansible/issues/27262
-ANSIBLE_CONFIG=ansible_managed.cfg ansible-playbook ansible_managed.yml -i ../../inventory -v "$@"
-
-# Test for https://github.com/ansible/ansible/pull/79129
-ANSIBLE_CONFIG=ansible_managed.cfg ansible-playbook ansible_managed_79129.yml -i ../../inventory -v "$@"
+ansible-playbook ansible_managed.yml -c ansible_managed.cfg -i ../../inventory -v "$@"
# Test for #42585
ANSIBLE_ROLES_PATH=../ ansible-playbook custom_template.yml -i ../../inventory -v "$@"
@@ -42,7 +39,7 @@ ansible-playbook 72262.yml -v "$@"
ansible-playbook unsafe.yml -v "$@"
# ensure Jinja2 overrides from a template are used
-ansible-playbook template_overrides.yml -v "$@"
+ansible-playbook in_template_overrides.yml -v "$@"
ansible-playbook lazy_eval.yml -i ../../inventory -v "$@"
diff --git a/test/integration/targets/template/tasks/main.yml b/test/integration/targets/template/tasks/main.yml
index 34e88287..3c91734b 100644
--- a/test/integration/targets/template/tasks/main.yml
+++ b/test/integration/targets/template/tasks/main.yml
@@ -25,7 +25,7 @@
- name: show jinja2 version
debug:
- msg: "{{ lookup('pipe', ansible_python.executable ~ ' -c \"import jinja2; print(jinja2.__version__)\"') }}"
+ msg: "{{ lookup('pipe', '{{ ansible_python[\"executable\"] }} -c \"import jinja2; print(jinja2.__version__)\"') }}"
- name: get default group
shell: id -gn
@@ -760,7 +760,7 @@
that:
- test
vars:
- test: "{{ lookup('file', output_dir ~ '/empty_template.templated')|length == 0 }}"
+ test: "{{ lookup('file', '{{ output_dir }}/empty_template.templated')|length == 0 }}"
- name: test jinja2 override without colon throws proper error
block:
diff --git a/test/integration/targets/template/template_overrides.yml b/test/integration/targets/template/template_overrides.yml
deleted file mode 100644
index 50cfb8f1..00000000
--- a/test/integration/targets/template/template_overrides.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-- hosts: localhost
- gather_facts: false
- vars:
- output_dir: "{{ lookup('env', 'OUTPUT_DIR') }}"
- var_a: "value"
- var_b: "{{ var_a }}"
- var_c: "<< var_a >>"
- tasks:
- - set_fact:
- var_d: "{{ var_a }}"
-
- - template:
- src: in_template_overrides.j2
- dest: '{{ output_dir }}/in_template_overrides.out'
-
- - template:
- src: arg_template_overrides.j2
- dest: '{{ output_dir }}/arg_template_overrides.out'
- variable_start_string: '<<'
- variable_end_string: '>>'
-
- - command: cat '{{ output_dir }}/in_template_overrides.out'
- register: in_template_overrides_out
-
- - command: cat '{{ output_dir }}/arg_template_overrides.out'
- register: arg_template_overrides_out
-
- - assert:
- that:
- - "'var_a: value' in in_template_overrides_out.stdout"
- - "'var_b: value' in in_template_overrides_out.stdout"
- - "'var_c: << var_a >>' in in_template_overrides_out.stdout"
- - "'var_d: value' in in_template_overrides_out.stdout"
-
- - "'var_a: value' in arg_template_overrides_out.stdout"
- - "'var_b: value' in arg_template_overrides_out.stdout"
- - "'var_c: << var_a >>' in arg_template_overrides_out.stdout"
- - "'var_d: value' in arg_template_overrides_out.stdout"
diff --git a/test/integration/targets/template/templates/%necho Onii-chan help Im stuck;exit 1%n.j2 b/test/integration/targets/template/templates/%necho Onii-chan help Im stuck;exit 1%n.j2
deleted file mode 100644
index 2d63c158..00000000
--- a/test/integration/targets/template/templates/%necho Onii-chan help Im stuck;exit 1%n.j2
+++ /dev/null
@@ -1,3 +0,0 @@
-# {{ ansible_managed }}
-echo 79129 test passed
-exit 0
diff --git a/test/integration/targets/template/templates/completely{{ 1 % 0 }} safe template.j2 b/test/integration/targets/template/templates/completely{{ 1 % 0 }} safe template.j2
deleted file mode 100644
index c9a04b4f..00000000
--- a/test/integration/targets/template/templates/completely{{ 1 % 0 }} safe template.j2
+++ /dev/null
@@ -1,3 +0,0 @@
-# {{ ansible_managed }}
-echo Hello
-exit 0
diff --git a/test/integration/targets/template/unsafe.yml b/test/integration/targets/template/unsafe.yml
index 6f163881..bef9a4b4 100644
--- a/test/integration/targets/template/unsafe.yml
+++ b/test/integration/targets/template/unsafe.yml
@@ -3,7 +3,6 @@
vars:
nottemplated: this should not be seen
imunsafe: !unsafe '{{ nottemplated }}'
- unsafe_set: !unsafe '{{ "test" }}'
tasks:
- set_fact:
@@ -13,15 +12,11 @@
- set_fact:
this_always_safe: '{{ imunsafe }}'
- - set_fact:
- this_unsafe_set: "{{ unsafe_set }}"
-
- name: ensure nothing was templated
assert:
that:
- this_always_safe == imunsafe
- imunsafe == this_was_unsafe.strip()
- - unsafe_set == this_unsafe_set.strip()
- hosts: localhost
diff --git a/test/integration/targets/template_jinja2_non_native/macro_override.yml b/test/integration/targets/template_jinja2_non_native/macro_override.yml
index c3f9ab69..8a1cabd2 100644
--- a/test/integration/targets/template_jinja2_non_native/macro_override.yml
+++ b/test/integration/targets/template_jinja2_non_native/macro_override.yml
@@ -12,4 +12,4 @@
- "'foobar' not in data"
- "'\"foo\" \"bar\"' in data"
vars:
- data: "{{ lookup('file', output_dir ~ '/macro_override.out') }}"
+ data: "{{ lookup('file', '{{ output_dir }}/macro_override.out') }}"
diff --git a/test/integration/targets/templating/tasks/main.yml b/test/integration/targets/templating/tasks/main.yml
index edbf012e..312e171d 100644
--- a/test/integration/targets/templating/tasks/main.yml
+++ b/test/integration/targets/templating/tasks/main.yml
@@ -33,14 +33,3 @@
- result is failed
- >-
"TemplateSyntaxError: Could not load \"asdf \": 'invalid plugin name: ansible.builtin.asdf '" in result.msg
-
-- name: Make sure syntax errors originating from a template being compiled into Python code object result in a failure
- debug:
- msg: "{{ lookup('vars', 'v1', default='', default='') }}"
- ignore_errors: true
- register: r
-
-- assert:
- that:
- - r is failed
- - "'keyword argument repeated' in r.msg"
diff --git a/test/integration/targets/test_core/tasks/main.yml b/test/integration/targets/test_core/tasks/main.yml
index ac06d67e..8c2decbd 100644
--- a/test/integration/targets/test_core/tasks/main.yml
+++ b/test/integration/targets/test_core/tasks/main.yml
@@ -126,16 +126,6 @@
hello: world
register: executed_task
-- name: Skip me with multiple conditions
- set_fact:
- hello: world
- when:
- - True == True
- - foo == 'bar'
- vars:
- foo: foo
- register: skipped_task_multi_condition
-
- name: Try skipped test on non-dictionary
set_fact:
hello: "{{ 'nope' is skipped }}"
@@ -146,11 +136,8 @@
assert:
that:
- skipped_task is skipped
- - skipped_task.false_condition == False
- executed_task is not skipped
- misuse_of_skipped is failure
- - skipped_task_multi_condition is skipped
- - skipped_task_multi_condition.false_condition == "foo == 'bar'"
- name: Not an async task
set_fact:
diff --git a/test/integration/targets/test_utils/aliases b/test/integration/targets/test_utils/aliases
deleted file mode 100644
index 136c05e0..00000000
--- a/test/integration/targets/test_utils/aliases
+++ /dev/null
@@ -1 +0,0 @@
-hidden
diff --git a/test/integration/targets/test_utils/scripts/timeout.py b/test/integration/targets/test_utils/scripts/timeout.py
deleted file mode 100755
index f88f3e4e..00000000
--- a/test/integration/targets/test_utils/scripts/timeout.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-import argparse
-import subprocess
-import sys
-
-parser = argparse.ArgumentParser()
-parser.add_argument('duration', type=int)
-parser.add_argument('command', nargs='+')
-args = parser.parse_args()
-
-try:
- p = subprocess.run(
- ' '.join(args.command),
- shell=True,
- timeout=args.duration,
- check=False,
- )
- sys.exit(p.returncode)
-except subprocess.TimeoutExpired:
- sys.exit(124)
diff --git a/test/integration/targets/unarchive/runme.sh b/test/integration/targets/unarchive/runme.sh
deleted file mode 100755
index 5351a0c2..00000000
--- a/test/integration/targets/unarchive/runme.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-set -eux
-
-ansible-playbook -i ../../inventory runme.yml -v "$@"
-
-# https://github.com/ansible/ansible/issues/80710
-ANSIBLE_REMOTE_TMP=./ansible ansible-playbook -i ../../inventory test_relative_tmp_dir.yml -v "$@"
diff --git a/test/integration/targets/unarchive/runme.yml b/test/integration/targets/unarchive/runme.yml
deleted file mode 100644
index ddcd6095..00000000
--- a/test/integration/targets/unarchive/runme.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- hosts: all
- gather_facts: no
- roles:
- - { role: ../unarchive }
diff --git a/test/integration/targets/unarchive/tasks/main.yml b/test/integration/targets/unarchive/tasks/main.yml
index b07c2fe7..148e583f 100644
--- a/test/integration/targets/unarchive/tasks/main.yml
+++ b/test/integration/targets/unarchive/tasks/main.yml
@@ -20,4 +20,3 @@
- import_tasks: test_different_language_var.yml
- import_tasks: test_invalid_options.yml
- import_tasks: test_ownership_top_folder.yml
-- import_tasks: test_relative_dest.yml
diff --git a/test/integration/targets/unarchive/tasks/test_different_language_var.yml b/test/integration/targets/unarchive/tasks/test_different_language_var.yml
index 32c84f4b..9eec658e 100644
--- a/test/integration/targets/unarchive/tasks/test_different_language_var.yml
+++ b/test/integration/targets/unarchive/tasks/test_different_language_var.yml
@@ -2,10 +2,10 @@
when: ansible_os_family == 'Debian'
block:
- name: install fr language pack
- apt:
+ apt:
name: language-pack-fr
state: present
-
+
- name: create our unarchive destination
file:
path: "{{ remote_tmp_dir }}/test-unarchive-nonascii-くらとみ-tar-gz"
diff --git a/test/integration/targets/unarchive/tasks/test_mode.yml b/test/integration/targets/unarchive/tasks/test_mode.yml
index efd428eb..06fbc7b8 100644
--- a/test/integration/targets/unarchive/tasks/test_mode.yml
+++ b/test/integration/targets/unarchive/tasks/test_mode.yml
@@ -3,29 +3,6 @@
path: '{{remote_tmp_dir}}/test-unarchive-tar-gz'
state: directory
-- name: test invalid modes
- unarchive:
- src: "{{ remote_tmp_dir }}/test-unarchive.tar.gz"
- dest: "{{ remote_tmp_dir }}/test-unarchive-tar-gz"
- remote_src: yes
- mode: "{{ item }}"
- list_files: True
- register: unarchive_mode_errors
- ignore_errors: yes
- loop:
- - u=foo
- - foo=r
- - ufoo=r
- - abc=r
- - ao=r
- - oa=r
-
-- assert:
- that:
- - item.failed
- - "'bad symbolic permission for mode: ' + item.item == item.details"
- loop: "{{ unarchive_mode_errors.results }}"
-
- name: unarchive and set mode to 0600, directories 0700
unarchive:
src: "{{ remote_tmp_dir }}/test-unarchive.tar.gz"
diff --git a/test/integration/targets/unarchive/tasks/test_relative_dest.yml b/test/integration/targets/unarchive/tasks/test_relative_dest.yml
deleted file mode 100644
index aae31fb6..00000000
--- a/test/integration/targets/unarchive/tasks/test_relative_dest.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-- name: Create relative test directory
- file:
- path: test-unarchive-relative
- state: directory
-
-- name: Unarchive a file using a relative destination path
- unarchive:
- src: "{{ remote_tmp_dir }}/test-unarchive.tar"
- dest: test-unarchive-relative
- remote_src: yes
- register: relative_dest_1
-
-- name: Unarchive a file using a relative destination path again
- unarchive:
- src: "{{ remote_tmp_dir }}/test-unarchive.tar"
- dest: test-unarchive-relative
- remote_src: yes
- register: relative_dest_2
-
-- name: Ensure changes were made correctly
- assert:
- that:
- - relative_dest_1 is changed
- - relative_dest_1.warnings | length > 0
- - relative_dest_1.warnings[0] is search('absolute path')
- - relative_dest_2 is not changed
diff --git a/test/integration/targets/unarchive/test_relative_tmp_dir.yml b/test/integration/targets/unarchive/test_relative_tmp_dir.yml
deleted file mode 100644
index f368f7a6..00000000
--- a/test/integration/targets/unarchive/test_relative_tmp_dir.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-- hosts: all
- gather_facts: no
- tasks:
- - include_role:
- name: ../setup_remote_tmp_dir
- - include_role:
- name: ../setup_gnutar
- - include_tasks: tasks/prepare_tests.yml
-
- - include_tasks: tasks/test_tar.yml
diff --git a/test/integration/targets/unsafe_writes/aliases b/test/integration/targets/unsafe_writes/aliases
index 3560af2f..da1b554e 100644
--- a/test/integration/targets/unsafe_writes/aliases
+++ b/test/integration/targets/unsafe_writes/aliases
@@ -1,6 +1,7 @@
context/target
needs/root
skip/freebsd
+skip/osx
skip/macos
shippable/posix/group2
needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/until/tasks/main.yml b/test/integration/targets/until/tasks/main.yml
index 42ce9c8f..2b2ac94e 100644
--- a/test/integration/targets/until/tasks/main.yml
+++ b/test/integration/targets/until/tasks/main.yml
@@ -82,37 +82,3 @@
register: counter
delay: 0.5
until: counter.rc == 0
-
-- name: test retries without explicit until, defaults to "until task succeeds"
- block:
- - name: EXPECTED FAILURE
- fail:
- retries: 3
- delay: 0.1
- register: r
- ignore_errors: true
-
- - assert:
- that:
- - r.attempts == 3
-
- - vars:
- test_file: "{{ lookup('env', 'OUTPUT_DIR') }}/until_success_test_file"
- block:
- - file:
- name: "{{ test_file }}"
- state: absent
-
- - name: fail on the first invocation, succeed on the second
- shell: "[ -f {{ test_file }} ] || (touch {{ test_file }} && false)"
- retries: 5
- delay: 0.1
- register: r
- always:
- - file:
- name: "{{ test_file }}"
- state: absent
-
- - assert:
- that:
- - r.attempts == 2
diff --git a/test/integration/targets/unvault/main.yml b/test/integration/targets/unvault/main.yml
index 8f0adc75..a0f97b4b 100644
--- a/test/integration/targets/unvault/main.yml
+++ b/test/integration/targets/unvault/main.yml
@@ -1,5 +1,4 @@
- hosts: localhost
- gather_facts: false
tasks:
- set_fact:
unvaulted: "{{ lookup('unvault', 'vault') }}"
diff --git a/test/integration/targets/unvault/runme.sh b/test/integration/targets/unvault/runme.sh
index 054a14df..df4585e3 100755
--- a/test/integration/targets/unvault/runme.sh
+++ b/test/integration/targets/unvault/runme.sh
@@ -2,5 +2,5 @@
set -eux
-# simple run
+
ansible-playbook --vault-password-file password main.yml
diff --git a/test/integration/targets/uri/tasks/main.yml b/test/integration/targets/uri/tasks/main.yml
index ddae83a0..9ba09ece 100644
--- a/test/integration/targets/uri/tasks/main.yml
+++ b/test/integration/targets/uri/tasks/main.yml
@@ -132,7 +132,7 @@
- "result.changed == true"
- name: "get ca certificate {{ self_signed_host }}"
- uri:
+ get_url:
url: "http://{{ httpbin_host }}/ca2cert.pem"
dest: "{{ remote_tmp_dir }}/ca2cert.pem"
@@ -638,18 +638,9 @@
- assert:
that:
- result['set_cookie'] == 'Foo=bar, Baz=qux'
- # Python 3.10 and earlier sorts cookies in order of most specific (ie. longest) path first
+ # Python sorts cookies in order of most specific (ie. longest) path first
# items with the same path are reversed from response order
- result['cookies_string'] == 'Baz=qux; Foo=bar'
- when: ansible_python_version is version('3.11', '<')
-
-- assert:
- that:
- - result['set_cookie'] == 'Foo=bar, Baz=qux'
- # Python 3.11 no longer sorts cookies.
- # See: https://github.com/python/cpython/issues/86232
- - result['cookies_string'] == 'Foo=bar; Baz=qux'
- when: ansible_python_version is version('3.11', '>=')
- name: Write out netrc template
template:
@@ -766,30 +757,6 @@
dest: "{{ remote_tmp_dir }}/output"
state: absent
-- name: Test download root to dir without content-disposition
- uri:
- url: "https://{{ httpbin_host }}/"
- dest: "{{ remote_tmp_dir }}"
- register: get_root_no_filename
-
-- name: Test downloading to dir without content-disposition
- uri:
- url: "https://{{ httpbin_host }}/response-headers"
- dest: "{{ remote_tmp_dir }}"
- register: get_dir_no_filename
-
-- name: Test downloading to dir with content-disposition
- uri:
- url: 'https://{{ httpbin_host }}/response-headers?Content-Disposition=attachment%3B%20filename%3D%22filename.json%22'
- dest: "{{ remote_tmp_dir }}"
- register: get_dir_filename
-
-- assert:
- that:
- - get_root_no_filename.path == remote_tmp_dir ~ "/index.html"
- - get_dir_no_filename.path == remote_tmp_dir ~ "/response-headers"
- - get_dir_filename.path == remote_tmp_dir ~ "/filename.json"
-
- name: Test follow_redirects=none
import_tasks: redirect-none.yml
diff --git a/test/integration/targets/uri/tasks/redirect-none.yml b/test/integration/targets/uri/tasks/redirect-none.yml
index 060950d2..0d1b2b34 100644
--- a/test/integration/targets/uri/tasks/redirect-none.yml
+++ b/test/integration/targets/uri/tasks/redirect-none.yml
@@ -240,7 +240,7 @@
url: https://{{ httpbin_host }}/redirect-to?status_code=308&url=https://{{ httpbin_host }}/anything
follow_redirects: none
return_content: yes
- method: HEAD
+ method: GET
ignore_errors: yes
register: http_308_head
diff --git a/test/integration/targets/uri/tasks/redirect-urllib2.yml b/test/integration/targets/uri/tasks/redirect-urllib2.yml
index 73e87960..6cdafdb2 100644
--- a/test/integration/targets/uri/tasks/redirect-urllib2.yml
+++ b/test/integration/targets/uri/tasks/redirect-urllib2.yml
@@ -237,7 +237,7 @@
url: https://{{ httpbin_host }}/redirect-to?status_code=308&url=https://{{ httpbin_host }}/anything
follow_redirects: urllib2
return_content: yes
- method: HEAD
+ method: GET
ignore_errors: yes
register: http_308_head
@@ -250,23 +250,6 @@
- http_308_head.redirected == false
- http_308_head.status == 308
- http_308_head.url == 'https://{{ httpbin_host }}/redirect-to?status_code=308&url=https://{{ httpbin_host }}/anything'
- # Python 3.10 and earlier do not support HTTP 308 responses.
- # See: https://github.com/python/cpython/issues/84501
- when: ansible_python_version is version('3.11', '<')
-
-# NOTE: The HTTP HEAD turns into an HTTP GET
-- assert:
- that:
- - http_308_head is successful
- - http_308_head.json.data == ''
- - http_308_head.json.method == 'GET'
- - http_308_head.json.url == 'https://{{ httpbin_host }}/anything'
- - http_308_head.redirected == true
- - http_308_head.status == 200
- - http_308_head.url == 'https://{{ httpbin_host }}/anything'
- # Python 3.11 introduced support for HTTP 308 responses.
- # See: https://github.com/python/cpython/issues/84501
- when: ansible_python_version is version('3.11', '>=')
# FIXME: This is fixed in https://github.com/ansible/ansible/pull/36809
- name: Test HTTP 308 using GET
@@ -287,22 +270,6 @@
- http_308_get.redirected == false
- http_308_get.status == 308
- http_308_get.url == 'https://{{ httpbin_host }}/redirect-to?status_code=308&url=https://{{ httpbin_host }}/anything'
- # Python 3.10 and earlier do not support HTTP 308 responses.
- # See: https://github.com/python/cpython/issues/84501
- when: ansible_python_version is version('3.11', '<')
-
-- assert:
- that:
- - http_308_get is successful
- - http_308_get.json.data == ''
- - http_308_get.json.method == 'GET'
- - http_308_get.json.url == 'https://{{ httpbin_host }}/anything'
- - http_308_get.redirected == true
- - http_308_get.status == 200
- - http_308_get.url == 'https://{{ httpbin_host }}/anything'
- # Python 3.11 introduced support for HTTP 308 responses.
- # See: https://github.com/python/cpython/issues/84501
- when: ansible_python_version is version('3.11', '>=')
# FIXME: This is fixed in https://github.com/ansible/ansible/pull/36809
- name: Test HTTP 308 using POST
diff --git a/test/integration/targets/uri/tasks/return-content.yml b/test/integration/targets/uri/tasks/return-content.yml
index cb8aeea2..5a9b97e6 100644
--- a/test/integration/targets/uri/tasks/return-content.yml
+++ b/test/integration/targets/uri/tasks/return-content.yml
@@ -46,4 +46,4 @@
assert:
that:
- result is failed
- - "'content' not in result"
+ - "'content' not in result" \ No newline at end of file
diff --git a/test/integration/targets/uri/tasks/use_netrc.yml b/test/integration/targets/uri/tasks/use_netrc.yml
index 521f8ebf..da745b89 100644
--- a/test/integration/targets/uri/tasks/use_netrc.yml
+++ b/test/integration/targets/uri/tasks/use_netrc.yml
@@ -48,4 +48,4 @@
- name: Clean up
file:
dest: "{{ remote_tmp_dir }}/netrc"
- state: absent
+ state: absent \ No newline at end of file
diff --git a/test/integration/targets/user/tasks/main.yml b/test/integration/targets/user/tasks/main.yml
index be4c4d6f..9d36bfca 100644
--- a/test/integration/targets/user/tasks/main.yml
+++ b/test/integration/targets/user/tasks/main.yml
@@ -31,9 +31,7 @@
- import_tasks: test_expires.yml
- import_tasks: test_expires_new_account.yml
- import_tasks: test_expires_new_account_epoch_negative.yml
-- import_tasks: test_expires_no_shadow.yml
- import_tasks: test_expires_min_max.yml
-- import_tasks: test_expires_warn.yml
- import_tasks: test_shadow_backup.yml
- import_tasks: test_ssh_key_passphrase.yml
- import_tasks: test_password_lock.yml
diff --git a/test/integration/targets/user/tasks/test_create_user.yml b/test/integration/targets/user/tasks/test_create_user.yml
index 644dbebb..bced7905 100644
--- a/test/integration/targets/user/tasks/test_create_user.yml
+++ b/test/integration/targets/user/tasks/test_create_user.yml
@@ -65,15 +65,3 @@
- "user_test1.results[2]['state'] == 'present'"
- "user_test1.results[3]['state'] == 'present'"
- "user_test1.results[4]['state'] == 'present'"
-
-- name: register user informations
- when: ansible_facts.system == 'Darwin'
- command: dscl . -read /Users/ansibulluser
- register: user_test2
-
-- name: validate user defaults for MacOS
- when: ansible_facts.system == 'Darwin'
- assert:
- that:
- - "'RealName: ansibulluser' in user_test2.stdout_lines "
- - "'PrimaryGroupID: 20' in user_test2.stdout_lines "
diff --git a/test/integration/targets/user/tasks/test_create_user_home.yml b/test/integration/targets/user/tasks/test_create_user_home.yml
index 5561a2f5..1b529f76 100644
--- a/test/integration/targets/user/tasks/test_create_user_home.yml
+++ b/test/integration/targets/user/tasks/test_create_user_home.yml
@@ -134,21 +134,3 @@
name: randomuser
state: absent
remove: yes
-
-- name: Create user home directory with /dev/null as skeleton, https://github.com/ansible/ansible/issues/75063
- # create_homedir is mostly used by linux, rest of OSs take care of it themselves via -k option (which fails this task)
- when: ansible_system == 'Linux'
- block:
- - name: "Create user home directory with /dev/null as skeleton"
- user:
- name: withskeleton
- state: present
- skeleton: "/dev/null"
- createhome: yes
- register: create_user_with_skeleton_dev_null
- always:
- - name: "Remove test user"
- user:
- name: withskeleton
- state: absent
- remove: yes
diff --git a/test/integration/targets/user/tasks/test_expires_no_shadow.yml b/test/integration/targets/user/tasks/test_expires_no_shadow.yml
deleted file mode 100644
index 4629c6fb..00000000
--- a/test/integration/targets/user/tasks/test_expires_no_shadow.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-# https://github.com/ansible/ansible/issues/71916
-- name: Test setting expiration for a user account that does not have an /etc/shadow entry
- when: ansible_facts.os_family in ['RedHat', 'Debian', 'Suse']
- block:
- - name: Remove ansibulluser
- user:
- name: ansibulluser
- state: absent
- remove: yes
-
- - name: Create user account entry in /etc/passwd
- lineinfile:
- path: /etc/passwd
- line: "ansibulluser::575:575::/home/dummy:/bin/bash"
- regexp: "^ansibulluser.*"
- state: present
-
- - name: Create user with negative expiration
- user:
- name: ansibulluser
- uid: 575
- expires: -1
- register: user_test_expires_no_shadow_1
-
- - name: Create user with negative expiration again
- user:
- name: ansibulluser
- uid: 575
- expires: -1
- register: user_test_expires_no_shadow_2
-
- - name: Ensure changes were made appropriately
- assert:
- that:
- - user_test_expires_no_shadow_1 is changed
- - user_test_expires_no_shadow_2 is not changed
-
- - name: Get expiration date for ansibulluser
- getent:
- database: shadow
- key: ansibulluser
-
- - name: LINUX | Ensure proper expiration date was set
- assert:
- msg: "expiry is supposed to be empty or -1, not {{ getent_shadow['ansibulluser'][6] }}"
- that:
- - not getent_shadow['ansibulluser'][6] or getent_shadow['ansibulluser'][6] | int < 0
diff --git a/test/integration/targets/user/tasks/test_expires_warn.yml b/test/integration/targets/user/tasks/test_expires_warn.yml
deleted file mode 100644
index afe033cc..00000000
--- a/test/integration/targets/user/tasks/test_expires_warn.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-# https://github.com/ansible/ansible/issues/79882
-- name: Test setting warning days
- when: ansible_facts.os_family in ['RedHat', 'Debian', 'Suse']
- block:
- - name: create user
- user:
- name: ansibulluser
- state: present
-
- - name: add warning days for password
- user:
- name: ansibulluser
- password_expire_warn: 28
- register: pass_warn_1_0
-
- - name: again add warning days for password
- user:
- name: ansibulluser
- password_expire_warn: 28
- register: pass_warn_1_1
-
- - name: validate result for warning days
- assert:
- that:
- - pass_warn_1_0 is changed
- - pass_warn_1_1 is not changed
-
- - name: Get shadow data for ansibulluser
- getent:
- database: shadow
- key: ansibulluser
-
- - name: Ensure number of warning days was set properly
- assert:
- that:
- - ansible_facts.getent_shadow['ansibulluser'][4] == '28'
diff --git a/test/integration/targets/user/tasks/test_local.yml b/test/integration/targets/user/tasks/test_local.yml
index 217d4769..67c24a21 100644
--- a/test/integration/targets/user/tasks/test_local.yml
+++ b/test/integration/targets/user/tasks/test_local.yml
@@ -86,11 +86,9 @@
- testgroup3
- testgroup4
- testgroup5
- - testgroup6
- local_ansibulluser
tags:
- user_test_local_mode
- register: test_groups
- name: Create local_ansibulluser with groups
user:
@@ -115,18 +113,6 @@
tags:
- user_test_local_mode
-- name: Append groups for local_ansibulluser (again)
- user:
- name: local_ansibulluser
- state: present
- local: yes
- groups: ['testgroup3', 'testgroup4']
- append: yes
- register: local_user_test_4_again
- ignore_errors: yes
- tags:
- - user_test_local_mode
-
- name: Test append without groups for local_ansibulluser
user:
name: local_ansibulluser
@@ -147,28 +133,6 @@
tags:
- user_test_local_mode
-- name: Append groups for local_ansibulluser using group id
- user:
- name: local_ansibulluser
- state: present
- append: yes
- groups: "{{ test_groups.results[5]['gid'] }}"
- register: local_user_test_7
- ignore_errors: yes
- tags:
- - user_test_local_mode
-
-- name: Append groups for local_ansibulluser using gid (again)
- user:
- name: local_ansibulluser
- state: present
- append: yes
- groups: "{{ test_groups.results[5]['gid'] }}"
- register: local_user_test_7_again
- ignore_errors: yes
- tags:
- - user_test_local_mode
-
# If we don't re-assign, then "Set user expiration" will
# fail.
- name: Re-assign named group for local_ansibulluser
@@ -200,7 +164,6 @@
- testgroup3
- testgroup4
- testgroup5
- - testgroup6
- local_ansibulluser
tags:
- user_test_local_mode
@@ -212,10 +175,7 @@
- local_user_test_2 is not changed
- local_user_test_3 is changed
- local_user_test_4 is changed
- - local_user_test_4_again is not changed
- local_user_test_6 is changed
- - local_user_test_7 is changed
- - local_user_test_7_again is not changed
- local_user_test_remove_1 is changed
- local_user_test_remove_2 is not changed
tags:
diff --git a/test/integration/targets/user/vars/main.yml b/test/integration/targets/user/vars/main.yml
index 2acd1e12..4b328f71 100644
--- a/test/integration/targets/user/vars/main.yml
+++ b/test/integration/targets/user/vars/main.yml
@@ -10,4 +10,4 @@ status_command:
default_user_group:
openSUSE Leap: users
- MacOSX: staff
+ MacOSX: admin
diff --git a/test/integration/targets/var_blending/roles/test_var_blending/tasks/main.yml b/test/integration/targets/var_blending/roles/test_var_blending/tasks/main.yml
index ef2a06e1..f2b2e54a 100644
--- a/test/integration/targets/var_blending/roles/test_var_blending/tasks/main.yml
+++ b/test/integration/targets/var_blending/roles/test_var_blending/tasks/main.yml
@@ -1,4 +1,4 @@
-# test code
+# test code
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
# This file is part of Ansible
@@ -22,7 +22,7 @@
output_dir: "{{ lookup('env', 'OUTPUT_DIR') }}"
- name: deploy a template that will use variables at various levels
- template: src=foo.j2 dest={{output_dir}}/foo.templated
+ template: src=foo.j2 dest={{output_dir}}/foo.templated
register: template_result
- name: copy known good into place
@@ -33,9 +33,9 @@
register: diff_result
- name: verify templated file matches known good
- assert:
- that:
- - 'diff_result.stdout == ""'
+ assert:
+ that:
+ - 'diff_result.stdout == ""'
- name: check debug variable with same name as var content
debug: var=same_value_as_var_name_var
diff --git a/test/integration/targets/var_precedence/ansible-var-precedence-check.py b/test/integration/targets/var_precedence/ansible-var-precedence-check.py
index b03c87b8..fc31688b 100755
--- a/test/integration/targets/var_precedence/ansible-var-precedence-check.py
+++ b/test/integration/targets/var_precedence/ansible-var-precedence-check.py
@@ -14,6 +14,7 @@ import stat
import subprocess
import tempfile
import yaml
+from pprint import pprint
from optparse import OptionParser
from jinja2 import Environment
@@ -363,9 +364,9 @@ class VarTestMaker(object):
block_wrapper = [debug_task, test_task]
if 'include_params' in self.features:
- self.tasks.append(dict(name='including tasks', include_tasks='included_tasks.yml', vars=dict(findme='include_params')))
+ self.tasks.append(dict(name='including tasks', include='included_tasks.yml', vars=dict(findme='include_params')))
else:
- self.tasks.append(dict(include_tasks='included_tasks.yml'))
+ self.tasks.append(dict(include='included_tasks.yml'))
fname = os.path.join(TESTDIR, 'included_tasks.yml')
with open(fname, 'w') as f:
diff --git a/test/integration/targets/var_precedence/test_var_precedence.yml b/test/integration/targets/var_precedence/test_var_precedence.yml
index bba661db..58584bfb 100644
--- a/test/integration/targets/var_precedence/test_var_precedence.yml
+++ b/test/integration/targets/var_precedence/test_var_precedence.yml
@@ -1,18 +1,14 @@
---
- hosts: testhost
vars:
- ansible_hostname: "BAD!"
- vars_var: "vars_var"
- param_var: "BAD!"
- vars_files_var: "BAD!"
- extra_var_override_once_removed: "{{ extra_var_override }}"
- from_inventory_once_removed: "{{ inven_var | default('BAD!') }}"
+ - ansible_hostname: "BAD!"
+ - vars_var: "vars_var"
+ - param_var: "BAD!"
+ - vars_files_var: "BAD!"
+ - extra_var_override_once_removed: "{{ extra_var_override }}"
+ - from_inventory_once_removed: "{{ inven_var | default('BAD!') }}"
vars_files:
- vars/test_var_precedence.yml
- pre_tasks:
- - name: param vars should also override set_fact
- set_fact:
- param_var: "BAD!"
roles:
- { role: test_var_precedence, param_var: "param_var" }
tasks:
diff --git a/test/integration/targets/vars_files/aliases b/test/integration/targets/vars_files/aliases
deleted file mode 100644
index 8278ec8b..00000000
--- a/test/integration/targets/vars_files/aliases
+++ /dev/null
@@ -1,2 +0,0 @@
-shippable/posix/group3
-context/controller
diff --git a/test/integration/targets/vars_files/inventory b/test/integration/targets/vars_files/inventory
deleted file mode 100644
index 88dae267..00000000
--- a/test/integration/targets/vars_files/inventory
+++ /dev/null
@@ -1,3 +0,0 @@
-[testgroup]
-testhost foo=bar
-testhost2 foo=baz
diff --git a/test/integration/targets/vars_files/runme.sh b/test/integration/targets/vars_files/runme.sh
deleted file mode 100755
index 127536fa..00000000
--- a/test/integration/targets/vars_files/runme.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-set -eux
-
-ansible-playbook runme.yml -i inventory -v "$@"
diff --git a/test/integration/targets/vars_files/runme.yml b/test/integration/targets/vars_files/runme.yml
deleted file mode 100644
index 257f9294..00000000
--- a/test/integration/targets/vars_files/runme.yml
+++ /dev/null
@@ -1,22 +0,0 @@
----
-- hosts: testgroup
- gather_facts: no
- vars_files:
- - "vars/common.yml"
- -
- - "vars/{{ foo }}.yml"
- - "vars/defaults.yml"
- tasks:
- - import_tasks: validate.yml
-
-- hosts: testgroup
- gather_facts: no
- vars:
- _vars_files:
- - 'vars/{{ foo }}.yml'
- - 'vars/defaults.yml'
- vars_files:
- - "vars/common.yml"
- - "{{ lookup('first_found', _vars_files) }}"
- tasks:
- - import_tasks: validate.yml
diff --git a/test/integration/targets/vars_files/validate.yml b/test/integration/targets/vars_files/validate.yml
deleted file mode 100644
index dc889c54..00000000
--- a/test/integration/targets/vars_files/validate.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-- assert:
- that:
- - common is true
-- assert:
- that:
- - is_bar is true
- when: inventory_hostname == 'testhost'
-- assert:
- that:
- - is_bar is false
- when: inventory_hostname == 'testhost2'
diff --git a/test/integration/targets/vars_files/vars/bar.yml b/test/integration/targets/vars_files/vars/bar.yml
deleted file mode 100644
index d6f3c5b1..00000000
--- a/test/integration/targets/vars_files/vars/bar.yml
+++ /dev/null
@@ -1 +0,0 @@
-is_bar: yes
diff --git a/test/integration/targets/vars_files/vars/common.yml b/test/integration/targets/vars_files/vars/common.yml
deleted file mode 100644
index a8cd8085..00000000
--- a/test/integration/targets/vars_files/vars/common.yml
+++ /dev/null
@@ -1 +0,0 @@
-common: yes
diff --git a/test/integration/targets/vars_files/vars/defaults.yml b/test/integration/targets/vars_files/vars/defaults.yml
deleted file mode 100644
index 4a7bfac8..00000000
--- a/test/integration/targets/vars_files/vars/defaults.yml
+++ /dev/null
@@ -1 +0,0 @@
-is_bar: no
diff --git a/test/integration/targets/wait_for/tasks/main.yml b/test/integration/targets/wait_for/tasks/main.yml
index 74b8e9aa..f81fd0f2 100644
--- a/test/integration/targets/wait_for/tasks/main.yml
+++ b/test/integration/targets/wait_for/tasks/main.yml
@@ -91,7 +91,7 @@
wait_for:
path: "{{remote_tmp_dir}}/wait_for_keyword"
search_regex: completed (?P<foo>\w+) ([0-9]+)
- timeout: 25
+ timeout: 5
register: waitfor
- name: verify test wait for keyword in file with match groups
@@ -114,15 +114,6 @@
path: "{{remote_tmp_dir}}/utf16.txt"
search_regex: completed
-- name: test non mmapable file
- wait_for:
- path: "/sys/class/net/lo/carrier"
- search_regex: "1"
- timeout: 30
- when:
- - ansible_facts['os_family'] not in ['FreeBSD', 'Darwin']
- - not (ansible_facts['os_family'] in ['RedHat', 'CentOS'] and ansible_facts['distribution_major_version'] is version('7', '<='))
-
- name: test wait for port timeout
wait_for:
port: 12121
diff --git a/test/integration/targets/win_exec_wrapper/action_plugins/test_rc_1.py b/test/integration/targets/win_exec_wrapper/action_plugins/test_rc_1.py
deleted file mode 100644
index 60cffde9..00000000
--- a/test/integration/targets/win_exec_wrapper/action_plugins/test_rc_1.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright: (c) 2023, Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-import json
-
-from ansible.plugins.action import ActionBase
-
-
-class ActionModule(ActionBase):
-
- def run(self, tmp=None, task_vars=None):
- super().run(tmp, task_vars)
- del tmp
-
- exec_command = self._connection.exec_command
-
- def patched_exec_command(*args, **kwargs):
- rc, stdout, stderr = exec_command(*args, **kwargs)
-
- new_stdout = json.dumps({
- "rc": rc,
- "stdout": stdout.decode(),
- "stderr": stderr.decode(),
- "failed": False,
- "changed": False,
- }).encode()
-
- return (0, new_stdout, b"")
-
- try:
- # This is done to capture the raw rc/stdio from the module exec
- self._connection.exec_command = patched_exec_command
- return self._execute_module(task_vars=task_vars)
- finally:
- self._connection.exec_command = exec_command
diff --git a/test/integration/targets/win_exec_wrapper/library/test_rc_1.ps1 b/test/integration/targets/win_exec_wrapper/library/test_rc_1.ps1
deleted file mode 100644
index a9879548..00000000
--- a/test/integration/targets/win_exec_wrapper/library/test_rc_1.ps1
+++ /dev/null
@@ -1,17 +0,0 @@
-#!powershell
-
-# This scenario needs to use Legacy, the same HadErrors won't be set if using
-# Ansible.Basic
-#Requires -Module Ansible.ModuleUtils.Legacy
-
-# This will set `$ps.HadErrors` in the running pipeline but with no error
-# record written. We are testing that it won't set the rc to 1 for this
-# scenario.
-try {
- Write-Error -Message err -ErrorAction Stop
-}
-catch {
- Exit-Json @{}
-}
-
-Fail-Json @{} "This should not be reached"
diff --git a/test/integration/targets/win_exec_wrapper/tasks/main.yml b/test/integration/targets/win_exec_wrapper/tasks/main.yml
index f1342c48..8fc54f7c 100644
--- a/test/integration/targets/win_exec_wrapper/tasks/main.yml
+++ b/test/integration/targets/win_exec_wrapper/tasks/main.yml
@@ -272,12 +272,3 @@
assert:
that:
- ps_log_count.stdout | int == 0
-
-- name: test module that sets HadErrors with no error records
- test_rc_1:
- register: module_had_errors
-
-- name: assert test module that sets HadErrors with no error records
- assert:
- that:
- - module_had_errors.rc == 0
diff --git a/test/integration/targets/win_fetch/tasks/main.yml b/test/integration/targets/win_fetch/tasks/main.yml
index 16a28761..b5818352 100644
--- a/test/integration/targets/win_fetch/tasks/main.yml
+++ b/test/integration/targets/win_fetch/tasks/main.yml
@@ -215,17 +215,3 @@
- fetch_special_file.checksum == '34d4150adc3347f1dd8ce19fdf65b74d971ab602'
- fetch_special_file.dest == host_output_dir + "/abc$not var'quote‘"
- fetch_special_file_actual.stdout == 'abc'
-
-- name: create file with wildcard characters
- raw: Set-Content -LiteralPath '{{ remote_tmp_dir }}\abc[].txt' -Value 'abc'
-
-- name: fetch file with wildcard characters
- fetch:
- src: '{{ remote_tmp_dir }}\abc[].txt'
- dest: '{{ host_output_dir }}/'
- register: fetch_wildcard_file_nofail
-
-- name: assert fetch file with wildcard characters
- assert:
- that:
- - "fetch_wildcard_file_nofail is not failed"
diff --git a/test/integration/targets/win_script/files/test_script_with_args.ps1 b/test/integration/targets/win_script/files/test_script_with_args.ps1
index 669c6410..01bb37f5 100644
--- a/test/integration/targets/win_script/files/test_script_with_args.ps1
+++ b/test/integration/targets/win_script/files/test_script_with_args.ps1
@@ -2,5 +2,5 @@
# passed to the script.
foreach ($i in $args) {
- Write-Host $i
+ Write-Host $i;
}
diff --git a/test/integration/targets/win_script/files/test_script_with_errors.ps1 b/test/integration/targets/win_script/files/test_script_with_errors.ps1
index bdf7ee48..56f97735 100644
--- a/test/integration/targets/win_script/files/test_script_with_errors.ps1
+++ b/test/integration/targets/win_script/files/test_script_with_errors.ps1
@@ -2,7 +2,7 @@
trap {
Write-Error -ErrorRecord $_
- exit 1
+ exit 1;
}
throw "Oh noes I has an error"
diff --git a/test/integration/targets/windows-minimal/library/win_ping_set_attr.ps1 b/test/integration/targets/windows-minimal/library/win_ping_set_attr.ps1
index d23bbc74..f1704964 100644
--- a/test/integration/targets/windows-minimal/library/win_ping_set_attr.ps1
+++ b/test/integration/targets/windows-minimal/library/win_ping_set_attr.ps1
@@ -16,16 +16,16 @@
# POWERSHELL_COMMON
-$params = Parse-Args $args $true
+$params = Parse-Args $args $true;
-$data = Get-Attr $params "data" "pong"
+$data = Get-Attr $params "data" "pong";
$result = @{
changed = $false
ping = "pong"
-}
+};
# Test that Set-Attr will replace an existing attribute.
Set-Attr $result "ping" $data
-Exit-Json $result
+Exit-Json $result;
diff --git a/test/integration/targets/windows-minimal/library/win_ping_strict_mode_error.ps1 b/test/integration/targets/windows-minimal/library/win_ping_strict_mode_error.ps1
index 09400d08..508174af 100644
--- a/test/integration/targets/windows-minimal/library/win_ping_strict_mode_error.ps1
+++ b/test/integration/targets/windows-minimal/library/win_ping_strict_mode_error.ps1
@@ -16,15 +16,15 @@
# POWERSHELL_COMMON
-$params = Parse-Args $args $true
+$params = Parse-Args $args $true;
$params.thisPropertyDoesNotExist
-$data = Get-Attr $params "data" "pong"
+$data = Get-Attr $params "data" "pong";
$result = @{
changed = $false
ping = $data
-}
+};
-Exit-Json $result
+Exit-Json $result;
diff --git a/test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 b/test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1
index 6932d538..d4c9f07a 100644
--- a/test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1
+++ b/test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1
@@ -18,13 +18,13 @@
$blah = 'I can't quote my strings correctly.'
-$params = Parse-Args $args $true
+$params = Parse-Args $args $true;
-$data = Get-Attr $params "data" "pong"
+$data = Get-Attr $params "data" "pong";
$result = @{
changed = $false
ping = $data
-}
+};
-Exit-Json $result
+Exit-Json $result;
diff --git a/test/integration/targets/windows-minimal/library/win_ping_throw.ps1 b/test/integration/targets/windows-minimal/library/win_ping_throw.ps1
index 2fba2092..7306f4d2 100644
--- a/test/integration/targets/windows-minimal/library/win_ping_throw.ps1
+++ b/test/integration/targets/windows-minimal/library/win_ping_throw.ps1
@@ -18,13 +18,13 @@
throw
-$params = Parse-Args $args $true
+$params = Parse-Args $args $true;
-$data = Get-Attr $params "data" "pong"
+$data = Get-Attr $params "data" "pong";
$result = @{
changed = $false
ping = $data
-}
+};
-Exit-Json $result
+Exit-Json $result;
diff --git a/test/integration/targets/windows-minimal/library/win_ping_throw_string.ps1 b/test/integration/targets/windows-minimal/library/win_ping_throw_string.ps1
index 62de8263..09e3b7cb 100644
--- a/test/integration/targets/windows-minimal/library/win_ping_throw_string.ps1
+++ b/test/integration/targets/windows-minimal/library/win_ping_throw_string.ps1
@@ -18,13 +18,13 @@
throw "no ping for you"
-$params = Parse-Args $args $true
+$params = Parse-Args $args $true;
-$data = Get-Attr $params "data" "pong"
+$data = Get-Attr $params "data" "pong";
$result = @{
changed = $false
ping = $data
-}
+};
-Exit-Json $result
+Exit-Json $result;
diff --git a/test/integration/targets/yum/aliases b/test/integration/targets/yum/aliases
index b12f3547..1d491339 100644
--- a/test/integration/targets/yum/aliases
+++ b/test/integration/targets/yum/aliases
@@ -1,4 +1,5 @@
destructive
shippable/posix/group1
skip/freebsd
+skip/osx
skip/macos
diff --git a/test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py b/test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py
index 306ccd9a..27f38ce5 100644
--- a/test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py
+++ b/test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py
@@ -1,6 +1,8 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+from ansible.errors import AnsibleError, AnsibleFilterError
+
def filter_list_of_tuples_by_first_param(lst, search, startswith=False):
out = []
diff --git a/test/lib/ansible_test/_data/completion/docker.txt b/test/lib/ansible_test/_data/completion/docker.txt
index a863ecbf..9e1a9d5e 100644
--- a/test/lib/ansible_test/_data/completion/docker.txt
+++ b/test/lib/ansible_test/_data/completion/docker.txt
@@ -1,9 +1,9 @@
-base image=quay.io/ansible/base-test-container:5.10.0 python=3.12,2.7,3.6,3.7,3.8,3.9,3.10,3.11
-default image=quay.io/ansible/default-test-container:8.12.0 python=3.12,2.7,3.6,3.7,3.8,3.9,3.10,3.11 context=collection
-default image=quay.io/ansible/ansible-core-test-container:8.12.0 python=3.12,2.7,3.6,3.7,3.8,3.9,3.10,3.11 context=ansible-core
-alpine3 image=quay.io/ansible/alpine3-test-container:6.3.0 python=3.11 cgroup=none audit=none
-centos7 image=quay.io/ansible/centos7-test-container:6.3.0 python=2.7 cgroup=v1-only
-fedora38 image=quay.io/ansible/fedora38-test-container:6.3.0 python=3.11
-opensuse15 image=quay.io/ansible/opensuse15-test-container:6.3.0 python=3.6
-ubuntu2004 image=quay.io/ansible/ubuntu2004-test-container:6.3.0 python=3.8
-ubuntu2204 image=quay.io/ansible/ubuntu2204-test-container:6.3.0 python=3.10
+base image=quay.io/ansible/base-test-container:3.9.0 python=3.11,2.7,3.5,3.6,3.7,3.8,3.9,3.10
+default image=quay.io/ansible/default-test-container:6.13.0 python=3.11,2.7,3.5,3.6,3.7,3.8,3.9,3.10 context=collection
+default image=quay.io/ansible/ansible-core-test-container:6.13.0 python=3.11,2.7,3.5,3.6,3.7,3.8,3.9,3.10 context=ansible-core
+alpine3 image=quay.io/ansible/alpine3-test-container:4.8.0 python=3.10 cgroup=none audit=none
+centos7 image=quay.io/ansible/centos7-test-container:4.8.0 python=2.7 cgroup=v1-only
+fedora36 image=quay.io/ansible/fedora36-test-container:4.8.0 python=3.10
+opensuse15 image=quay.io/ansible/opensuse15-test-container:4.8.0 python=3.6
+ubuntu2004 image=quay.io/ansible/ubuntu2004-test-container:4.8.0 python=3.8
+ubuntu2204 image=quay.io/ansible/ubuntu2204-test-container:4.8.0 python=3.10
diff --git a/test/lib/ansible_test/_data/completion/remote.txt b/test/lib/ansible_test/_data/completion/remote.txt
index 06d4b5ef..9cb8dee8 100644
--- a/test/lib/ansible_test/_data/completion/remote.txt
+++ b/test/lib/ansible_test/_data/completion/remote.txt
@@ -1,14 +1,16 @@
-alpine/3.18 python=3.11 become=doas_sudo provider=aws arch=x86_64
+alpine/3.16 python=3.10 become=doas_sudo provider=aws arch=x86_64
alpine become=doas_sudo provider=aws arch=x86_64
-fedora/38 python=3.11 become=sudo provider=aws arch=x86_64
+fedora/36 python=3.10 become=sudo provider=aws arch=x86_64
fedora become=sudo provider=aws arch=x86_64
-freebsd/13.2 python=3.9,3.11 python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
+freebsd/12.4 python=3.9 python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
+freebsd/13.2 python=3.8,3.7,3.9,3.10 python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
freebsd python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
-macos/13.2 python=3.11 python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
+macos/12.0 python=3.10 python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
macos python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
rhel/7.9 python=2.7 become=sudo provider=aws arch=x86_64
-rhel/8.8 python=3.6,3.11 become=sudo provider=aws arch=x86_64
-rhel/9.2 python=3.9,3.11 become=sudo provider=aws arch=x86_64
+rhel/8.6 python=3.6,3.8,3.9 become=sudo provider=aws arch=x86_64
+rhel/9.0 python=3.9 become=sudo provider=aws arch=x86_64
rhel become=sudo provider=aws arch=x86_64
+ubuntu/20.04 python=3.8,3.9 become=sudo provider=aws arch=x86_64
ubuntu/22.04 python=3.10 become=sudo provider=aws arch=x86_64
ubuntu become=sudo provider=aws arch=x86_64
diff --git a/test/lib/ansible_test/_data/completion/windows.txt b/test/lib/ansible_test/_data/completion/windows.txt
index 860a2e32..92b0d086 100644
--- a/test/lib/ansible_test/_data/completion/windows.txt
+++ b/test/lib/ansible_test/_data/completion/windows.txt
@@ -1,3 +1,5 @@
+windows/2012 provider=azure arch=x86_64
+windows/2012-R2 provider=azure arch=x86_64
windows/2016 provider=aws arch=x86_64
windows/2019 provider=aws arch=x86_64
windows/2022 provider=aws arch=x86_64
diff --git a/test/lib/ansible_test/_data/requirements/ansible-test.txt b/test/lib/ansible_test/_data/requirements/ansible-test.txt
index 17662f07..f7cb9c27 100644
--- a/test/lib/ansible_test/_data/requirements/ansible-test.txt
+++ b/test/lib/ansible_test/_data/requirements/ansible-test.txt
@@ -1,5 +1,4 @@
# The test-constraints sanity test verifies this file, but changes must be made manually to keep it in up-to-date.
virtualenv == 16.7.12 ; python_version < '3'
-coverage == 7.3.2 ; python_version >= '3.8' and python_version <= '3.12'
-coverage == 6.5.0 ; python_version >= '3.7' and python_version <= '3.7'
+coverage == 6.5.0 ; python_version >= '3.7' and python_version <= '3.11'
coverage == 4.5.4 ; python_version >= '2.6' and python_version <= '3.6'
diff --git a/test/lib/ansible_test/_data/requirements/ansible.txt b/test/lib/ansible_test/_data/requirements/ansible.txt
index 5eaf9f2c..20562c3e 100644
--- a/test/lib/ansible_test/_data/requirements/ansible.txt
+++ b/test/lib/ansible_test/_data/requirements/ansible.txt
@@ -12,4 +12,4 @@ packaging
# NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69
# NOTE: When updating the upper bound, also update the latest version used
# NOTE: in the ansible-galaxy-collection test suite.
-resolvelib >= 0.5.3, < 1.1.0 # dependency resolver used by ansible-galaxy
+resolvelib >= 0.5.3, < 0.9.0 # dependency resolver used by ansible-galaxy
diff --git a/test/lib/ansible_test/_data/requirements/constraints.txt b/test/lib/ansible_test/_data/requirements/constraints.txt
index dd837e3b..627f41df 100644
--- a/test/lib/ansible_test/_data/requirements/constraints.txt
+++ b/test/lib/ansible_test/_data/requirements/constraints.txt
@@ -5,6 +5,7 @@ pywinrm >= 0.3.0 ; python_version < '3.11' # message encryption support
pywinrm >= 0.4.3 ; python_version >= '3.11' # support for Python 3.11
pytest < 5.0.0, >= 4.5.0 ; python_version == '2.7' # pytest 5.0.0 and later will no longer support python 2.7
pytest >= 4.5.0 ; python_version > '2.7' # pytest 4.5.0 added support for --strict-markers
+pytest-forked >= 1.0.2 # pytest-forked before 1.0.2 does not work with pytest 4.2.0+
ntlm-auth >= 1.3.0 # message encryption support using cryptography
requests-ntlm >= 1.1.0 # message encryption support
requests-credssp >= 0.1.0 # message encryption support
@@ -12,4 +13,5 @@ pyparsing < 3.0.0 ; python_version < '3.5' # pyparsing 3 and later require pytho
mock >= 2.0.0 # needed for features backported from Python 3.6 unittest.mock (assert_called, assert_called_once...)
pytest-mock >= 1.4.0 # needed for mock_use_standalone_module pytest option
setuptools < 45 ; python_version == '2.7' # setuptools 45 and later require python 3.5 or later
+pyspnego >= 0.1.6 ; python_version >= '3.10' # bug in older releases breaks on Python 3.10
wheel < 0.38.0 ; python_version < '3.7' # wheel 0.38.0 and later require python 3.7 or later
diff --git a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
index 66801459..580f0641 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
@@ -1,5 +1,8 @@
# edit "sanity.ansible-doc.in" and generate with: hacking/update-sanity-requirements.py --test ansible-doc
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
Jinja2==3.1.2
-MarkupSafe==2.1.3
-packaging==23.2
-PyYAML==6.0.1
+MarkupSafe==2.1.1
+packaging==21.3
+pyparsing==3.0.9
+PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.in b/test/lib/ansible_test/_data/requirements/sanity.changelog.in
index 81d65ff8..7f231827 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.changelog.in
+++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.in
@@ -1,2 +1,3 @@
-rstcheck < 6 # newer versions have too many dependencies
+rstcheck < 4 # match version used in other sanity tests
antsibull-changelog
+docutils < 0.18 # match version required by sphinx in the docs-build sanity test
diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
index d763bad2..1755a489 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
@@ -1,9 +1,10 @@
# edit "sanity.changelog.in" and generate with: hacking/update-sanity-requirements.py --test changelog
-antsibull-changelog==0.23.0
-docutils==0.18.1
-packaging==23.2
-PyYAML==6.0.1
-rstcheck==5.0.0
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
+antsibull-changelog==0.16.0
+docutils==0.17.1
+packaging==21.3
+pyparsing==3.0.9
+PyYAML==6.0
+rstcheck==3.5.0
semantic-version==2.10.0
-types-docutils==0.18.3
-typing_extensions==4.8.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
index 56366b77..93e147a5 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
@@ -1,4 +1,6 @@
# edit "sanity.import.plugin.in" and generate with: hacking/update-sanity-requirements.py --test import.plugin
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
Jinja2==3.1.2
-MarkupSafe==2.1.3
-PyYAML==6.0.1
+MarkupSafe==2.1.1
+PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.txt b/test/lib/ansible_test/_data/requirements/sanity.import.txt
index 4d9d4f53..4fda120d 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.txt
@@ -1,2 +1,4 @@
# edit "sanity.import.in" and generate with: hacking/update-sanity-requirements.py --test import
-PyYAML==6.0.1
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
+PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
index 17d60b6f..51cc1ca3 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
@@ -1,2 +1,4 @@
# edit "sanity.integration-aliases.in" and generate with: hacking/update-sanity-requirements.py --test integration-aliases
-PyYAML==6.0.1
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
+PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.mypy.in b/test/lib/ansible_test/_data/requirements/sanity.mypy.in
index f01ae948..98dead6c 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.mypy.in
+++ b/test/lib/ansible_test/_data/requirements/sanity.mypy.in
@@ -1,10 +1,10 @@
-mypy
-cryptography # type stubs not published separately
-jinja2 # type stubs not published separately
+mypy[python2] != 0.971 # regression in 0.971 (see https://github.com/python/mypy/pull/13223)
packaging # type stubs not published separately
types-backports
-types-paramiko
-types-pyyaml
+types-jinja2
+types-paramiko < 2.8.14 # newer versions drop support for Python 2.7
+types-pyyaml < 6 # PyYAML 6+ stubs do not support Python 2.7
+types-cryptography < 3.3.16 # newer versions drop support for Python 2.7
types-requests
types-setuptools
types-toml
diff --git a/test/lib/ansible_test/_data/requirements/sanity.mypy.txt b/test/lib/ansible_test/_data/requirements/sanity.mypy.txt
index f6a47fb0..9dffc8fb 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.mypy.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.mypy.txt
@@ -1,18 +1,20 @@
# edit "sanity.mypy.in" and generate with: hacking/update-sanity-requirements.py --test mypy
-cffi==1.16.0
-cryptography==41.0.4
-Jinja2==3.1.2
-MarkupSafe==2.1.3
-mypy==1.5.1
-mypy-extensions==1.0.0
-packaging==23.2
-pycparser==2.21
+mypy==0.961
+mypy-extensions==0.4.3
+packaging==21.3
+pyparsing==3.0.9
tomli==2.0.1
+typed-ast==1.5.4
types-backports==0.1.3
-types-paramiko==3.3.0.0
-types-PyYAML==6.0.12.12
-types-requests==2.31.0.7
-types-setuptools==68.2.0.0
-types-toml==0.10.8.7
-typing_extensions==4.8.0
-urllib3==2.0.6
+types-cryptography==3.3.15
+types-enum34==1.1.8
+types-ipaddress==1.0.8
+types-Jinja2==2.11.9
+types-MarkupSafe==1.1.10
+types-paramiko==2.8.13
+types-PyYAML==5.4.12
+types-requests==2.28.10
+types-setuptools==65.3.0
+types-toml==0.10.8
+types-urllib3==1.26.24
+typing_extensions==4.3.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pep8.txt b/test/lib/ansible_test/_data/requirements/sanity.pep8.txt
index 1a36d4da..60d5784f 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pep8.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.pep8.txt
@@ -1,2 +1,2 @@
# edit "sanity.pep8.in" and generate with: hacking/update-sanity-requirements.py --test pep8
-pycodestyle==2.11.0
+pycodestyle==2.9.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
index df36d61a..68545c9e 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
+++ b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
@@ -28,10 +28,8 @@ Function Install-PSModule {
}
}
-# Versions changes should be made first in ansible-test which is then synced to
-# the default-test-container over time
Set-PSRepository -Name PSGallery -InstallationPolicy Trusted
-Install-PSModule -Name PSScriptAnalyzer -RequiredVersion 1.21.0
+Install-PSModule -Name PSScriptAnalyzer -RequiredVersion 1.20.0
if ($IsContainer) {
# PSScriptAnalyzer contain lots of json files for the UseCompatibleCommands check. We don't use this rule so by
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.in b/test/lib/ansible_test/_data/requirements/sanity.pylint.in
index ae189587..fde21f12 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pylint.in
+++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.in
@@ -1,2 +1,2 @@
-pylint
+pylint == 2.15.5 # currently vetted version
pyyaml # needed for collection_detail.py
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
index c3144fe5..44d8b88c 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
@@ -1,11 +1,15 @@
# edit "sanity.pylint.in" and generate with: hacking/update-sanity-requirements.py --test pylint
-astroid==3.0.0
-dill==0.3.7
-isort==5.12.0
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
+astroid==2.12.12
+dill==0.3.6
+isort==5.10.1
+lazy-object-proxy==1.7.1
mccabe==0.7.0
-platformdirs==3.11.0
-pylint==3.0.1
-PyYAML==6.0.1
+platformdirs==2.5.2
+pylint==2.15.5
+PyYAML==6.0
tomli==2.0.1
-tomlkit==0.12.1
-typing_extensions==4.8.0
+tomlkit==0.11.5
+typing_extensions==4.3.0
+wrapt==1.14.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
index 4af9b95e..b2b70567 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
@@ -1,3 +1,5 @@
# edit "sanity.runtime-metadata.in" and generate with: hacking/update-sanity-requirements.py --test runtime-metadata
-PyYAML==6.0.1
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
+PyYAML==6.0
voluptuous==0.13.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in
index 78e116f5..efe94004 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in
+++ b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in
@@ -1,4 +1,3 @@
jinja2 # ansible-core requirement
pyyaml # needed for collection_detail.py
voluptuous
-antsibull-docs-parser==1.0.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
index 4e24d64d..8a877bba 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
@@ -1,6 +1,7 @@
# edit "sanity.validate-modules.in" and generate with: hacking/update-sanity-requirements.py --test validate-modules
-antsibull-docs-parser==1.0.0
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
Jinja2==3.1.2
-MarkupSafe==2.1.3
-PyYAML==6.0.1
+MarkupSafe==2.1.1
+PyYAML==6.0
voluptuous==0.13.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
index bafd30b6..dd401113 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
@@ -1,4 +1,6 @@
# edit "sanity.yamllint.in" and generate with: hacking/update-sanity-requirements.py --test yamllint
-pathspec==0.11.2
-PyYAML==6.0.1
-yamllint==1.32.0
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
+pathspec==0.10.1
+PyYAML==6.0
+yamllint==1.28.0
diff --git a/test/lib/ansible_test/_data/requirements/units.txt b/test/lib/ansible_test/_data/requirements/units.txt
index d723a65f..d2f56d35 100644
--- a/test/lib/ansible_test/_data/requirements/units.txt
+++ b/test/lib/ansible_test/_data/requirements/units.txt
@@ -2,4 +2,5 @@ mock
pytest
pytest-mock
pytest-xdist
+pytest-forked
pyyaml # required by the collection loader (only needed for collections)
diff --git a/test/lib/ansible_test/_internal/ci/azp.py b/test/lib/ansible_test/_internal/ci/azp.py
index ebf260b9..404f8056 100644
--- a/test/lib/ansible_test/_internal/ci/azp.py
+++ b/test/lib/ansible_test/_internal/ci/azp.py
@@ -70,7 +70,7 @@ class AzurePipelines(CIProvider):
os.environ['SYSTEM_JOBIDENTIFIER'],
)
except KeyError as ex:
- raise MissingEnvironmentVariable(name=ex.args[0]) from None
+ raise MissingEnvironmentVariable(name=ex.args[0])
return prefix
@@ -121,7 +121,7 @@ class AzurePipelines(CIProvider):
task_id=str(uuid.UUID(os.environ['SYSTEM_TASKINSTANCEID'])),
)
except KeyError as ex:
- raise MissingEnvironmentVariable(name=ex.args[0]) from None
+ raise MissingEnvironmentVariable(name=ex.args[0])
self.auth.sign_request(request)
@@ -154,7 +154,7 @@ class AzurePipelinesAuthHelper(CryptographyAuthHelper):
try:
agent_temp_directory = os.environ['AGENT_TEMPDIRECTORY']
except KeyError as ex:
- raise MissingEnvironmentVariable(name=ex.args[0]) from None
+ raise MissingEnvironmentVariable(name=ex.args[0])
# the temporary file cannot be deleted because we do not know when the agent has processed it
# placing the file in the agent's temp directory allows it to be picked up when the job is running in a container
@@ -181,7 +181,7 @@ class AzurePipelinesChanges:
self.source_branch_name = os.environ['BUILD_SOURCEBRANCHNAME']
self.pr_branch_name = os.environ.get('SYSTEM_PULLREQUEST_TARGETBRANCH')
except KeyError as ex:
- raise MissingEnvironmentVariable(name=ex.args[0]) from None
+ raise MissingEnvironmentVariable(name=ex.args[0])
if self.source_branch.startswith('refs/tags/'):
raise ChangeDetectionNotSupported('Change detection is not supported for tags.')
diff --git a/test/lib/ansible_test/_internal/cli/environments.py b/test/lib/ansible_test/_internal/cli/environments.py
index 7b1fd1c2..94cafae3 100644
--- a/test/lib/ansible_test/_internal/cli/environments.py
+++ b/test/lib/ansible_test/_internal/cli/environments.py
@@ -146,6 +146,12 @@ def add_global_options(
help='install command requirements',
)
+ global_parser.add_argument(
+ '--no-pip-check',
+ action='store_true',
+ help=argparse.SUPPRESS, # deprecated, kept for now (with a warning) for backwards compatibility
+ )
+
add_global_remote(global_parser, controller_mode)
add_global_docker(global_parser, controller_mode)
@@ -390,6 +396,7 @@ def add_global_docker(
"""Add global options for Docker."""
if controller_mode != ControllerMode.DELEGATED:
parser.set_defaults(
+ docker_no_pull=False,
docker_network=None,
docker_terminate=None,
prime_containers=False,
@@ -400,6 +407,12 @@ def add_global_docker(
return
parser.add_argument(
+ '--docker-no-pull',
+ action='store_true',
+ help=argparse.SUPPRESS, # deprecated, kept for now (with a warning) for backwards compatibility
+ )
+
+ parser.add_argument(
'--docker-network',
metavar='NET',
help='run using the specified network',
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
index 64bb13b0..ad6cf86f 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
@@ -57,9 +57,9 @@ def load_report(report: dict[str, t.Any]) -> tuple[list[str], Arcs, Lines]:
arc_data: dict[str, dict[str, int]] = report['arcs']
line_data: dict[str, dict[int, int]] = report['lines']
except KeyError as ex:
- raise ApplicationError('Document is missing key "%s".' % ex.args) from None
+ raise ApplicationError('Document is missing key "%s".' % ex.args)
except TypeError:
- raise ApplicationError('Document is type "%s" instead of "dict".' % type(report).__name__) from None
+ raise ApplicationError('Document is type "%s" instead of "dict".' % type(report).__name__)
arcs = dict((path, dict((parse_arc(arc), set(target_sets[index])) for arc, index in data.items())) for path, data in arc_data.items())
lines = dict((path, dict((int(line), set(target_sets[index])) for line, index in data.items())) for path, data in line_data.items())
@@ -72,12 +72,12 @@ def read_report(path: str) -> tuple[list[str], Arcs, Lines]:
try:
report = read_json_file(path)
except Exception as ex:
- raise ApplicationError('File "%s" is not valid JSON: %s' % (path, ex)) from None
+ raise ApplicationError('File "%s" is not valid JSON: %s' % (path, ex))
try:
return load_report(report)
except ApplicationError as ex:
- raise ApplicationError('File "%s" is not an aggregated coverage data file. %s' % (path, ex)) from None
+ raise ApplicationError('File "%s" is not an aggregated coverage data file. %s' % (path, ex))
def write_report(args: CoverageAnalyzeTargetsConfig, report: dict[str, t.Any], path: str) -> None:
diff --git a/test/lib/ansible_test/_internal/commands/coverage/combine.py b/test/lib/ansible_test/_internal/commands/coverage/combine.py
index fdeac838..12cb54e2 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/combine.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/combine.py
@@ -121,7 +121,7 @@ def _command_coverage_combine_python(args: CoverageCombineConfig, host_state: Ho
coverage_files = get_python_coverage_files()
def _default_stub_value(source_paths: list[str]) -> dict[str, set[tuple[int, int]]]:
- return {path: {(0, 0)} for path in source_paths}
+ return {path: set() for path in source_paths}
counter = 0
sources = _get_coverage_targets(args, walk_compile_targets)
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
index 136c5331..e8020ca9 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
@@ -8,6 +8,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
)
@@ -21,6 +22,8 @@ from . import (
class ACMEProvider(CloudProvider):
"""ACME plugin. Sets up cloud resources for tests."""
+ DOCKER_SIMULATOR_NAME = 'acme-simulator'
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -48,18 +51,17 @@ class ACMEProvider(CloudProvider):
14000, # Pebble ACME CA
]
- descriptor = run_support_container(
+ run_support_container(
self.args,
self.platform,
self.image,
- 'acme-simulator',
+ self.DOCKER_SIMULATOR_NAME,
ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
)
- if not descriptor:
- return
-
- self._set_cloud_config('acme_host', descriptor.name)
+ self._set_cloud_config('acme_host', self.DOCKER_SIMULATOR_NAME)
def _setup_static(self) -> None:
raise NotImplementedError()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
index 8060804a..8588df7d 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
@@ -21,6 +21,7 @@ from ....docker_util import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
wait_for_file,
)
@@ -35,10 +36,12 @@ from . import (
class CsCloudProvider(CloudProvider):
"""CloudStack cloud provider plugin. Sets up cloud resources before delegation."""
+ DOCKER_SIMULATOR_NAME = 'cloudstack-sim'
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
- self.image = os.environ.get('ANSIBLE_CLOUDSTACK_CONTAINER', 'quay.io/ansible/cloudstack-test-container:1.6.1')
+ self.image = os.environ.get('ANSIBLE_CLOUDSTACK_CONTAINER', 'quay.io/ansible/cloudstack-test-container:1.4.0')
self.host = ''
self.port = 0
@@ -93,8 +96,10 @@ class CsCloudProvider(CloudProvider):
self.args,
self.platform,
self.image,
- 'cloudstack-sim',
+ self.DOCKER_SIMULATOR_NAME,
ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
)
if not descriptor:
@@ -102,7 +107,7 @@ class CsCloudProvider(CloudProvider):
# apply work-around for OverlayFS issue
# https://github.com/docker/for-linux/issues/72#issuecomment-319904698
- docker_exec(self.args, descriptor.name, ['find', '/var/lib/mysql', '-type', 'f', '-exec', 'touch', '{}', ';'], capture=True)
+ docker_exec(self.args, self.DOCKER_SIMULATOR_NAME, ['find', '/var/lib/mysql', '-type', 'f', '-exec', 'touch', '{}', ';'], capture=True)
if self.args.explain:
values = dict(
@@ -110,10 +115,10 @@ class CsCloudProvider(CloudProvider):
PORT=str(self.port),
)
else:
- credentials = self._get_credentials(descriptor.name)
+ credentials = self._get_credentials(self.DOCKER_SIMULATOR_NAME)
values = dict(
- HOST=descriptor.name,
+ HOST=self.DOCKER_SIMULATOR_NAME,
PORT=str(self.port),
KEY=credentials['apikey'],
SECRET=credentials['secretkey'],
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
new file mode 100644
index 00000000..9e919cd8
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
@@ -0,0 +1,96 @@
+"""Foreman plugin for integration tests."""
+from __future__ import annotations
+
+import os
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from ....containers import (
+ CleanupMode,
+ run_support_container,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class ForemanProvider(CloudProvider):
+ """Foreman plugin. Sets up Foreman stub server for tests."""
+
+ DOCKER_SIMULATOR_NAME = 'foreman-stub'
+
+ # Default image to run Foreman stub from.
+ #
+ # The simulator must be pinned to a specific version
+ # to guarantee CI passes with the version used.
+ #
+    # Its source itself resides at:
+ # https://github.com/ansible/foreman-test-container
+ DOCKER_IMAGE = 'quay.io/ansible/foreman-test-container:1.4.0'
+
+ def __init__(self, args: IntegrationConfig) -> None:
+ super().__init__(args)
+
+ self.__container_from_env = os.environ.get('ANSIBLE_FRMNSIM_CONTAINER')
+ """
+ Overrides target container, might be used for development.
+
+ Use ANSIBLE_FRMNSIM_CONTAINER=whatever_you_want if you want
+ to use other image. Omit/empty otherwise.
+ """
+ self.image = self.__container_from_env or self.DOCKER_IMAGE
+
+ self.uses_docker = True
+
+ def setup(self) -> None:
+        """Setup cloud resource before delegation and register cleanup callback."""
+ super().setup()
+
+ if self._use_static_config():
+ self._setup_static()
+ else:
+ self._setup_dynamic()
+
+ def _setup_dynamic(self) -> None:
+ """Spawn a Foreman stub within docker container."""
+ foreman_port = 8080
+
+ ports = [
+ foreman_port,
+ ]
+
+ run_support_container(
+ self.args,
+ self.platform,
+ self.image,
+ self.DOCKER_SIMULATOR_NAME,
+ ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
+ )
+
+ self._set_cloud_config('FOREMAN_HOST', self.DOCKER_SIMULATOR_NAME)
+ self._set_cloud_config('FOREMAN_PORT', str(foreman_port))
+
+ def _setup_static(self) -> None:
+ raise NotImplementedError()
+
+
+class ForemanEnvironment(CloudEnvironment):
+ """Foreman environment plugin. Updates integration test environment after delegation."""
+
+ def get_environment_config(self) -> CloudEnvironmentConfig:
+ """Return environment configuration for use in the test environment after delegation."""
+ env_vars = dict(
+ FOREMAN_HOST=str(self._get_cloud_config('FOREMAN_HOST')),
+ FOREMAN_PORT=str(self._get_cloud_config('FOREMAN_PORT')),
+ )
+
+ return CloudEnvironmentConfig(
+ env_vars=env_vars,
+ )
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
index f7053c8b..1391cd84 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
@@ -10,21 +10,12 @@ from ....config import (
from ....docker_util import (
docker_cp_to,
- docker_exec,
)
from ....containers import (
run_support_container,
)
-from ....encoding import (
- to_text,
-)
-
-from ....util import (
- display,
-)
-
from . import (
CloudEnvironment,
CloudEnvironmentConfig,
@@ -32,59 +23,53 @@ from . import (
)
-GALAXY_HOST_NAME = 'galaxy-pulp'
-SETTINGS = {
- 'PULP_CONTENT_ORIGIN': f'http://{GALAXY_HOST_NAME}',
- 'PULP_ANSIBLE_API_HOSTNAME': f'http://{GALAXY_HOST_NAME}',
- 'PULP_GALAXY_API_PATH_PREFIX': '/api/galaxy/',
- # These paths are unique to the container image which has an nginx location for /pulp/content to route
- # requests to the content backend
- 'PULP_ANSIBLE_CONTENT_HOSTNAME': f'http://{GALAXY_HOST_NAME}/pulp/content/api/galaxy/v3/artifacts/collections/',
- 'PULP_CONTENT_PATH_PREFIX': '/pulp/content/api/galaxy/v3/artifacts/collections/',
- 'PULP_GALAXY_AUTHENTICATION_CLASSES': [
- 'rest_framework.authentication.SessionAuthentication',
- 'rest_framework.authentication.TokenAuthentication',
- 'rest_framework.authentication.BasicAuthentication',
- 'django.contrib.auth.backends.ModelBackend',
- ],
- # This should probably be false see https://issues.redhat.com/browse/AAH-2328
- 'PULP_GALAXY_REQUIRE_CONTENT_APPROVAL': 'true',
- 'PULP_GALAXY_DEPLOYMENT_MODE': 'standalone',
- 'PULP_GALAXY_AUTO_SIGN_COLLECTIONS': 'false',
- 'PULP_GALAXY_COLLECTION_SIGNING_SERVICE': 'ansible-default',
- 'PULP_RH_ENTITLEMENT_REQUIRED': 'insights',
- 'PULP_TOKEN_AUTH_DISABLED': 'false',
- 'PULP_TOKEN_SERVER': f'http://{GALAXY_HOST_NAME}/token/',
- 'PULP_TOKEN_SIGNATURE_ALGORITHM': 'ES256',
- 'PULP_PUBLIC_KEY_PATH': '/src/galaxy_ng/dev/common/container_auth_public_key.pem',
- 'PULP_PRIVATE_KEY_PATH': '/src/galaxy_ng/dev/common/container_auth_private_key.pem',
- 'PULP_ANALYTICS': 'false',
- 'PULP_GALAXY_ENABLE_UNAUTHENTICATED_COLLECTION_ACCESS': 'true',
- 'PULP_GALAXY_ENABLE_UNAUTHENTICATED_COLLECTION_DOWNLOAD': 'true',
- 'PULP_GALAXY_ENABLE_LEGACY_ROLES': 'true',
- 'PULP_GALAXY_FEATURE_FLAGS__execution_environments': 'false',
- 'PULP_SOCIAL_AUTH_LOGIN_REDIRECT_URL': '/',
- 'PULP_GALAXY_FEATURE_FLAGS__ai_deny_index': 'true',
- 'PULP_DEFAULT_ADMIN_PASSWORD': 'password'
+# We add BasicAuthentication, to make the tasks that deal with
+# direct API access easier to deal with across galaxy_ng and pulp
+SETTINGS = b'''
+CONTENT_ORIGIN = 'http://ansible-ci-pulp:80'
+ANSIBLE_API_HOSTNAME = 'http://ansible-ci-pulp:80'
+ANSIBLE_CONTENT_HOSTNAME = 'http://ansible-ci-pulp:80/pulp/content'
+TOKEN_AUTH_DISABLED = True
+GALAXY_REQUIRE_CONTENT_APPROVAL = False
+GALAXY_AUTHENTICATION_CLASSES = [
+ "rest_framework.authentication.SessionAuthentication",
+ "rest_framework.authentication.TokenAuthentication",
+ "rest_framework.authentication.BasicAuthentication",
+]
+'''
+
+SET_ADMIN_PASSWORD = b'''#!/usr/bin/execlineb -S0
+foreground {
+ redirfd -w 1 /dev/null
+ redirfd -w 2 /dev/null
+ export DJANGO_SETTINGS_MODULE pulpcore.app.settings
+ export PULP_CONTENT_ORIGIN localhost
+ s6-setuidgid postgres
+ if { /usr/local/bin/django-admin reset-admin-password --password password }
+ if { /usr/local/bin/pulpcore-manager create-group system:partner-engineers --users admin }
+}
+'''
+
+# There are 2 overrides here:
+# 1. Change the gunicorn bind address from 127.0.0.1 to 0.0.0.0 now that Galaxy NG does not allow us to access the
+# Pulp API through it.
+# 2. Grant access allowing us to DELETE a namespace in Galaxy NG. This is as CI deletes and recreates repos and
+# distributions in Pulp which now breaks the namespace in Galaxy NG. Recreating it is the "simple" fix to get it
+# working again.
+# These may not be needed in the future, especially if 1 becomes configurable by an env var but for now they must be
+# done.
+OVERRIDES = b'''#!/usr/bin/execlineb -S0
+foreground {
+ sed -i "0,/\\"127.0.0.1:24817\\"/s//\\"0.0.0.0:24817\\"/" /etc/services.d/pulpcore-api/run
}
-
-GALAXY_IMPORTER = b'''
-[galaxy-importer]
-ansible_local_tmp=~/.ansible/tmp
-ansible_test_local_image=false
-check_required_tags=false
-check_runtime_yaml=false
-check_changelog=false
-infra_osd=false
-local_image_docker=false
-log_level_main=INFO
-require_v1_or_greater=false
-run_ansible_doc=false
-run_ansible_lint=false
-run_ansible_test=false
-run_flake8=false
-'''.strip()
+# This sed call changes the first occurrence to "allow" which is conveniently the delete operation for a namespace.
+# https://github.com/ansible/galaxy_ng/blob/master/galaxy_ng/app/access_control/statements/standalone.py#L9-L11.
+backtick NG_PREFIX { python -c "import galaxy_ng; print(galaxy_ng.__path__[0], end='')" }
+importas ng_prefix NG_PREFIX
+foreground {
+ sed -i "0,/\\"effect\\": \\"deny\\"/s//\\"effect\\": \\"allow\\"/" ${ng_prefix}/app/access_control/statements/standalone.py
+}'''
class GalaxyProvider(CloudProvider):
@@ -96,9 +81,13 @@ class GalaxyProvider(CloudProvider):
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
- self.image = os.environ.get(
+        # Cannot use the latest container image as either galaxy_ng 4.2.0rc2 or pulp 0.5.0 has sporadic issues with
+ # dropping published collections in CI. Try running the tests multiple times when updating. Will also need to
+ # comment out the cache tests in 'test/integration/targets/ansible-galaxy-collection/tasks/install.yml' when
+ # the newer update is available.
+ self.pulp = os.environ.get(
'ANSIBLE_PULP_CONTAINER',
- 'quay.io/pulp/galaxy:4.7.1'
+ 'quay.io/ansible/pulp-galaxy-ng:b79a7be64eff'
)
self.uses_docker = True
@@ -107,46 +96,48 @@ class GalaxyProvider(CloudProvider):
"""Setup cloud resource before delegation and reg cleanup callback."""
super().setup()
- with tempfile.NamedTemporaryFile(mode='w+') as env_fd:
- settings = '\n'.join(
- f'{key}={value}' for key, value in SETTINGS.items()
- )
- env_fd.write(settings)
- env_fd.flush()
- display.info(f'>>> galaxy_ng Configuration\n{settings}', verbosity=3)
- descriptor = run_support_container(
- self.args,
- self.platform,
- self.image,
- GALAXY_HOST_NAME,
- [
- 80,
- ],
- aliases=[
- GALAXY_HOST_NAME,
- ],
- start=True,
- options=[
- '--env-file', env_fd.name,
- ],
- )
+ galaxy_port = 80
+ pulp_host = 'ansible-ci-pulp'
+ pulp_port = 24817
+
+ ports = [
+ galaxy_port,
+ pulp_port,
+ ]
+
+ # Create the container, don't run it, we need to inject configs before it starts
+ descriptor = run_support_container(
+ self.args,
+ self.platform,
+ self.pulp,
+ pulp_host,
+ ports,
+ start=False,
+ allow_existing=True,
+ )
if not descriptor:
return
- injected_files = [
- ('/etc/galaxy-importer/galaxy-importer.cfg', GALAXY_IMPORTER, 'galaxy-importer'),
- ]
- for path, content, friendly_name in injected_files:
- with tempfile.NamedTemporaryFile() as temp_fd:
- temp_fd.write(content)
- temp_fd.flush()
- display.info(f'>>> {friendly_name} Configuration\n{to_text(content)}', verbosity=3)
- docker_exec(self.args, descriptor.container_id, ['mkdir', '-p', os.path.dirname(path)], True)
- docker_cp_to(self.args, descriptor.container_id, temp_fd.name, path)
- docker_exec(self.args, descriptor.container_id, ['chown', 'pulp:pulp', path], True)
-
- self._set_cloud_config('PULP_HOST', GALAXY_HOST_NAME)
+ if not descriptor.running:
+ pulp_id = descriptor.container_id
+
+ injected_files = {
+ '/etc/pulp/settings.py': SETTINGS,
+ '/etc/cont-init.d/111-postgres': SET_ADMIN_PASSWORD,
+ '/etc/cont-init.d/000-ansible-test-overrides': OVERRIDES,
+ }
+ for path, content in injected_files.items():
+ with tempfile.NamedTemporaryFile() as temp_fd:
+ temp_fd.write(content)
+ temp_fd.flush()
+ docker_cp_to(self.args, pulp_id, temp_fd.name, path)
+
+ descriptor.start(self.args)
+
+ self._set_cloud_config('PULP_HOST', pulp_host)
+ self._set_cloud_config('PULP_PORT', str(pulp_port))
+ self._set_cloud_config('GALAXY_PORT', str(galaxy_port))
self._set_cloud_config('PULP_USER', 'admin')
self._set_cloud_config('PULP_PASSWORD', 'password')
@@ -159,19 +150,21 @@ class GalaxyEnvironment(CloudEnvironment):
pulp_user = str(self._get_cloud_config('PULP_USER'))
pulp_password = str(self._get_cloud_config('PULP_PASSWORD'))
pulp_host = self._get_cloud_config('PULP_HOST')
+ galaxy_port = self._get_cloud_config('GALAXY_PORT')
+ pulp_port = self._get_cloud_config('PULP_PORT')
return CloudEnvironmentConfig(
ansible_vars=dict(
pulp_user=pulp_user,
pulp_password=pulp_password,
- pulp_api=f'http://{pulp_host}',
- pulp_server=f'http://{pulp_host}/pulp_ansible/galaxy/',
- galaxy_ng_server=f'http://{pulp_host}/api/galaxy/',
+ pulp_api='http://%s:%s' % (pulp_host, pulp_port),
+ pulp_server='http://%s:%s/pulp_ansible/galaxy/' % (pulp_host, pulp_port),
+ galaxy_ng_server='http://%s:%s/api/galaxy/' % (pulp_host, galaxy_port),
),
env_vars=dict(
PULP_USER=pulp_user,
PULP_PASSWORD=pulp_password,
- PULP_SERVER=f'http://{pulp_host}/pulp_ansible/galaxy/api/',
- GALAXY_NG_SERVER=f'http://{pulp_host}/api/galaxy/',
+ PULP_SERVER='http://%s:%s/pulp_ansible/galaxy/api/' % (pulp_host, pulp_port),
+ GALAXY_NG_SERVER='http://%s:%s/api/galaxy/' % (pulp_host, galaxy_port),
),
)
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
index b3cf2d49..85065d6f 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
@@ -13,6 +13,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
)
@@ -61,6 +62,8 @@ class HttptesterProvider(CloudProvider):
'http-test-container',
ports,
aliases=aliases,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
env={
KRB5_PASSWORD_ENV: generate_password(),
},
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
index 62dd1558..5bed8340 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
@@ -8,6 +8,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
)
@@ -21,6 +22,8 @@ from . import (
class NiosProvider(CloudProvider):
"""Nios plugin. Sets up NIOS mock server for tests."""
+ DOCKER_SIMULATOR_NAME = 'nios-simulator'
+
# Default image to run the nios simulator.
#
# The simulator must be pinned to a specific version
@@ -28,7 +31,7 @@ class NiosProvider(CloudProvider):
#
# It's source source itself resides at:
# https://github.com/ansible/nios-test-container
- DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:2.0.0'
+ DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:1.4.0'
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -62,18 +65,17 @@ class NiosProvider(CloudProvider):
nios_port,
]
- descriptor = run_support_container(
+ run_support_container(
self.args,
self.platform,
self.image,
- 'nios-simulator',
+ self.DOCKER_SIMULATOR_NAME,
ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
)
- if not descriptor:
- return
-
- self._set_cloud_config('NIOS_HOST', descriptor.name)
+ self._set_cloud_config('NIOS_HOST', self.DOCKER_SIMULATOR_NAME)
def _setup_static(self) -> None:
raise NotImplementedError()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
index 6e8a5e4f..ddd434a8 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
@@ -16,6 +16,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
wait_for_file,
)
@@ -30,6 +31,8 @@ from . import (
class OpenShiftCloudProvider(CloudProvider):
"""OpenShift cloud provider plugin. Sets up cloud resources before delegation."""
+ DOCKER_CONTAINER_NAME = 'openshift-origin'
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args, config_extension='.kubeconfig')
@@ -71,8 +74,10 @@ class OpenShiftCloudProvider(CloudProvider):
self.args,
self.platform,
self.image,
- 'openshift-origin',
+ self.DOCKER_CONTAINER_NAME,
ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
cmd=cmd,
)
@@ -82,7 +87,7 @@ class OpenShiftCloudProvider(CloudProvider):
if self.args.explain:
config = '# Unknown'
else:
- config = self._get_config(descriptor.name, 'https://%s:%s/' % (descriptor.name, port))
+ config = self._get_config(self.DOCKER_CONTAINER_NAME, 'https://%s:%s/' % (self.DOCKER_CONTAINER_NAME, port))
self._write_config(config)
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
index b0ff7fe3..242b0204 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
@@ -2,6 +2,7 @@
from __future__ import annotations
import configparser
+import os
from ....util import (
ApplicationError,
@@ -12,6 +13,11 @@ from ....config import (
IntegrationConfig,
)
+from ....containers import (
+ CleanupMode,
+ run_support_container,
+)
+
from . import (
CloudEnvironment,
CloudEnvironmentConfig,
@@ -22,16 +28,66 @@ from . import (
class VcenterProvider(CloudProvider):
"""VMware vcenter/esx plugin. Sets up cloud resources for tests."""
+ DOCKER_SIMULATOR_NAME = 'vcenter-simulator'
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
- self.uses_config = True
+ # The simulator must be pinned to a specific version to guarantee CI passes with the version used.
+ if os.environ.get('ANSIBLE_VCSIM_CONTAINER'):
+ self.image = os.environ.get('ANSIBLE_VCSIM_CONTAINER')
+ else:
+ self.image = 'quay.io/ansible/vcenter-test-container:1.7.0'
+
+ # VMware tests can be run on govcsim or BYO with a static config file.
+ # The simulator is the default if no config is provided.
+ self.vmware_test_platform = os.environ.get('VMWARE_TEST_PLATFORM', 'govcsim')
+
+ if self.vmware_test_platform == 'govcsim':
+ self.uses_docker = True
+ self.uses_config = False
+ elif self.vmware_test_platform == 'static':
+ self.uses_docker = False
+ self.uses_config = True
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
- if not self._use_static_config():
+ self._set_cloud_config('vmware_test_platform', self.vmware_test_platform)
+
+ if self.vmware_test_platform == 'govcsim':
+ self._setup_dynamic_simulator()
+ self.managed = True
+ elif self.vmware_test_platform == 'static':
+ self._use_static_config()
+ self._setup_static()
+ else:
+ raise ApplicationError('Unknown vmware_test_platform: %s' % self.vmware_test_platform)
+
+ def _setup_dynamic_simulator(self) -> None:
+ """Create a vcenter simulator using docker."""
+ ports = [
+ 443,
+ 8080,
+ 8989,
+ 5000, # control port for flask app in simulator
+ ]
+
+ run_support_container(
+ self.args,
+ self.platform,
+ self.image,
+ self.DOCKER_SIMULATOR_NAME,
+ ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
+ )
+
+ self._set_cloud_config('vcenter_hostname', self.DOCKER_SIMULATOR_NAME)
+
+ def _setup_static(self) -> None:
+ if not os.path.exists(self.config_static_path):
raise ApplicationError('Configuration file does not exist: %s' % self.config_static_path)
@@ -40,21 +96,37 @@ class VcenterEnvironment(CloudEnvironment):
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
- # We may be in a container, so we cannot just reach VMWARE_TEST_PLATFORM,
- # We do a try/except instead
- parser = configparser.ConfigParser()
- parser.read(self.config_path) # static
-
- ansible_vars = dict(
- resource_prefix=self.resource_prefix,
- )
- ansible_vars.update(dict(parser.items('DEFAULT', raw=True)))
+ try:
+ # We may be in a container, so we cannot just reach VMWARE_TEST_PLATFORM,
+ # We do a try/except instead
+ parser = configparser.ConfigParser()
+ parser.read(self.config_path) # static
+
+ env_vars = {}
+ ansible_vars = dict(
+ resource_prefix=self.resource_prefix,
+ )
+ ansible_vars.update(dict(parser.items('DEFAULT', raw=True)))
+ except KeyError: # govcsim
+ env_vars = dict(
+ VCENTER_HOSTNAME=str(self._get_cloud_config('vcenter_hostname')),
+ VCENTER_USERNAME='user',
+ VCENTER_PASSWORD='pass',
+ )
+
+ ansible_vars = dict(
+ vcsim=str(self._get_cloud_config('vcenter_hostname')),
+ vcenter_hostname=str(self._get_cloud_config('vcenter_hostname')),
+ vcenter_username='user',
+ vcenter_password='pass',
+ )
for key, value in ansible_vars.items():
if key.endswith('_password'):
display.sensitive.add(value)
return CloudEnvironmentConfig(
+ env_vars=env_vars,
ansible_vars=ansible_vars,
module_defaults={
'group/vmware': {
diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
index 9b675e4a..0bc68a21 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
@@ -127,13 +127,9 @@ TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity')
# NOTE: must match ansible.constants.DOCUMENTABLE_PLUGINS, but with 'module' replaced by 'modules'!
DOCUMENTABLE_PLUGINS = (
- 'become', 'cache', 'callback', 'cliconf', 'connection', 'filter', 'httpapi', 'inventory',
- 'lookup', 'netconf', 'modules', 'shell', 'strategy', 'test', 'vars',
+ 'become', 'cache', 'callback', 'cliconf', 'connection', 'httpapi', 'inventory', 'lookup', 'netconf', 'modules', 'shell', 'strategy', 'vars'
)
-# Plugin types that can have multiple plugins per file, and where filenames not always correspond to plugin names
-MULTI_FILE_PLUGINS = ('filter', 'test', )
-
created_venvs: list[str] = []
@@ -264,7 +260,7 @@ def command_sanity(args: SanityConfig) -> None:
virtualenv_python = create_sanity_virtualenv(args, test_profile.python, test.name)
if virtualenv_python:
- virtualenv_yaml = args.explain or check_sanity_virtualenv_yaml(virtualenv_python)
+ virtualenv_yaml = check_sanity_virtualenv_yaml(virtualenv_python)
if test.require_libyaml and not virtualenv_yaml:
result = SanitySkipped(test.name)
@@ -879,7 +875,6 @@ class SanityCodeSmellTest(SanitySingleVersion):
self.__include_directories: bool = self.config.get('include_directories')
self.__include_symlinks: bool = self.config.get('include_symlinks')
self.__py2_compat: bool = self.config.get('py2_compat', False)
- self.__error_code: str | None = self.config.get('error_code', None)
else:
self.output = None
self.extensions = []
@@ -895,7 +890,6 @@ class SanityCodeSmellTest(SanitySingleVersion):
self.__include_directories = False
self.__include_symlinks = False
self.__py2_compat = False
- self.__error_code = None
if self.no_targets:
mutually_exclusive = (
@@ -915,11 +909,6 @@ class SanityCodeSmellTest(SanitySingleVersion):
raise ApplicationError('Sanity test "%s" option "no_targets" is mutually exclusive with options: %s' % (self.name, ', '.join(problems)))
@property
- def error_code(self) -> t.Optional[str]:
- """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
- return self.__error_code
-
- @property
def all_targets(self) -> bool:
"""True if test targets will not be filtered using includes, excludes, requires or changes. Mutually exclusive with no_targets."""
return self.__all_targets
@@ -1003,8 +992,6 @@ class SanityCodeSmellTest(SanitySingleVersion):
pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
elif self.output == 'path-message':
pattern = '^(?P<path>[^:]*): (?P<message>.*)$'
- elif self.output == 'path-line-column-code-message':
- pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<code>[^:]*): (?P<message>.*)$'
else:
raise ApplicationError('Unsupported output type: %s' % self.output)
@@ -1034,7 +1021,6 @@ class SanityCodeSmellTest(SanitySingleVersion):
path=m['path'],
line=int(m.get('line', 0)),
column=int(m.get('column', 0)),
- code=m.get('code'),
) for m in matches]
messages = settings.process_errors(messages, paths)
@@ -1180,23 +1166,20 @@ def create_sanity_virtualenv(
run_pip(args, virtualenv_python, commands, None) # create_sanity_virtualenv()
- if not args.explain:
- write_text_file(meta_install, virtualenv_install)
+ write_text_file(meta_install, virtualenv_install)
# false positive: pylint: disable=no-member
if any(isinstance(command, PipInstall) and command.has_package('pyyaml') for command in commands):
- virtualenv_yaml = yamlcheck(virtualenv_python, args.explain)
+ virtualenv_yaml = yamlcheck(virtualenv_python)
else:
virtualenv_yaml = None
- if not args.explain:
- write_json_file(meta_yaml, virtualenv_yaml)
+ write_json_file(meta_yaml, virtualenv_yaml)
created_venvs.append(f'{label}-{python.version}')
- if not args.explain:
- # touch the marker to keep track of when the virtualenv was last used
- pathlib.Path(virtualenv_marker).touch()
+ # touch the marker to keep track of when the virtualenv was last used
+ pathlib.Path(virtualenv_marker).touch()
return virtualenv_python
diff --git a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
index ff035ef9..04080f60 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
@@ -2,13 +2,11 @@
from __future__ import annotations
import collections
-import json
import os
import re
from . import (
DOCUMENTABLE_PLUGINS,
- MULTI_FILE_PLUGINS,
SanitySingleVersion,
SanityFailure,
SanitySuccess,
@@ -87,44 +85,6 @@ class AnsibleDocTest(SanitySingleVersion):
doc_targets[plugin_type].append(plugin_fqcn)
env = ansible_environment(args, color=False)
-
- for doc_type in MULTI_FILE_PLUGINS:
- if doc_targets.get(doc_type):
- # List plugins
- cmd = ['ansible-doc', '-l', '--json', '-t', doc_type]
- prefix = data_context().content.prefix if data_context().content.collection else 'ansible.builtin.'
- cmd.append(prefix[:-1])
- try:
- stdout, stderr = intercept_python(args, python, cmd, env, capture=True)
- status = 0
- except SubprocessError as ex:
- stdout = ex.stdout
- stderr = ex.stderr
- status = ex.status
-
- if status:
- summary = '%s' % SubprocessError(cmd=cmd, status=status, stderr=stderr)
- return SanityFailure(self.name, summary=summary)
-
- if stdout:
- display.info(stdout.strip(), verbosity=3)
-
- if stderr:
- summary = 'Output on stderr from ansible-doc is considered an error.\n\n%s' % SubprocessError(cmd, stderr=stderr)
- return SanityFailure(self.name, summary=summary)
-
- if args.explain:
- continue
-
- plugin_list_json = json.loads(stdout)
- doc_targets[doc_type] = []
- for plugin_name, plugin_value in sorted(plugin_list_json.items()):
- if plugin_value != 'UNDOCUMENTED':
- doc_targets[doc_type].append(plugin_name)
-
- if not doc_targets[doc_type]:
- del doc_targets[doc_type]
-
error_messages: list[SanityMessage] = []
for doc_type in sorted(doc_targets):
diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py
index 36f52415..b8083324 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/import.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/import.py
@@ -127,26 +127,20 @@ class ImportTest(SanityMultipleVersion):
('plugin', _get_module_test(False)),
):
if import_type == 'plugin' and python.version in REMOTE_ONLY_PYTHON_VERSIONS:
- # Plugins are not supported on remote-only Python versions.
- # However, the collection loader is used by the import sanity test and unit tests on remote-only Python versions.
- # To support this, it is tested as a plugin, but using a venv which installs no requirements.
- # Filtering of paths relevant to the Python version tested has already been performed by filter_remote_targets.
- venv_type = 'empty'
- else:
- venv_type = import_type
+ continue
data = '\n'.join([path for path in paths if test(path)])
if not data and not args.prime_venvs:
continue
- virtualenv_python = create_sanity_virtualenv(args, python, f'{self.name}.{venv_type}', coverage=args.coverage, minimize=True)
+ virtualenv_python = create_sanity_virtualenv(args, python, f'{self.name}.{import_type}', coverage=args.coverage, minimize=True)
if not virtualenv_python:
display.warning(f'Skipping sanity test "{self.name}" on Python {python.version} due to missing virtual environment support.')
return SanitySkipped(self.name, python.version)
- virtualenv_yaml = args.explain or check_sanity_virtualenv_yaml(virtualenv_python)
+ virtualenv_yaml = check_sanity_virtualenv_yaml(virtualenv_python)
if virtualenv_yaml is False:
display.warning(f'Sanity test "{self.name}" ({import_type}) on Python {python.version} may be slow due to missing libyaml support in PyYAML.')
diff --git a/test/lib/ansible_test/_internal/commands/sanity/mypy.py b/test/lib/ansible_test/_internal/commands/sanity/mypy.py
index c93474e8..57ce1277 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/mypy.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/mypy.py
@@ -19,7 +19,6 @@ from . import (
from ...constants import (
CONTROLLER_PYTHON_VERSIONS,
REMOTE_ONLY_PYTHON_VERSIONS,
- SUPPORTED_PYTHON_VERSIONS,
)
from ...test import (
@@ -37,7 +36,6 @@ from ...util import (
ANSIBLE_TEST_CONTROLLER_ROOT,
ApplicationError,
is_subdir,
- str_to_version,
)
from ...util_common import (
@@ -73,19 +71,9 @@ class MypyTest(SanityMultipleVersion):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and target.path not in self.vendored_paths and (
target.path.startswith('lib/ansible/') or target.path.startswith('test/lib/ansible_test/_internal/')
- or target.path.startswith('packaging/')
or target.path.startswith('test/lib/ansible_test/_util/target/sanity/import/'))]
@property
- def supported_python_versions(self) -> t.Optional[tuple[str, ...]]:
- """A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
- # mypy 0.981 dropped support for Python 2
- # see: https://mypy-lang.blogspot.com/2022/09/mypy-0981-released.html
- # cryptography dropped support for Python 3.5 in version 3.3
- # see: https://cryptography.io/en/latest/changelog/#v3-3
- return tuple(version for version in SUPPORTED_PYTHON_VERSIONS if str_to_version(version) >= (3, 6))
-
- @property
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'ansible-test'
@@ -117,7 +105,6 @@ class MypyTest(SanityMultipleVersion):
MyPyContext('ansible-test', ['test/lib/ansible_test/_internal/'], controller_python_versions),
MyPyContext('ansible-core', ['lib/ansible/'], controller_python_versions),
MyPyContext('modules', ['lib/ansible/modules/', 'lib/ansible/module_utils/'], remote_only_python_versions),
- MyPyContext('packaging', ['packaging/'], controller_python_versions),
)
unfiltered_messages: list[SanityMessage] = []
@@ -170,9 +157,6 @@ class MypyTest(SanityMultipleVersion):
# However, it will also report issues on those files, which is not the desired behavior.
messages = [message for message in messages if message.path in paths_set]
- if args.explain:
- return SanitySuccess(self.name, python_version=python.version)
-
results = settings.process_errors(messages, paths)
if results:
@@ -255,7 +239,7 @@ class MypyTest(SanityMultipleVersion):
pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):((?P<column>[0-9]+):)? (?P<level>[^:]+): (?P<message>.*)$'
- parsed = parse_to_list_of_dict(pattern, stdout or '')
+ parsed = parse_to_list_of_dict(pattern, stdout)
messages = [SanityMessage(
level=r['level'],
diff --git a/test/lib/ansible_test/_internal/commands/sanity/pylint.py b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
index 54b1952f..c089f834 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/pylint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
@@ -18,11 +18,6 @@ from . import (
SANITY_ROOT,
)
-from ...constants import (
- CONTROLLER_PYTHON_VERSIONS,
- REMOTE_ONLY_PYTHON_VERSIONS,
-)
-
from ...io import (
make_dirs,
)
@@ -43,7 +38,6 @@ from ...util import (
from ...util_common import (
run_command,
- process_scoped_temporary_file,
)
from ...ansible_util import (
@@ -87,8 +81,6 @@ class PylintTest(SanitySingleVersion):
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
- min_python_version_db_path = self.create_min_python_db(args, targets.targets)
-
plugin_dir = os.path.join(SANITY_ROOT, 'pylint', 'plugins')
plugin_names = sorted(p[0] for p in [
os.path.splitext(p) for p in os.listdir(plugin_dir)] if p[1] == '.py' and p[0] != '__init__')
@@ -171,7 +163,7 @@ class PylintTest(SanitySingleVersion):
continue
context_start = datetime.datetime.now(tz=datetime.timezone.utc)
- messages += self.pylint(args, context, context_paths, plugin_dir, plugin_names, python, collection_detail, min_python_version_db_path)
+ messages += self.pylint(args, context, context_paths, plugin_dir, plugin_names, python, collection_detail)
context_end = datetime.datetime.now(tz=datetime.timezone.utc)
context_times.append('%s: %d (%s)' % (context, len(context_paths), context_end - context_start))
@@ -202,22 +194,6 @@ class PylintTest(SanitySingleVersion):
return SanitySuccess(self.name)
- def create_min_python_db(self, args: SanityConfig, targets: t.Iterable[TestTarget]) -> str:
- """Create a database of target file paths and their minimum required Python version, returning the path to the database."""
- target_paths = set(target.path for target in self.filter_remote_targets(list(targets)))
- controller_min_version = CONTROLLER_PYTHON_VERSIONS[0]
- target_min_version = REMOTE_ONLY_PYTHON_VERSIONS[0]
- min_python_versions = {
- os.path.abspath(target.path): target_min_version if target.path in target_paths else controller_min_version for target in targets
- }
-
- min_python_version_db_path = process_scoped_temporary_file(args)
-
- with open(min_python_version_db_path, 'w') as database_file:
- json.dump(min_python_versions, database_file)
-
- return min_python_version_db_path
-
@staticmethod
def pylint(
args: SanityConfig,
@@ -227,7 +203,6 @@ class PylintTest(SanitySingleVersion):
plugin_names: list[str],
python: PythonConfig,
collection_detail: CollectionDetail,
- min_python_version_db_path: str,
) -> list[dict[str, str]]:
"""Run pylint using the config specified by the context on the specified paths."""
rcfile = os.path.join(SANITY_ROOT, 'pylint', 'config', context.split('/')[0] + '.cfg')
@@ -259,7 +234,6 @@ class PylintTest(SanitySingleVersion):
'--rcfile', rcfile,
'--output-format', 'json',
'--load-plugins', ','.join(sorted(load_plugins)),
- '--min-python-version-db', min_python_version_db_path,
] + paths # fmt: skip
if data_context().content.collection:
diff --git a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
index e29b5dec..3153bc99 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
@@ -10,7 +10,6 @@ import typing as t
from . import (
DOCUMENTABLE_PLUGINS,
- MULTI_FILE_PLUGINS,
SanitySingleVersion,
SanityMessage,
SanityFailure,
@@ -129,10 +128,6 @@ class ValidateModulesTest(SanitySingleVersion):
for target in targets.include:
target_per_type[self.get_plugin_type(target)].append(target)
- # Remove plugins that cannot be associated to a single file (test and filter plugins).
- for plugin_type in MULTI_FILE_PLUGINS:
- target_per_type.pop(plugin_type, None)
-
cmd = [
python.path,
os.path.join(SANITY_ROOT, 'validate-modules', 'validate.py'),
diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py
index 71ce5c4d..7d192e1b 100644
--- a/test/lib/ansible_test/_internal/commands/units/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/units/__init__.py
@@ -253,6 +253,7 @@ def command_units(args: UnitsConfig) -> None:
cmd = [
'pytest',
+ '--forked',
'-r', 'a',
'-n', str(args.num_workers) if args.num_workers else 'auto',
'--color', 'yes' if args.color else 'no',
@@ -261,7 +262,6 @@ def command_units(args: UnitsConfig) -> None:
'--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-%s-units.xml' % (python.version, test_context)),
'--strict-markers', # added in pytest 4.5.0
'--rootdir', data_context().content.root,
- '--confcutdir', data_context().content.root, # avoid permission errors when running from an installed version and using pytest >= 8
] # fmt:skip
if not data_context().content.collection:
@@ -275,8 +275,6 @@ def command_units(args: UnitsConfig) -> None:
if data_context().content.collection:
plugins.append('ansible_pytest_collections')
- plugins.append('ansible_forked')
-
if plugins:
env['PYTHONPATH'] += ':%s' % os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'pytest/plugins')
env['PYTEST_PLUGINS'] = ','.join(plugins)
diff --git a/test/lib/ansible_test/_internal/config.py b/test/lib/ansible_test/_internal/config.py
index dbc137b5..4e697933 100644
--- a/test/lib/ansible_test/_internal/config.py
+++ b/test/lib/ansible_test/_internal/config.py
@@ -8,6 +8,7 @@ import sys
import typing as t
from .util import (
+ display,
verify_sys_executable,
version_to_str,
type_guard,
@@ -135,6 +136,12 @@ class EnvironmentConfig(CommonConfig):
data_context().register_payload_callback(host_callback)
+ if args.docker_no_pull:
+ display.warning('The --docker-no-pull option is deprecated and has no effect. It will be removed in a future version of ansible-test.')
+
+ if args.no_pip_check:
+ display.warning('The --no-pip-check option is deprecated and has no effect. It will be removed in a future version of ansible-test.')
+
@property
def controller(self) -> ControllerHostConfig:
"""Host configuration for the controller."""
diff --git a/test/lib/ansible_test/_internal/containers.py b/test/lib/ansible_test/_internal/containers.py
index 92a40a48..869f1fba 100644
--- a/test/lib/ansible_test/_internal/containers.py
+++ b/test/lib/ansible_test/_internal/containers.py
@@ -3,6 +3,7 @@ from __future__ import annotations
import collections.abc as c
import contextlib
+import enum
import json
import random
import time
@@ -45,7 +46,6 @@ from .docker_util import (
get_docker_container_id,
get_docker_host_ip,
get_podman_host_ip,
- get_session_container_name,
require_docker,
detect_host_properties,
)
@@ -101,6 +101,14 @@ class HostType:
managed = 'managed'
+class CleanupMode(enum.Enum):
+ """How container cleanup should be handled."""
+
+ YES = enum.auto()
+ NO = enum.auto()
+ INFO = enum.auto()
+
+
def run_support_container(
args: EnvironmentConfig,
context: str,
@@ -109,7 +117,8 @@ def run_support_container(
ports: list[int],
aliases: t.Optional[list[str]] = None,
start: bool = True,
- cleanup: bool = True,
+ allow_existing: bool = False,
+ cleanup: t.Optional[CleanupMode] = None,
cmd: t.Optional[list[str]] = None,
env: t.Optional[dict[str, str]] = None,
options: t.Optional[list[str]] = None,
@@ -119,8 +128,6 @@ def run_support_container(
Start a container used to support tests, but not run them.
Containers created this way will be accessible from tests.
"""
- name = get_session_container_name(args, name)
-
if args.prime_containers:
docker_pull(args, image)
return None
@@ -158,13 +165,46 @@ def run_support_container(
options.extend(['--ulimit', 'nofile=%s' % max_open_files])
+ support_container_id = None
+
+ if allow_existing:
+ try:
+ container = docker_inspect(args, name)
+ except ContainerNotFoundError:
+ container = None
+
+ if container:
+ support_container_id = container.id
+
+ if not container.running:
+ display.info('Ignoring existing "%s" container which is not running.' % name, verbosity=1)
+ support_container_id = None
+ elif not container.image:
+ display.info('Ignoring existing "%s" container which has the wrong image.' % name, verbosity=1)
+ support_container_id = None
+ elif publish_ports and not all(port and len(port) == 1 for port in [container.get_tcp_port(port) for port in ports]):
+ display.info('Ignoring existing "%s" container which does not have the required published ports.' % name, verbosity=1)
+ support_container_id = None
+
+ if not support_container_id:
+ docker_rm(args, name)
+
if args.dev_systemd_debug:
options.extend(('--env', 'SYSTEMD_LOG_LEVEL=debug'))
- display.info('Starting new "%s" container.' % name)
- docker_pull(args, image)
- support_container_id = run_container(args, image, name, options, create_only=not start, cmd=cmd)
- running = start
+ if support_container_id:
+ display.info('Using existing "%s" container.' % name)
+ running = True
+ existing = True
+ else:
+ display.info('Starting new "%s" container.' % name)
+ docker_pull(args, image)
+ support_container_id = run_container(args, image, name, options, create_only=not start, cmd=cmd)
+ running = start
+ existing = False
+
+ if cleanup is None:
+ cleanup = CleanupMode.INFO if existing else CleanupMode.YES
descriptor = ContainerDescriptor(
image,
@@ -175,6 +215,7 @@ def run_support_container(
aliases,
publish_ports,
running,
+ existing,
cleanup,
env,
)
@@ -653,7 +694,8 @@ class ContainerDescriptor:
aliases: list[str],
publish_ports: bool,
running: bool,
- cleanup: bool,
+ existing: bool,
+ cleanup: CleanupMode,
env: t.Optional[dict[str, str]],
) -> None:
self.image = image
@@ -664,6 +706,7 @@ class ContainerDescriptor:
self.aliases = aliases
self.publish_ports = publish_ports
self.running = running
+ self.existing = existing
self.cleanup = cleanup
self.env = env
self.details: t.Optional[SupportContainer] = None
@@ -762,8 +805,10 @@ def wait_for_file(
def cleanup_containers(args: EnvironmentConfig) -> None:
"""Clean up containers."""
for container in support_containers.values():
- if container.cleanup:
- docker_rm(args, container.name)
+ if container.cleanup == CleanupMode.YES:
+ docker_rm(args, container.container_id)
+ elif container.cleanup == CleanupMode.INFO:
+ display.notice(f'Remember to run `{require_docker().command} rm -f {container.name}` when finished testing.')
def create_hosts_entries(context: dict[str, ContainerAccess]) -> list[str]:
diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py
index 77e6753f..6e44b3d9 100644
--- a/test/lib/ansible_test/_internal/core_ci.py
+++ b/test/lib/ansible_test/_internal/core_ci.py
@@ -28,6 +28,7 @@ from .io import (
from .util import (
ApplicationError,
display,
+ ANSIBLE_TEST_TARGET_ROOT,
mutex,
)
@@ -291,12 +292,18 @@ class AnsibleCoreCI:
"""Start instance."""
display.info(f'Initializing new {self.label} instance using: {self._uri}', verbosity=1)
+ if self.platform == 'windows':
+ winrm_config = read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'ConfigureRemotingForAnsible.ps1'))
+ else:
+ winrm_config = None
+
data = dict(
config=dict(
platform=self.platform,
version=self.version,
architecture=self.arch,
public_key=self.ssh_key.pub_contents,
+ winrm_config=winrm_config,
)
)
diff --git a/test/lib/ansible_test/_internal/coverage_util.py b/test/lib/ansible_test/_internal/coverage_util.py
index 30176236..ae640249 100644
--- a/test/lib/ansible_test/_internal/coverage_util.py
+++ b/test/lib/ansible_test/_internal/coverage_util.py
@@ -69,8 +69,7 @@ class CoverageVersion:
COVERAGE_VERSIONS = (
# IMPORTANT: Keep this in sync with the ansible-test.txt requirements file.
- CoverageVersion('7.3.2', 7, (3, 8), (3, 12)),
- CoverageVersion('6.5.0', 7, (3, 7), (3, 7)),
+ CoverageVersion('6.5.0', 7, (3, 7), (3, 11)),
CoverageVersion('4.5.4', 0, (2, 6), (3, 6)),
)
"""
@@ -251,9 +250,7 @@ def generate_ansible_coverage_config() -> str:
coverage_config = '''
[run]
branch = True
-concurrency =
- multiprocessing
- thread
+concurrency = multiprocessing
parallel = True
omit =
@@ -274,9 +271,7 @@ def generate_collection_coverage_config(args: TestConfig) -> str:
coverage_config = '''
[run]
branch = True
-concurrency =
- multiprocessing
- thread
+concurrency = multiprocessing
parallel = True
disable_warnings =
no-data-collected
diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py
index 84896830..f9e54455 100644
--- a/test/lib/ansible_test/_internal/delegation.py
+++ b/test/lib/ansible_test/_internal/delegation.py
@@ -328,6 +328,7 @@ def filter_options(
) -> c.Iterable[str]:
"""Return an iterable that filters out unwanted CLI options and injects new ones as requested."""
replace: list[tuple[str, int, t.Optional[t.Union[bool, str, list[str]]]]] = [
+ ('--docker-no-pull', 0, False),
('--truncate', 1, str(args.truncate)),
('--color', 1, 'yes' if args.color else 'no'),
('--redact', 0, False),
diff --git a/test/lib/ansible_test/_internal/diff.py b/test/lib/ansible_test/_internal/diff.py
index 5a94aafc..2ddc2ff9 100644
--- a/test/lib/ansible_test/_internal/diff.py
+++ b/test/lib/ansible_test/_internal/diff.py
@@ -143,7 +143,7 @@ class DiffParser:
traceback.format_exc(),
)
- raise ApplicationError(message.strip()) from None
+ raise ApplicationError(message.strip())
self.previous_line = self.line
diff --git a/test/lib/ansible_test/_internal/docker_util.py b/test/lib/ansible_test/_internal/docker_util.py
index 52b9691e..06f383b5 100644
--- a/test/lib/ansible_test/_internal/docker_util.py
+++ b/test/lib/ansible_test/_internal/docker_util.py
@@ -300,7 +300,7 @@ def detect_host_properties(args: CommonConfig) -> ContainerHostProperties:
options = ['--volume', '/sys/fs/cgroup:/probe:ro']
cmd = ['sh', '-c', ' && echo "-" && '.join(multi_line_commands)]
- stdout = run_utility_container(args, 'ansible-test-probe', cmd, options)[0]
+ stdout = run_utility_container(args, f'ansible-test-probe-{args.session_name}', cmd, options)[0]
if args.explain:
return ContainerHostProperties(
@@ -336,7 +336,7 @@ def detect_host_properties(args: CommonConfig) -> ContainerHostProperties:
cmd = ['sh', '-c', 'ulimit -Hn']
try:
- stdout = run_utility_container(args, 'ansible-test-ulimit', cmd, options)[0]
+ stdout = run_utility_container(args, f'ansible-test-ulimit-{args.session_name}', cmd, options)[0]
except SubprocessError as ex:
display.warning(str(ex))
else:
@@ -402,11 +402,6 @@ def detect_host_properties(args: CommonConfig) -> ContainerHostProperties:
return properties
-def get_session_container_name(args: CommonConfig, name: str) -> str:
- """Return the given container name with the current test session name applied to it."""
- return f'{name}-{args.session_name}'
-
-
def run_utility_container(
args: CommonConfig,
name: str,
@@ -415,8 +410,6 @@ def run_utility_container(
data: t.Optional[str] = None,
) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command using the ansible-test utility container, returning stdout and stderr."""
- name = get_session_container_name(args, name)
-
options = options + [
'--name', name,
'--rm',
diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py
index 09812456..a51eb693 100644
--- a/test/lib/ansible_test/_internal/host_profiles.py
+++ b/test/lib/ansible_test/_internal/host_profiles.py
@@ -99,6 +99,7 @@ from .ansible_util import (
)
from .containers import (
+ CleanupMode,
HostType,
get_container_database,
run_support_container,
@@ -446,7 +447,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
@property
def label(self) -> str:
"""Label to apply to resources related to this profile."""
- return f'{"controller" if self.controller else "target"}'
+ return f'{"controller" if self.controller else "target"}-{self.args.session_name}'
def provision(self) -> None:
"""Provision the host before delegation."""
@@ -461,7 +462,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
ports=[22],
publish_ports=not self.controller, # connections to the controller over SSH are not required
options=init_config.options,
- cleanup=False,
+ cleanup=CleanupMode.NO,
cmd=self.build_init_command(init_config, init_probe),
)
@@ -806,7 +807,6 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
- Avoid hanging indefinitely or for an unreasonably long time.
NOTE: The container must have a POSIX-compliant default shell "sh" with a non-builtin "sleep" command.
- The "sleep" command is invoked through "env" to avoid using a shell builtin "sleep" (if present).
"""
command = ''
@@ -814,7 +814,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
command += f'{init_config.command} && '
if sleep or init_config.command_privileged:
- command += 'env sleep 60 ; '
+ command += 'sleep 60 ; '
if not command:
return None
@@ -838,7 +838,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
"""Check the cgroup v1 systemd hierarchy to verify it is writeable for our container."""
probe_script = (read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'check_systemd_cgroup_v1.sh'))
.replace('@MARKER@', self.MARKER)
- .replace('@LABEL@', f'{self.label}-{self.args.session_name}'))
+ .replace('@LABEL@', self.label))
cmd = ['sh']
@@ -853,7 +853,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
def create_systemd_cgroup_v1(self) -> str:
"""Create a unique ansible-test cgroup in the v1 systemd hierarchy and return its path."""
- self.cgroup_path = f'/sys/fs/cgroup/systemd/ansible-test-{self.label}-{self.args.session_name}'
+ self.cgroup_path = f'/sys/fs/cgroup/systemd/ansible-test-{self.label}'
# Privileged mode is required to create the cgroup directories on some hosts, such as Fedora 36 and RHEL 9.0.
# The mkdir command will fail with "Permission denied" otherwise.
diff --git a/test/lib/ansible_test/_internal/http.py b/test/lib/ansible_test/_internal/http.py
index 66afc60d..8b4154bf 100644
--- a/test/lib/ansible_test/_internal/http.py
+++ b/test/lib/ansible_test/_internal/http.py
@@ -126,7 +126,7 @@ class HttpResponse:
try:
return json.loads(self.response)
except ValueError:
- raise HttpError(self.status_code, 'Cannot parse response to %s %s as JSON:\n%s' % (self.method, self.url, self.response)) from None
+ raise HttpError(self.status_code, 'Cannot parse response to %s %s as JSON:\n%s' % (self.method, self.url, self.response))
class HttpError(ApplicationError):
diff --git a/test/lib/ansible_test/_internal/junit_xml.py b/test/lib/ansible_test/_internal/junit_xml.py
index 8c4dba01..76c8878b 100644
--- a/test/lib/ansible_test/_internal/junit_xml.py
+++ b/test/lib/ansible_test/_internal/junit_xml.py
@@ -15,7 +15,7 @@ from xml.dom import minidom
from xml.etree import ElementTree as ET
-@dataclasses.dataclass
+@dataclasses.dataclass # type: ignore[misc] # https://github.com/python/mypy/issues/5374
class TestResult(metaclass=abc.ABCMeta):
"""Base class for the result of a test case."""
diff --git a/test/lib/ansible_test/_internal/pypi_proxy.py b/test/lib/ansible_test/_internal/pypi_proxy.py
index d119efa1..5380dd9b 100644
--- a/test/lib/ansible_test/_internal/pypi_proxy.py
+++ b/test/lib/ansible_test/_internal/pypi_proxy.py
@@ -76,7 +76,7 @@ def run_pypi_proxy(args: EnvironmentConfig, targets_use_pypi: bool) -> None:
args=args,
context='__pypi_proxy__',
image=image,
- name='pypi-test-container',
+ name=f'pypi-test-container-{args.session_name}',
ports=[port],
)
diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py
index 81006e41..506b802c 100644
--- a/test/lib/ansible_test/_internal/python_requirements.py
+++ b/test/lib/ansible_test/_internal/python_requirements.py
@@ -297,7 +297,7 @@ def run_pip(
connection.run([python.path], data=script, capture=True)
except SubprocessError as ex:
if 'pip is unavailable:' in ex.stdout + ex.stderr:
- raise PipUnavailableError(python) from None
+ raise PipUnavailableError(python)
raise
@@ -441,8 +441,8 @@ def get_venv_packages(python: PythonConfig) -> dict[str, str]:
# See: https://github.com/ansible/base-test-container/blob/main/files/installer.py
default_packages = dict(
- pip='23.1.2',
- setuptools='67.7.2',
+ pip='21.3.1',
+ setuptools='60.8.2',
wheel='0.37.1',
)
@@ -452,6 +452,11 @@ def get_venv_packages(python: PythonConfig) -> dict[str, str]:
setuptools='44.1.1', # 45.0.0 requires Python 3.5+
wheel=None,
),
+ '3.5': dict(
+ pip='20.3.4', # 21.0 requires Python 3.6+
+ setuptools='50.3.2', # 51.0.0 requires Python 3.6+
+ wheel=None,
+ ),
'3.6': dict(
pip='21.3.1', # 22.0 requires Python 3.7+
setuptools='59.6.0', # 59.7.0 requires Python 3.7+
diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py
index 394c2632..1859be5b 100644
--- a/test/lib/ansible_test/_internal/util.py
+++ b/test/lib/ansible_test/_internal/util.py
@@ -31,6 +31,11 @@ from termios import TIOCGWINSZ
# CAUTION: Avoid third-party imports in this module whenever possible.
# Any third-party imports occurring here will result in an error if they are vendored by ansible-core.
+try:
+ from typing_extensions import TypeGuard # TypeGuard was added in Python 3.10
+except ImportError:
+ TypeGuard = None
+
from .locale_util import (
LOCALE_WARNING,
CONFIGURED_LOCALE,
@@ -431,7 +436,7 @@ def raw_command(
display.info(f'{description}: {escaped_cmd}', verbosity=cmd_verbosity, truncate=True)
display.info('Working directory: %s' % cwd, verbosity=2)
- program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required=False)
+ program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required='warning')
if program:
display.info('Program found: %s' % program, verbosity=2)
@@ -1150,7 +1155,7 @@ def verify_sys_executable(path: str) -> t.Optional[str]:
return expected_executable
-def type_guard(sequence: c.Sequence[t.Any], guard_type: t.Type[C]) -> t.TypeGuard[c.Sequence[C]]:
+def type_guard(sequence: c.Sequence[t.Any], guard_type: t.Type[C]) -> TypeGuard[c.Sequence[C]]:
"""
Raises an exception if any item in the given sequence does not match the specified guard type.
Use with assert so that type checkers are aware of the type guard.
diff --git a/test/lib/ansible_test/_internal/util_common.py b/test/lib/ansible_test/_internal/util_common.py
index 77a6165c..222366e4 100644
--- a/test/lib/ansible_test/_internal/util_common.py
+++ b/test/lib/ansible_test/_internal/util_common.py
@@ -88,7 +88,7 @@ class ExitHandler:
try:
func(*args, **kwargs)
- except BaseException as ex: # pylint: disable=broad-exception-caught
+ except BaseException as ex: # pylint: disable=broad-except
last_exception = ex
display.fatal(f'Exit handler failed: {ex}')
@@ -498,14 +498,9 @@ def run_command(
)
-def yamlcheck(python: PythonConfig, explain: bool = False) -> t.Optional[bool]:
+def yamlcheck(python: PythonConfig) -> t.Optional[bool]:
"""Return True if PyYAML has libyaml support, False if it does not and None if it was not found."""
- stdout = raw_command([python.path, os.path.join(ANSIBLE_TEST_TARGET_TOOLS_ROOT, 'yamlcheck.py')], capture=True, explain=explain)[0]
-
- if explain:
- return None
-
- result = json.loads(stdout)
+ result = json.loads(raw_command([python.path, os.path.join(ANSIBLE_TEST_TARGET_TOOLS_ROOT, 'yamlcheck.py')], capture=True)[0])
if not result['yaml']:
return None
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json
index da4a0b10..88858aeb 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json
@@ -2,10 +2,6 @@
"extensions": [
".py"
],
- "prefixes": [
- "lib/ansible/",
- "plugins/"
- ],
"ignore_self": true,
"output": "path-line-column-message"
}
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json
index da4a0b10..88858aeb 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json
@@ -2,10 +2,6 @@
"extensions": [
".py"
],
- "prefixes": [
- "lib/ansible/",
- "plugins/"
- ],
"ignore_self": true,
"output": "path-line-column-message"
}
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
index 188d50fe..6cf27774 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
@@ -16,19 +16,9 @@ from voluptuous.humanize import humanize_error
from ansible.module_utils.compat.version import StrictVersion, LooseVersion
from ansible.module_utils.six import string_types
-from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.version import SemanticVersion
-def fqcr(value):
- """Validate a FQCR."""
- if not isinstance(value, string_types):
- raise Invalid('Must be a string that is a FQCR')
- if not AnsibleCollectionRef.is_valid_fqcr(value):
- raise Invalid('Must be a FQCR')
- return value
-
-
def isodate(value, check_deprecation_date=False, is_tombstone=False):
"""Validate a datetime.date or ISO 8601 date string."""
# datetime.date objects come from YAML dates, these are ok
@@ -136,15 +126,12 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
with open(path, 'r', encoding='utf-8') as f_path:
routing = yaml.safe_load(f_path)
except yaml.error.MarkedYAMLError as ex:
- print('%s:%d:%d: YAML load failed: %s' % (
- path,
- ex.context_mark.line + 1 if ex.context_mark else 0,
- ex.context_mark.column + 1 if ex.context_mark else 0,
- re.sub(r'\s+', ' ', str(ex)),
- ))
+ print('%s:%d:%d: YAML load failed: %s' % (path, ex.context_mark.line +
+ 1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex))))
return
except Exception as ex: # pylint: disable=broad-except
- print('%s:%d:%d: YAML load failed: %s' % (path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
+ print('%s:%d:%d: YAML load failed: %s' %
+ (path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
return
if is_ansible:
@@ -197,37 +184,17 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
avoid_additional_data
)
- plugins_routing_common_schema = Schema({
- ('deprecation'): Any(deprecation_schema),
- ('tombstone'): Any(tombstoning_schema),
- ('redirect'): fqcr,
- }, extra=PREVENT_EXTRA)
-
- plugin_routing_schema = Any(plugins_routing_common_schema)
-
- # Adjusted schema for modules only
- plugin_routing_schema_modules = Any(
- plugins_routing_common_schema.extend({
- ('action_plugin'): fqcr}
- )
- )
-
- # Adjusted schema for module_utils
- plugin_routing_schema_mu = Any(
- plugins_routing_common_schema.extend({
- ('redirect'): Any(*string_types)}
- ),
+ plugin_routing_schema = Any(
+ Schema({
+ ('deprecation'): Any(deprecation_schema),
+ ('tombstone'): Any(tombstoning_schema),
+ ('redirect'): Any(*string_types),
+ }, extra=PREVENT_EXTRA),
)
list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema}
for str_type in string_types]
- list_dict_plugin_routing_schema_mu = [{str_type: plugin_routing_schema_mu}
- for str_type in string_types]
-
- list_dict_plugin_routing_schema_modules = [{str_type: plugin_routing_schema_modules}
- for str_type in string_types]
-
plugin_schema = Schema({
('action'): Any(None, *list_dict_plugin_routing_schema),
('become'): Any(None, *list_dict_plugin_routing_schema),
@@ -240,8 +207,8 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
('httpapi'): Any(None, *list_dict_plugin_routing_schema),
('inventory'): Any(None, *list_dict_plugin_routing_schema),
('lookup'): Any(None, *list_dict_plugin_routing_schema),
- ('module_utils'): Any(None, *list_dict_plugin_routing_schema_mu),
- ('modules'): Any(None, *list_dict_plugin_routing_schema_modules),
+ ('module_utils'): Any(None, *list_dict_plugin_routing_schema),
+ ('modules'): Any(None, *list_dict_plugin_routing_schema),
('netconf'): Any(None, *list_dict_plugin_routing_schema),
('shell'): Any(None, *list_dict_plugin_routing_schema),
('strategy'): Any(None, *list_dict_plugin_routing_schema),
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json
index ccee80a2..776590b7 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json
@@ -2,9 +2,5 @@
"extensions": [
".py"
],
- "prefixes": [
- "lib/ansible/",
- "plugins/"
- ],
"output": "path-line-column-message"
}
diff --git a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini
index 41d824b2..4d93f359 100644
--- a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini
+++ b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini
@@ -34,9 +34,6 @@ ignore_missing_imports = True
[mypy-md5.*]
ignore_missing_imports = True
-[mypy-imp.*]
-ignore_missing_imports = True
-
[mypy-scp.*]
ignore_missing_imports = True
diff --git a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini
index 6be35724..55738f87 100644
--- a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini
+++ b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini
@@ -6,10 +6,10 @@
# There are ~350 errors reported in ansible-test when strict optional checking is enabled.
# Until the number of occurrences are greatly reduced, it's better to disable strict checking.
strict_optional = False
-# There are ~13 type-abstract errors reported in ansible-test.
-# This is due to assumptions mypy makes about Type and abstract types.
-# See: https://discuss.python.org/t/add-abstracttype-to-the-typing-module/21996/13
-disable_error_code = type-abstract
+# There are ~25 errors reported in ansible-test under the 'misc' code.
+# The majority of those errors are "Only concrete class can be given", which is due to a limitation of mypy.
+# See: https://github.com/python/mypy/issues/5374
+disable_error_code = misc
[mypy-argcomplete]
ignore_missing_imports = True
diff --git a/test/lib/ansible_test/_util/controller/sanity/mypy/packaging.ini b/test/lib/ansible_test/_util/controller/sanity/mypy/packaging.ini
deleted file mode 100644
index 70b0983c..00000000
--- a/test/lib/ansible_test/_util/controller/sanity/mypy/packaging.ini
+++ /dev/null
@@ -1,20 +0,0 @@
-# IMPORTANT
-# Set "ignore_missing_imports" per package below, rather than globally.
-# That will help identify missing type stubs that should be added to the sanity test environment.
-
-[mypy]
-
-[mypy-docutils]
-ignore_missing_imports = True
-
-[mypy-docutils.core]
-ignore_missing_imports = True
-
-[mypy-docutils.writers]
-ignore_missing_imports = True
-
-[mypy-docutils.writers.manpage]
-ignore_missing_imports = True
-
-[mypy-argcomplete]
-ignore_missing_imports = True
diff --git a/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt b/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt
index 4d1de692..659c7f59 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt
+++ b/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt
@@ -2,8 +2,3 @@ E402
W503
W504
E741
-
-# The E203 rule is not PEP 8 compliant.
-# Unfortunately this means it also conflicts with the output from `black`.
-# See: https://github.com/PyCQA/pycodestyle/issues/373
-E203
diff --git a/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1
index 7beb38c1..2ae13b4c 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1
+++ b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1
@@ -4,9 +4,6 @@
Enable = $true
MaximumLineLength = 160
}
- PSAvoidSemicolonsAsLineTerminators = @{
- Enable = $true
- }
PSPlaceOpenBrace = @{
Enable = $true
OnSameLine = $true
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
index f8a0a8af..aa347729 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
@@ -10,7 +10,6 @@ disable=
raise-missing-from, # Python 2.x does not support raise from
super-with-arguments, # Python 2.x does not support super without arguments
redundant-u-string-prefix, # Python 2.x support still required
- broad-exception-raised, # many exceptions with no need for a custom type
too-few-public-methods,
too-many-arguments,
too-many-branches,
@@ -20,7 +19,6 @@ disable=
too-many-nested-blocks,
too-many-return-statements,
too-many-statements,
- use-dict-literal, # ignoring as a common style issue
useless-return, # complains about returning None when the return type is optional
[BASIC]
@@ -57,5 +55,3 @@ preferred-modules =
# Listing them here makes it possible to enable the import-error check.
ignored-modules =
py,
- pytest,
- _pytest.runner,
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
index 5bec36fd..1c03472c 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
@@ -7,7 +7,7 @@ disable=
deprecated-module, # results vary by Python version
duplicate-code, # consistent results require running with --jobs 1 and testing all files
import-outside-toplevel, # common pattern in ansible related code
- broad-exception-raised, # many exceptions with no need for a custom type
+ raise-missing-from, # Python 2.x does not support raise from
too-few-public-methods,
too-many-public-methods,
too-many-arguments,
@@ -18,7 +18,6 @@ disable=
too-many-nested-blocks,
too-many-return-statements,
too-many-statements,
- use-dict-literal, # ignoring as a common style issue
unspecified-encoding, # always run with UTF-8 encoding enforced
useless-return, # complains about returning None when the return type is optional
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg
index c30eb37a..e3aa8eed 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg
@@ -17,7 +17,6 @@ disable=
too-many-nested-blocks,
too-many-return-statements,
too-many-statements,
- use-dict-literal, # ignoring as a common style issue
unspecified-encoding, # always run with UTF-8 encoding enforced
useless-return, # complains about returning None when the return type is optional
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg
index 762d488d..38b8d2d0 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg
@@ -9,8 +9,7 @@ disable=
attribute-defined-outside-init,
bad-indentation,
bad-mcs-classmethod-argument,
- broad-exception-caught,
- broad-exception-raised,
+ broad-except,
c-extension-no-member,
cell-var-from-loop,
chained-comparison,
@@ -30,7 +29,6 @@ disable=
consider-using-max-builtin,
consider-using-min-builtin,
cyclic-import, # consistent results require running with --jobs 1 and testing all files
- deprecated-comment, # custom plugin only used by ansible-core, not collections
deprecated-method, # results vary by Python version
deprecated-module, # results vary by Python version
duplicate-code, # consistent results require running with --jobs 1 and testing all files
@@ -97,6 +95,8 @@ disable=
too-many-public-methods,
too-many-return-statements,
too-many-statements,
+ trailing-comma-tuple,
+ trailing-comma-tuple,
try-except-raise,
unbalanced-tuple-unpacking,
undefined-loop-variable,
@@ -110,9 +110,10 @@ disable=
unsupported-delete-operation,
unsupported-membership-test,
unused-argument,
+ unused-import,
unused-variable,
unspecified-encoding, # always run with UTF-8 encoding enforced
- use-dict-literal, # ignoring as a common style issue
+ use-dict-literal, # many occurrences
use-list-literal, # many occurrences
use-implicit-booleaness-not-comparison, # many occurrences
useless-object-inheritance,
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg
index 825e5df7..6a242b8d 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg
@@ -10,8 +10,7 @@ disable=
attribute-defined-outside-init,
bad-indentation,
bad-mcs-classmethod-argument,
- broad-exception-caught,
- broad-exception-raised,
+ broad-except,
c-extension-no-member,
cell-var-from-loop,
chained-comparison,
@@ -62,6 +61,8 @@ disable=
not-a-mapping,
not-an-iterable,
not-callable,
+ pointless-statement,
+ pointless-string-statement,
possibly-unused-variable,
protected-access,
raise-missing-from, # Python 2.x does not support raise from
@@ -90,6 +91,8 @@ disable=
too-many-public-methods,
too-many-return-statements,
too-many-statements,
+ trailing-comma-tuple,
+ trailing-comma-tuple,
try-except-raise,
unbalanced-tuple-unpacking,
undefined-loop-variable,
@@ -102,9 +105,10 @@ disable=
unsupported-delete-operation,
unsupported-membership-test,
unused-argument,
+ unused-import,
unused-variable,
unspecified-encoding, # always run with UTF-8 encoding enforced
- use-dict-literal, # ignoring as a common style issue
+ use-dict-literal, # many occurrences
use-list-literal, # many occurrences
use-implicit-booleaness-not-comparison, # many occurrences
useless-object-inheritance,
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
index f6c83373..79b8bf15 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
@@ -5,31 +5,14 @@
from __future__ import annotations
import datetime
-import functools
-import json
import re
-import shlex
import typing as t
-from tokenize import COMMENT, TokenInfo
import astroid
-# support pylint 2.x and 3.x -- remove when supporting only 3.x
-try:
- from pylint.interfaces import IAstroidChecker, ITokenChecker
-except ImportError:
- class IAstroidChecker:
- """Backwards compatibility for 2.x / 3.x support."""
-
- class ITokenChecker:
- """Backwards compatibility for 2.x / 3.x support."""
-
-try:
- from pylint.checkers.utils import check_messages
-except ImportError:
- from pylint.checkers.utils import only_required_for_messages as check_messages
-
-from pylint.checkers import BaseChecker, BaseTokenChecker
+from pylint.interfaces import IAstroidChecker
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import check_messages
from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.six import string_types
@@ -112,7 +95,7 @@ ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version_raw.split('.')[:3]))
def _get_expr_name(node):
- """Function to get either ``attrname`` or ``name`` from ``node.func.expr``
+ """Funciton to get either ``attrname`` or ``name`` from ``node.func.expr``
Created specifically for the case of ``display.deprecated`` or ``self._display.deprecated``
"""
@@ -123,17 +106,6 @@ def _get_expr_name(node):
return node.func.expr.name
-def _get_func_name(node):
- """Function to get either ``attrname`` or ``name`` from ``node.func``
-
- Created specifically for the case of ``from ansible.module_utils.common.warnings import deprecate``
- """
- try:
- return node.func.attrname
- except AttributeError:
- return node.func.name
-
-
def parse_isodate(value):
"""Parse an ISO 8601 date string."""
msg = 'Expected ISO 8601 date string (YYYY-MM-DD)'
@@ -146,7 +118,7 @@ def parse_isodate(value):
try:
return datetime.datetime.strptime(value, '%Y-%m-%d').date()
except ValueError:
- raise ValueError(msg) from None
+ raise ValueError(msg)
class AnsibleDeprecatedChecker(BaseChecker):
@@ -188,8 +160,6 @@ class AnsibleDeprecatedChecker(BaseChecker):
self.add_message('ansible-deprecated-date', node=node, args=(date,))
def _check_version(self, node, version, collection_name):
- if collection_name is None:
- collection_name = 'ansible.builtin'
if not isinstance(version, (str, float)):
if collection_name == 'ansible.builtin':
symbol = 'ansible-invalid-deprecated-version'
@@ -227,17 +197,12 @@ class AnsibleDeprecatedChecker(BaseChecker):
@property
def collection_name(self) -> t.Optional[str]:
"""Return the collection name, or None if ansible-core is being tested."""
- return self.linter.config.collection_name
+ return self.config.collection_name
@property
def collection_version(self) -> t.Optional[SemanticVersion]:
"""Return the collection version, or None if ansible-core is being tested."""
- if self.linter.config.collection_version is None:
- return None
- sem_ver = SemanticVersion(self.linter.config.collection_version)
- # Ignore pre-release for version comparison to catch issues before the final release is cut.
- sem_ver.prerelease = ()
- return sem_ver
+ return SemanticVersion(self.config.collection_version) if self.config.collection_version is not None else None
@check_messages(*(MSGS.keys()))
def visit_call(self, node):
@@ -246,9 +211,8 @@ class AnsibleDeprecatedChecker(BaseChecker):
date = None
collection_name = None
try:
- funcname = _get_func_name(node)
- if (funcname == 'deprecated' and 'display' in _get_expr_name(node) or
- funcname == 'deprecate'):
+ if (node.func.attrname == 'deprecated' and 'display' in _get_expr_name(node) or
+ node.func.attrname == 'deprecate' and _get_expr_name(node)):
if node.keywords:
for keyword in node.keywords:
if len(node.keywords) == 1 and keyword.arg is None:
@@ -294,137 +258,6 @@ class AnsibleDeprecatedChecker(BaseChecker):
pass
-class AnsibleDeprecatedCommentChecker(BaseTokenChecker):
- """Checks for ``# deprecated:`` comments to ensure that the ``version``
- has not passed or met the time for removal
- """
-
- __implements__ = (ITokenChecker,)
-
- name = 'deprecated-comment'
- msgs = {
- 'E9601': ("Deprecated core version (%r) found: %s",
- "ansible-deprecated-version-comment",
- "Used when a '# deprecated:' comment specifies a version "
- "less than or equal to the current version of Ansible",
- {'minversion': (2, 6)}),
- 'E9602': ("Deprecated comment contains invalid keys %r",
- "ansible-deprecated-version-comment-invalid-key",
- "Used when a '#deprecated:' comment specifies invalid data",
- {'minversion': (2, 6)}),
- 'E9603': ("Deprecated comment missing version",
- "ansible-deprecated-version-comment-missing-version",
- "Used when a '#deprecated:' comment specifies invalid data",
- {'minversion': (2, 6)}),
- 'E9604': ("Deprecated python version (%r) found: %s",
- "ansible-deprecated-python-version-comment",
- "Used when a '#deprecated:' comment specifies a python version "
- "less than or equal to the minimum python version",
- {'minversion': (2, 6)}),
- 'E9605': ("Deprecated comment contains invalid version %r: %s",
- "ansible-deprecated-version-comment-invalid-version",
- "Used when a '#deprecated:' comment specifies an invalid version",
- {'minversion': (2, 6)}),
- }
-
- options = (
- ('min-python-version-db', {
- 'default': None,
- 'type': 'string',
- 'metavar': '<path>',
- 'help': 'The path to the DB mapping paths to minimum Python versions.',
- }),
- )
-
- def process_tokens(self, tokens: list[TokenInfo]) -> None:
- for token in tokens:
- if token.type == COMMENT:
- self._process_comment(token)
-
- def _deprecated_string_to_dict(self, token: TokenInfo, string: str) -> dict[str, str]:
- valid_keys = {'description', 'core_version', 'python_version'}
- data = dict.fromkeys(valid_keys)
- for opt in shlex.split(string):
- if '=' not in opt:
- data[opt] = None
- continue
- key, _sep, value = opt.partition('=')
- data[key] = value
- if not any((data['core_version'], data['python_version'])):
- self.add_message(
- 'ansible-deprecated-version-comment-missing-version',
- line=token.start[0],
- col_offset=token.start[1],
- )
- bad = set(data).difference(valid_keys)
- if bad:
- self.add_message(
- 'ansible-deprecated-version-comment-invalid-key',
- line=token.start[0],
- col_offset=token.start[1],
- args=(','.join(bad),)
- )
- return data
-
- @functools.cached_property
- def _min_python_version_db(self) -> dict[str, str]:
- """A dictionary of absolute file paths and their minimum required Python version."""
- with open(self.linter.config.min_python_version_db) as db_file:
- return json.load(db_file)
-
- def _process_python_version(self, token: TokenInfo, data: dict[str, str]) -> None:
- current_file = self.linter.current_file
- check_version = self._min_python_version_db[current_file]
-
- try:
- if LooseVersion(data['python_version']) < LooseVersion(check_version):
- self.add_message(
- 'ansible-deprecated-python-version-comment',
- line=token.start[0],
- col_offset=token.start[1],
- args=(
- data['python_version'],
- data['description'] or 'description not provided',
- ),
- )
- except (ValueError, TypeError) as exc:
- self.add_message(
- 'ansible-deprecated-version-comment-invalid-version',
- line=token.start[0],
- col_offset=token.start[1],
- args=(data['python_version'], exc)
- )
-
- def _process_core_version(self, token: TokenInfo, data: dict[str, str]) -> None:
- try:
- if ANSIBLE_VERSION >= LooseVersion(data['core_version']):
- self.add_message(
- 'ansible-deprecated-version-comment',
- line=token.start[0],
- col_offset=token.start[1],
- args=(
- data['core_version'],
- data['description'] or 'description not provided',
- )
- )
- except (ValueError, TypeError) as exc:
- self.add_message(
- 'ansible-deprecated-version-comment-invalid-version',
- line=token.start[0],
- col_offset=token.start[1],
- args=(data['core_version'], exc)
- )
-
- def _process_comment(self, token: TokenInfo) -> None:
- if token.string.startswith('# deprecated:'):
- data = self._deprecated_string_to_dict(token, token.string[13:].strip())
- if data['core_version']:
- self._process_core_version(token, data)
- if data['python_version']:
- self._process_python_version(token, data)
-
-
def register(linter):
"""required method to auto register this checker """
linter.register_checker(AnsibleDeprecatedChecker(linter))
- linter.register_checker(AnsibleDeprecatedCommentChecker(linter))
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/hide_unraisable.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/hide_unraisable.py
deleted file mode 100644
index d3d0f979..00000000
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/hide_unraisable.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Temporary plugin to prevent stdout noise pollution from finalization of abandoned generators under Python 3.12"""
-from __future__ import annotations
-
-import sys
-import typing as t
-
-if t.TYPE_CHECKING:
- from pylint.lint import PyLinter
-
-
-def _mask_finalizer_valueerror(ur: t.Any) -> None:
- """Mask only ValueErrors from finalizing abandoned generators; delegate everything else"""
- # work around Py3.12 finalizer changes that sometimes spews this error message to stdout
- # see https://github.com/pylint-dev/pylint/issues/9138
- if ur.exc_type is ValueError and 'generator already executing' in str(ur.exc_value):
- return
-
- sys.__unraisablehook__(ur)
-
-
-def register(linter: PyLinter) -> None: # pylint: disable=unused-argument
- """PyLint plugin registration entrypoint"""
- if sys.version_info >= (3, 12):
- sys.unraisablehook = _mask_finalizer_valueerror
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
index 83c27734..934a9ae7 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
@@ -5,26 +5,23 @@
from __future__ import annotations
import astroid
-
-# support pylint 2.x and 3.x -- remove when supporting only 3.x
-try:
- from pylint.interfaces import IAstroidChecker
-except ImportError:
- class IAstroidChecker:
- """Backwards compatibility for 2.x / 3.x support."""
-
-try:
- from pylint.checkers.utils import check_messages
-except ImportError:
- from pylint.checkers.utils import only_required_for_messages as check_messages
-
+from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers import utils
+from pylint.checkers.utils import check_messages
+try:
+ from pylint.checkers.utils import parse_format_method_string
+except ImportError:
+ # noinspection PyUnresolvedReferences
+ from pylint.checkers.strings import parse_format_method_string
MSGS = {
- 'E9305': ("disabled", # kept for backwards compatibility with inline ignores, remove after 2.14 is EOL
+ 'E9305': ("Format string contains automatic field numbering "
+ "specification",
"ansible-format-automatic-specification",
- "disabled"),
+ "Used when a PEP 3101 format string contains automatic "
+ "field numbering (e.g. '{}').",
+ {'minversion': (2, 6)}),
'E9390': ("bytes object has no .format attribute",
"ansible-no-format-on-bytestring",
"Used when a bytestring was used as a PEP 3101 format string "
@@ -67,6 +64,20 @@ class AnsibleStringFormatChecker(BaseChecker):
if isinstance(strnode.value, bytes):
self.add_message('ansible-no-format-on-bytestring', node=node)
return
+ if not isinstance(strnode.value, str):
+ return
+
+ if node.starargs or node.kwargs:
+ return
+ try:
+ num_args = parse_format_method_string(strnode.value)[1]
+ except utils.IncompleteFormatString:
+ return
+
+ if num_args:
+ self.add_message('ansible-format-automatic-specification',
+ node=node)
+ return
def register(linter):
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py
index f121ea58..1be42f51 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py
@@ -6,14 +6,8 @@ import typing as t
import astroid
-# support pylint 2.x and 3.x -- remove when supporting only 3.x
-try:
- from pylint.interfaces import IAstroidChecker
-except ImportError:
- class IAstroidChecker:
- """Backwards compatibility for 2.x / 3.x support."""
-
from pylint.checkers import BaseChecker
+from pylint.interfaces import IAstroidChecker
ANSIBLE_TEST_MODULES_PATH = os.environ['ANSIBLE_TEST_MODULES_PATH']
ANSIBLE_TEST_MODULE_UTILS_PATH = os.environ['ANSIBLE_TEST_MODULE_UTILS_PATH']
@@ -100,7 +94,10 @@ class AnsibleUnwantedChecker(BaseChecker):
)),
# see https://docs.python.org/3/library/collections.abc.html
- collections=UnwantedEntry('ansible.module_utils.six.moves.collections_abc',
+ collections=UnwantedEntry('ansible.module_utils.common._collections_compat',
+ ignore_paths=(
+ '/lib/ansible/module_utils/common/_collections_compat.py',
+ ),
names=(
'MappingView',
'ItemsView',
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
index 2b92a56c..25c61798 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
@@ -33,9 +33,6 @@ from collections.abc import Mapping
from contextlib import contextmanager
from fnmatch import fnmatch
-from antsibull_docs_parser import dom
-from antsibull_docs_parser.parser import parse, Context
-
import yaml
from voluptuous.humanize import humanize_error
@@ -66,7 +63,6 @@ setup_collection_loader()
from ansible import __version__ as ansible_version
from ansible.executor.module_common import REPLACER_WINDOWS, NEW_STYLE_PYTHON_MODULE_RE
-from ansible.module_utils.common.collections import is_iterable
from ansible.module_utils.common.parameters import DEFAULT_TYPE_VALIDATORS
from ansible.module_utils.compat.version import StrictVersion, LooseVersion
from ansible.module_utils.basic import to_bytes
@@ -78,13 +74,9 @@ from ansible.utils.version import SemanticVersion
from .module_args import AnsibleModuleImportError, AnsibleModuleNotInitialized, get_argument_spec
-from .schema import (
- ansible_module_kwargs_schema,
- doc_schema,
- return_schema,
-)
+from .schema import ansible_module_kwargs_schema, doc_schema, return_schema
-from .utils import CaptureStd, NoArgsAnsibleModule, compare_unordered_lists, parse_yaml, parse_isodate
+from .utils import CaptureStd, NoArgsAnsibleModule, compare_unordered_lists, is_empty, parse_yaml, parse_isodate
if PY3:
@@ -305,6 +297,8 @@ class ModuleValidator(Validator):
# win_dsc is a dynamic arg spec, the docs won't ever match
PS_ARG_VALIDATE_REJECTLIST = frozenset(('win_dsc.ps1', ))
+ ACCEPTLIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function'))
+
def __init__(self, path, git_cache: GitCache, analyze_arg_spec=False, collection=None, collection_version=None,
reporter=None, routing=None, plugin_type='module'):
super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
@@ -407,10 +401,13 @@ class ModuleValidator(Validator):
if isinstance(child, ast.Expr) and isinstance(child.value, ast.Constant) and isinstance(child.value.value, str):
continue
- # allow __future__ imports (the specific allowed imports are checked by other sanity tests)
+ # allowed from __future__ imports
if isinstance(child, ast.ImportFrom) and child.module == '__future__':
- continue
-
+ for future_import in child.names:
+ if future_import.name not in self.ACCEPTLIST_FUTURE_IMPORTS:
+ break
+ else:
+ continue
return False
return True
except AttributeError:
@@ -639,21 +636,29 @@ class ModuleValidator(Validator):
)
def _ensure_imports_below_docs(self, doc_info, first_callable):
- doc_line_numbers = [lineno for lineno in (doc_info[key]['lineno'] for key in doc_info) if lineno > 0]
-
- min_doc_line = min(doc_line_numbers) if doc_line_numbers else None
+ min_doc_line = min(doc_info[key]['lineno'] for key in doc_info)
max_doc_line = max(doc_info[key]['end_lineno'] for key in doc_info)
import_lines = []
for child in self.ast.body:
if isinstance(child, (ast.Import, ast.ImportFrom)):
- # allow __future__ imports (the specific allowed imports are checked by other sanity tests)
if isinstance(child, ast.ImportFrom) and child.module == '__future__':
- continue
-
+ # allowed from __future__ imports
+ for future_import in child.names:
+ if future_import.name not in self.ACCEPTLIST_FUTURE_IMPORTS:
+ self.reporter.error(
+ path=self.object_path,
+ code='illegal-future-imports',
+ msg=('Only the following from __future__ imports are allowed: %s'
+ % ', '.join(self.ACCEPTLIST_FUTURE_IMPORTS)),
+ line=child.lineno
+ )
+ break
+ else: # for-else. If we didn't find a problem nad break out of the loop, then this is a legal import
+ continue
import_lines.append(child.lineno)
- if min_doc_line and child.lineno < min_doc_line:
+ if child.lineno < min_doc_line:
self.reporter.error(
path=self.object_path,
code='import-before-documentation',
@@ -670,7 +675,7 @@ class ModuleValidator(Validator):
for grandchild in bodies:
if isinstance(grandchild, (ast.Import, ast.ImportFrom)):
import_lines.append(grandchild.lineno)
- if min_doc_line and grandchild.lineno < min_doc_line:
+ if grandchild.lineno < min_doc_line:
self.reporter.error(
path=self.object_path,
code='import-before-documentation',
@@ -808,22 +813,22 @@ class ModuleValidator(Validator):
continue
if grandchild.id == 'DOCUMENTATION':
- docs['DOCUMENTATION']['value'] = child.value.value
+ docs['DOCUMENTATION']['value'] = child.value.s
docs['DOCUMENTATION']['lineno'] = child.lineno
docs['DOCUMENTATION']['end_lineno'] = (
- child.lineno + len(child.value.value.splitlines())
+ child.lineno + len(child.value.s.splitlines())
)
elif grandchild.id == 'EXAMPLES':
- docs['EXAMPLES']['value'] = child.value.value
+ docs['EXAMPLES']['value'] = child.value.s
docs['EXAMPLES']['lineno'] = child.lineno
docs['EXAMPLES']['end_lineno'] = (
- child.lineno + len(child.value.value.splitlines())
+ child.lineno + len(child.value.s.splitlines())
)
elif grandchild.id == 'RETURN':
- docs['RETURN']['value'] = child.value.value
+ docs['RETURN']['value'] = child.value.s
docs['RETURN']['lineno'] = child.lineno
docs['RETURN']['end_lineno'] = (
- child.lineno + len(child.value.value.splitlines())
+ child.lineno + len(child.value.s.splitlines())
)
return docs
@@ -1036,8 +1041,6 @@ class ModuleValidator(Validator):
'invalid-documentation',
)
- self._validate_all_semantic_markup(doc, returns)
-
if not self.collection:
existing_doc = self._check_for_new_args(doc)
self._check_version_added(doc, existing_doc)
@@ -1163,113 +1166,6 @@ class ModuleValidator(Validator):
return doc_info, doc
- def _check_sem_option(self, part: dom.OptionNamePart, current_plugin: dom.PluginIdentifier) -> None:
- if part.plugin is None or part.plugin != current_plugin:
- return
- if part.entrypoint is not None:
- return
- if tuple(part.link) not in self._all_options:
- self.reporter.error(
- path=self.object_path,
- code='invalid-documentation-markup',
- msg='Directive "%s" contains a non-existing option "%s"' % (part.source, part.name)
- )
-
- def _check_sem_return_value(self, part: dom.ReturnValuePart, current_plugin: dom.PluginIdentifier) -> None:
- if part.plugin is None or part.plugin != current_plugin:
- return
- if part.entrypoint is not None:
- return
- if tuple(part.link) not in self._all_return_values:
- self.reporter.error(
- path=self.object_path,
- code='invalid-documentation-markup',
- msg='Directive "%s" contains a non-existing return value "%s"' % (part.source, part.name)
- )
-
- def _validate_semantic_markup(self, object) -> None:
- # Make sure we operate on strings
- if is_iterable(object):
- for entry in object:
- self._validate_semantic_markup(entry)
- return
- if not isinstance(object, string_types):
- return
-
- if self.collection:
- fqcn = f'{self.collection_name}.{self.name}'
- else:
- fqcn = f'ansible.builtin.{self.name}'
- current_plugin = dom.PluginIdentifier(fqcn=fqcn, type=self.plugin_type)
- for par in parse(object, Context(current_plugin=current_plugin), errors='message', add_source=True):
- for part in par:
- # Errors are already covered during schema validation, we only check for option and
- # return value references
- if part.type == dom.PartType.OPTION_NAME:
- self._check_sem_option(part, current_plugin)
- if part.type == dom.PartType.RETURN_VALUE:
- self._check_sem_return_value(part, current_plugin)
-
- def _validate_semantic_markup_collect(self, destination, sub_key, data, all_paths):
- if not isinstance(data, dict):
- return
- for key, value in data.items():
- if not isinstance(value, dict):
- continue
- keys = {key}
- if is_iterable(value.get('aliases')):
- keys.update(value['aliases'])
- new_paths = [path + [key] for path in all_paths for key in keys]
- destination.update([tuple(path) for path in new_paths])
- self._validate_semantic_markup_collect(destination, sub_key, value.get(sub_key), new_paths)
-
- def _validate_semantic_markup_options(self, options):
- if not isinstance(options, dict):
- return
- for key, value in options.items():
- self._validate_semantic_markup(value.get('description'))
- self._validate_semantic_markup_options(value.get('suboptions'))
-
- def _validate_semantic_markup_return_values(self, return_vars):
- if not isinstance(return_vars, dict):
- return
- for key, value in return_vars.items():
- self._validate_semantic_markup(value.get('description'))
- self._validate_semantic_markup(value.get('returned'))
- self._validate_semantic_markup_return_values(value.get('contains'))
-
- def _validate_all_semantic_markup(self, docs, return_docs):
- if not isinstance(docs, dict):
- docs = {}
- if not isinstance(return_docs, dict):
- return_docs = {}
-
- self._all_options = set()
- self._all_return_values = set()
- self._validate_semantic_markup_collect(self._all_options, 'suboptions', docs.get('options'), [[]])
- self._validate_semantic_markup_collect(self._all_return_values, 'contains', return_docs, [[]])
-
- for string_keys in ('short_description', 'description', 'notes', 'requirements', 'todo'):
- self._validate_semantic_markup(docs.get(string_keys))
-
- if is_iterable(docs.get('seealso')):
- for entry in docs.get('seealso'):
- if isinstance(entry, dict):
- self._validate_semantic_markup(entry.get('description'))
-
- if isinstance(docs.get('attributes'), dict):
- for entry in docs.get('attributes').values():
- if isinstance(entry, dict):
- for key in ('description', 'details'):
- self._validate_semantic_markup(entry.get(key))
-
- if isinstance(docs.get('deprecated'), dict):
- for key in ('why', 'alternative'):
- self._validate_semantic_markup(docs.get('deprecated').get(key))
-
- self._validate_semantic_markup_options(docs.get('options'))
- self._validate_semantic_markup_return_values(return_docs)
-
def _check_version_added(self, doc, existing_doc):
version_added_raw = doc.get('version_added')
try:
@@ -1337,31 +1233,6 @@ class ModuleValidator(Validator):
self._validate_argument_spec(docs, spec, kwargs)
- if isinstance(docs, Mapping) and isinstance(docs.get('attributes'), Mapping):
- if isinstance(docs['attributes'].get('check_mode'), Mapping):
- support_value = docs['attributes']['check_mode'].get('support')
- if not kwargs.get('supports_check_mode', False):
- if support_value != 'none':
- self.reporter.error(
- path=self.object_path,
- code='attributes-check-mode',
- msg="The module does not declare support for check mode, but the check_mode attribute's"
- " support value is '%s' and not 'none'" % support_value
- )
- else:
- if support_value not in ('full', 'partial', 'N/A'):
- self.reporter.error(
- path=self.object_path,
- code='attributes-check-mode',
- msg="The module does declare support for check mode, but the check_mode attribute's support value is '%s'" % support_value
- )
- if support_value in ('partial', 'N/A') and docs['attributes']['check_mode'].get('details') in (None, '', []):
- self.reporter.error(
- path=self.object_path,
- code='attributes-check-mode-details',
- msg="The module declares it does not fully support check mode, but has no details on what exactly that means"
- )
-
def _validate_list_of_module_args(self, name, terms, spec, context):
if terms is None:
return
@@ -1877,7 +1748,7 @@ class ModuleValidator(Validator):
)
arg_default = None
- if 'default' in data and data['default'] is not None:
+ if 'default' in data and not is_empty(data['default']):
try:
with CaptureStd():
arg_default = _type_checker(data['default'])
@@ -1918,7 +1789,7 @@ class ModuleValidator(Validator):
try:
doc_default = None
- if 'default' in doc_options_arg and doc_options_arg['default'] is not None:
+ if 'default' in doc_options_arg and not is_empty(doc_options_arg['default']):
with CaptureStd():
doc_default = _type_checker(doc_options_arg['default'])
except (Exception, SystemExit):
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
index 1b712171..03a14019 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
@@ -29,7 +29,7 @@ from contextlib import contextmanager
from ansible.executor.powershell.module_manifest import PSModuleDepFinder
from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS, AnsibleModule
from ansible.module_utils.six import reraise
-from ansible.module_utils.common.text.converters import to_bytes, to_text
+from ansible.module_utils._text import to_bytes, to_text
from .utils import CaptureStd, find_executable, get_module_name_from_filename
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py
index a6068c60..b2623ff7 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py
@@ -11,8 +11,7 @@ from ansible.module_utils.compat.version import StrictVersion
from functools import partial
from urllib.parse import urlparse
-from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, MultipleInvalid, Required, Schema, Self, ValueInvalid, Exclusive
-from ansible.constants import DOCUMENTABLE_PLUGINS
+from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid, Exclusive
from ansible.module_utils.six import string_types
from ansible.module_utils.common.collections import is_iterable
from ansible.module_utils.parsing.convert_bool import boolean
@@ -20,9 +19,6 @@ from ansible.parsing.quoting import unquote
from ansible.utils.version import SemanticVersion
from ansible.release import __version__
-from antsibull_docs_parser import dom
-from antsibull_docs_parser.parser import parse, Context
-
from .utils import parse_isodate
list_string_types = list(string_types)
@@ -84,8 +80,26 @@ def date(error_code=None):
return Any(isodate, error_code=error_code)
-# Roles can also be referenced by semantic markup
-_VALID_PLUGIN_TYPES = set(DOCUMENTABLE_PLUGINS + ('role', ))
+_MODULE = re.compile(r"\bM\(([^)]+)\)")
+_LINK = re.compile(r"\bL\(([^)]+)\)")
+_URL = re.compile(r"\bU\(([^)]+)\)")
+_REF = re.compile(r"\bR\(([^)]+)\)")
+
+
+def _check_module_link(directive, content):
+ if not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(content):
+ raise _add_ansible_error_code(
+ Invalid('Directive "%s" must contain a FQCN' % directive), 'invalid-documentation-markup')
+
+
+def _check_link(directive, content):
+ if ',' not in content:
+ raise _add_ansible_error_code(
+ Invalid('Directive "%s" must contain a comma' % directive), 'invalid-documentation-markup')
+ idx = content.rindex(',')
+ title = content[:idx]
+ url = content[idx + 1:].lstrip(' ')
+ _check_url(directive, url)
def _check_url(directive, content):
@@ -93,10 +107,15 @@ def _check_url(directive, content):
parsed_url = urlparse(content)
if parsed_url.scheme not in ('', 'http', 'https'):
raise ValueError('Schema must be HTTP, HTTPS, or not specified')
- return []
- except ValueError:
- return [_add_ansible_error_code(
- Invalid('Directive %s must contain a valid URL' % directive), 'invalid-documentation-markup')]
+ except ValueError as exc:
+ raise _add_ansible_error_code(
+ Invalid('Directive "%s" must contain an URL' % directive), 'invalid-documentation-markup')
+
+
+def _check_ref(directive, content):
+ if ',' not in content:
+ raise _add_ansible_error_code(
+ Invalid('Directive "%s" must contain a comma' % directive), 'invalid-documentation-markup')
def doc_string(v):
@@ -104,55 +123,25 @@ def doc_string(v):
if not isinstance(v, string_types):
raise _add_ansible_error_code(
Invalid('Must be a string'), 'invalid-documentation')
- errors = []
- for par in parse(v, Context(), errors='message', strict=True, add_source=True):
- for part in par:
- if part.type == dom.PartType.ERROR:
- errors.append(_add_ansible_error_code(Invalid(part.message), 'invalid-documentation-markup'))
- if part.type == dom.PartType.URL:
- errors.extend(_check_url('U()', part.url))
- if part.type == dom.PartType.LINK:
- errors.extend(_check_url('L()', part.url))
- if part.type == dom.PartType.MODULE:
- if not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(part.fqcn):
- errors.append(_add_ansible_error_code(Invalid(
- 'Directive "%s" must contain a FQCN; found "%s"' % (part.source, part.fqcn)),
- 'invalid-documentation-markup'))
- if part.type == dom.PartType.PLUGIN:
- if not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(part.plugin.fqcn):
- errors.append(_add_ansible_error_code(Invalid(
- 'Directive "%s" must contain a FQCN; found "%s"' % (part.source, part.plugin.fqcn)),
- 'invalid-documentation-markup'))
- if part.plugin.type not in _VALID_PLUGIN_TYPES:
- errors.append(_add_ansible_error_code(Invalid(
- 'Directive "%s" must contain a valid plugin type; found "%s"' % (part.source, part.plugin.type)),
- 'invalid-documentation-markup'))
- if part.type == dom.PartType.OPTION_NAME:
- if part.plugin is not None and not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(part.plugin.fqcn):
- errors.append(_add_ansible_error_code(Invalid(
- 'Directive "%s" must contain a FQCN; found "%s"' % (part.source, part.plugin.fqcn)),
- 'invalid-documentation-markup'))
- if part.plugin is not None and part.plugin.type not in _VALID_PLUGIN_TYPES:
- errors.append(_add_ansible_error_code(Invalid(
- 'Directive "%s" must contain a valid plugin type; found "%s"' % (part.source, part.plugin.type)),
- 'invalid-documentation-markup'))
- if part.type == dom.PartType.RETURN_VALUE:
- if part.plugin is not None and not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(part.plugin.fqcn):
- errors.append(_add_ansible_error_code(Invalid(
- 'Directive "%s" must contain a FQCN; found "%s"' % (part.source, part.plugin.fqcn)),
- 'invalid-documentation-markup'))
- if part.plugin is not None and part.plugin.type not in _VALID_PLUGIN_TYPES:
- errors.append(_add_ansible_error_code(Invalid(
- 'Directive "%s" must contain a valid plugin type; found "%s"' % (part.source, part.plugin.type)),
- 'invalid-documentation-markup'))
- if len(errors) == 1:
- raise errors[0]
- if errors:
- raise MultipleInvalid(errors)
+ for m in _MODULE.finditer(v):
+ _check_module_link(m.group(0), m.group(1))
+ for m in _LINK.finditer(v):
+ _check_link(m.group(0), m.group(1))
+ for m in _URL.finditer(v):
+ _check_url(m.group(0), m.group(1))
+ for m in _REF.finditer(v):
+ _check_ref(m.group(0), m.group(1))
return v
-doc_string_or_strings = Any(doc_string, [doc_string])
+def doc_string_or_strings(v):
+ """Match a documentation string, or list of strings."""
+ if isinstance(v, string_types):
+ return doc_string(v)
+ if isinstance(v, (list, tuple)):
+ return [doc_string(vv) for vv in v]
+ raise _add_ansible_error_code(
+ Invalid('Must be a string or list of strings'), 'invalid-documentation')
def is_callable(v):
@@ -184,11 +173,6 @@ seealso_schema = Schema(
'description': doc_string,
},
{
- Required('plugin'): Any(*string_types),
- Required('plugin_type'): Any(*DOCUMENTABLE_PLUGINS),
- 'description': doc_string,
- },
- {
Required('ref'): Any(*string_types),
Required('description'): doc_string,
},
@@ -810,7 +794,7 @@ def author(value):
def doc_schema(module_name, for_collection=False, deprecated_module=False, plugin_type='module'):
- if module_name.startswith('_') and not for_collection:
+ if module_name.startswith('_'):
module_name = module_name[1:]
deprecated_module = True
if for_collection is False and plugin_type == 'connection' and module_name == 'paramiko_ssh':
@@ -880,6 +864,9 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi
'action_group': add_default_attributes({
Required('membership'): list_string_types,
}),
+ 'forced_action_plugin': add_default_attributes({
+ Required('action_plugin'): any_string_types,
+ }),
'platform': add_default_attributes({
Required('platforms'): Any(list_string_types, *string_types)
}),
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
index 15cb7037..88d5b01a 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
@@ -28,7 +28,7 @@ from io import BytesIO, TextIOWrapper
import yaml
import yaml.reader
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.yaml import SafeLoader
from ansible.module_utils.six import string_types
diff --git a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
index ed1afcf3..d6de6117 100644
--- a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
+++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
@@ -181,15 +181,15 @@ class YamlChecker:
if doc_types and target.id not in doc_types:
continue
- fmt_match = fmt_re.match(statement.value.value.lstrip())
+ fmt_match = fmt_re.match(statement.value.s.lstrip())
fmt = 'yaml'
if fmt_match:
fmt = fmt_match.group(1)
docs[target.id] = dict(
- yaml=statement.value.value,
+ yaml=statement.value.s,
lineno=statement.lineno,
- end_lineno=statement.lineno + len(statement.value.value.splitlines()),
+ end_lineno=statement.lineno + len(statement.value.s.splitlines()),
fmt=fmt.lower(),
)
diff --git a/test/lib/ansible_test/_util/controller/tools/collection_detail.py b/test/lib/ansible_test/_util/controller/tools/collection_detail.py
index df52d099..870ea59e 100644
--- a/test/lib/ansible_test/_util/controller/tools/collection_detail.py
+++ b/test/lib/ansible_test/_util/controller/tools/collection_detail.py
@@ -50,7 +50,7 @@ def read_manifest_json(collection_path):
)
validate_version(result['version'])
except Exception as ex: # pylint: disable=broad-except
- raise Exception('{0}: {1}'.format(os.path.basename(manifest_path), ex)) from None
+ raise Exception('{0}: {1}'.format(os.path.basename(manifest_path), ex))
return result
@@ -71,7 +71,7 @@ def read_galaxy_yml(collection_path):
)
validate_version(result['version'])
except Exception as ex: # pylint: disable=broad-except
- raise Exception('{0}: {1}'.format(os.path.basename(galaxy_path), ex)) from None
+ raise Exception('{0}: {1}'.format(os.path.basename(galaxy_path), ex))
return result
diff --git a/test/lib/ansible_test/_util/target/common/constants.py b/test/lib/ansible_test/_util/target/common/constants.py
index 36a5a2c4..9bddfaf4 100644
--- a/test/lib/ansible_test/_util/target/common/constants.py
+++ b/test/lib/ansible_test/_util/target/common/constants.py
@@ -7,14 +7,14 @@ __metaclass__ = type
REMOTE_ONLY_PYTHON_VERSIONS = (
'2.7',
+ '3.5',
'3.6',
'3.7',
'3.8',
- '3.9',
)
CONTROLLER_PYTHON_VERSIONS = (
+ '3.9',
'3.10',
'3.11',
- '3.12',
)
diff --git a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_forked.py b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_forked.py
deleted file mode 100644
index d00d9e93..00000000
--- a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_forked.py
+++ /dev/null
@@ -1,103 +0,0 @@
-"""Run each test in its own fork. PYTEST_DONT_REWRITE"""
-# MIT License (see licenses/MIT-license.txt or https://opensource.org/licenses/MIT)
-# Based on code originally from:
-# https://github.com/pytest-dev/pytest-forked
-# https://github.com/pytest-dev/py
-# TIP: Disable pytest-xdist when debugging internal errors in this plugin.
-from __future__ import absolute_import, division, print_function
-
-__metaclass__ = type
-
-import os
-import pickle
-import tempfile
-import warnings
-
-from pytest import Item, hookimpl
-
-try:
- from pytest import TestReport
-except ImportError:
- from _pytest.runner import TestReport # Backwards compatibility with pytest < 7. Remove once Python 2.7 is not supported.
-
-from _pytest.runner import runtestprotocol
-
-
-@hookimpl(tryfirst=True)
-def pytest_runtest_protocol(item, nextitem): # type: (Item, Item | None) -> object | None
- """Entry point for enabling this plugin."""
- # This is needed because pytest-xdist creates an OS thread (using execnet).
- # See: https://github.com/pytest-dev/execnet/blob/d6aa1a56773c2e887515d63e50b1d08338cb78a7/execnet/gateway_base.py#L51
- warnings.filterwarnings("ignore", "^This process .* is multi-threaded, use of .* may lead to deadlocks in the child.$", DeprecationWarning)
-
- item_hook = item.ihook
- item_hook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location)
-
- reports = run_item(item, nextitem)
-
- for report in reports:
- item_hook.pytest_runtest_logreport(report=report)
-
- item_hook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location)
-
- return True
-
-
-def run_item(item, nextitem): # type: (Item, Item | None) -> list[TestReport]
- """Run the item in a child process and return a list of reports."""
- with tempfile.NamedTemporaryFile() as temp_file:
- pid = os.fork()
-
- if not pid:
- temp_file.delete = False
- run_child(item, nextitem, temp_file.name)
-
- return run_parent(item, pid, temp_file.name)
-
-
-def run_child(item, nextitem, result_path): # type: (Item, Item | None, str) -> None
- """Run the item, record the result and exit. Called in the child process."""
- with warnings.catch_warnings(record=True) as captured_warnings:
- reports = runtestprotocol(item, nextitem=nextitem, log=False)
-
- with open(result_path, "wb") as result_file:
- pickle.dump((reports, captured_warnings), result_file)
-
- os._exit(0) # noqa
-
-
-def run_parent(item, pid, result_path): # type: (Item, int, str) -> list[TestReport]
- """Wait for the child process to exit and return the test reports. Called in the parent process."""
- exit_code = waitstatus_to_exitcode(os.waitpid(pid, 0)[1])
-
- if exit_code:
- reason = "Test CRASHED with exit code {}.".format(exit_code)
- report = TestReport(item.nodeid, item.location, {x: 1 for x in item.keywords}, "failed", reason, "call", user_properties=item.user_properties)
-
- if item.get_closest_marker("xfail"):
- report.outcome = "skipped"
- report.wasxfail = reason
-
- reports = [report]
- else:
- with open(result_path, "rb") as result_file:
- reports, captured_warnings = pickle.load(result_file) # type: list[TestReport], list[warnings.WarningMessage]
-
- for warning in captured_warnings:
- warnings.warn_explicit(warning.message, warning.category, warning.filename, warning.lineno)
-
- return reports
-
-
-def waitstatus_to_exitcode(status): # type: (int) -> int
- """Convert a wait status to an exit code."""
- # This function was added in Python 3.9.
- # See: https://docs.python.org/3/library/os.html#os.waitstatus_to_exitcode
-
- if os.WIFEXITED(status):
- return os.WEXITSTATUS(status)
-
- if os.WIFSIGNALED(status):
- return -os.WTERMSIG(status)
-
- raise ValueError(status)
diff --git a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py
index 2f77c03b..fefd6b0f 100644
--- a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py
+++ b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py
@@ -32,50 +32,6 @@ def collection_pypkgpath(self):
raise Exception('File "%s" not found in collection path "%s".' % (self.strpath, ANSIBLE_COLLECTIONS_PATH))
-def enable_assertion_rewriting_hook(): # type: () -> None
- """
- Enable pytest's AssertionRewritingHook on Python 3.x.
- This is necessary because the Ansible collection loader intercepts imports before the pytest provided loader ever sees them.
- """
- import sys
-
- if sys.version_info[0] == 2:
- return # Python 2.x is not supported
-
- hook_name = '_pytest.assertion.rewrite.AssertionRewritingHook'
- hooks = [hook for hook in sys.meta_path if hook.__class__.__module__ + '.' + hook.__class__.__qualname__ == hook_name]
-
- if len(hooks) != 1:
- raise Exception('Found {} instance(s) of "{}" in sys.meta_path.'.format(len(hooks), hook_name))
-
- assertion_rewriting_hook = hooks[0]
-
- # This is based on `_AnsibleCollectionPkgLoaderBase.exec_module` from `ansible/utils/collection_loader/_collection_finder.py`.
- def exec_module(self, module):
- # short-circuit redirect; avoid reinitializing existing modules
- if self._redirect_module: # pylint: disable=protected-access
- return
-
- # execute the module's code in its namespace
- code_obj = self.get_code(self._fullname) # pylint: disable=protected-access
-
- if code_obj is not None: # things like NS packages that can't have code on disk will return None
- # This logic is loosely based on `AssertionRewritingHook._should_rewrite` from pytest.
- # See: https://github.com/pytest-dev/pytest/blob/779a87aada33af444f14841a04344016a087669e/src/_pytest/assertion/rewrite.py#L209
- should_rewrite = self._package_to_load == 'conftest' or self._package_to_load.startswith('test_') # pylint: disable=protected-access
-
- if should_rewrite:
- # noinspection PyUnresolvedReferences
- assertion_rewriting_hook.exec_module(module)
- else:
- exec(code_obj, module.__dict__) # pylint: disable=exec-used
-
- # noinspection PyProtectedMember
- from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionPkgLoaderBase
-
- _AnsibleCollectionPkgLoaderBase.exec_module = exec_module
-
-
def pytest_configure():
"""Configure this pytest plugin."""
try:
@@ -84,8 +40,6 @@ def pytest_configure():
except AttributeError:
pytest_configure.executed = True
- enable_assertion_rewriting_hook()
-
# noinspection PyProtectedMember
from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
diff --git a/test/lib/ansible_test/_util/target/sanity/import/importer.py b/test/lib/ansible_test/_util/target/sanity/import/importer.py
index 38a73643..44a5ddc9 100644
--- a/test/lib/ansible_test/_util/target/sanity/import/importer.py
+++ b/test/lib/ansible_test/_util/target/sanity/import/importer.py
@@ -552,11 +552,13 @@ def main():
"Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography,"
" and will be removed in the next release.")
- # ansible.utils.unsafe_proxy attempts patching sys.intern generating a warning if it was already patched
- warnings.filterwarnings(
- "ignore",
- "skipped sys.intern patch; appears to have already been patched"
- )
+ if sys.version_info[:2] == (3, 5):
+ warnings.filterwarnings(
+ "ignore",
+ "Python 3.5 support will be dropped in the next release ofcryptography. Please upgrade your Python.")
+ warnings.filterwarnings(
+ "ignore",
+ "Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.")
try:
yield
diff --git a/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1
new file mode 100644
index 00000000..c1cb91e4
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1
@@ -0,0 +1,435 @@
+#Requires -Version 3.0
+
+# Configure a Windows host for remote management with Ansible
+# -----------------------------------------------------------
+#
+# This script checks the current WinRM (PS Remoting) configuration and makes
+# the necessary changes to allow Ansible to connect, authenticate and
+# execute PowerShell commands.
+#
+# IMPORTANT: This script uses self-signed certificates and authentication mechanisms
+# that are intended for development environments and evaluation purposes only.
+# Production environments and deployments that are exposed on the network should
+# use CA-signed certificates and secure authentication mechanisms such as Kerberos.
+#
+# To run this script in Powershell:
+#
+# [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
+# $url = "https://raw.githubusercontent.com/ansible/ansible/devel/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1"
+# $file = "$env:temp\ConfigureRemotingForAnsible.ps1"
+#
+# (New-Object -TypeName System.Net.WebClient).DownloadFile($url, $file)
+#
+# powershell.exe -ExecutionPolicy ByPass -File $file
+#
+# All events are logged to the Windows EventLog, useful for unattended runs.
+#
+# Use option -Verbose in order to see the verbose output messages.
+#
+# Use option -CertValidityDays to specify how long this certificate is valid
+# starting from today. So you would specify -CertValidityDays 3650 to get
+# a 10-year valid certificate.
+#
+# Use option -ForceNewSSLCert if the system has been SysPreped and a new
+# SSL Certificate must be forced on the WinRM Listener when re-running this
+# script. This is necessary when a new SID and CN name is created.
+#
+# Use option -EnableCredSSP to enable CredSSP as an authentication option.
+#
+# Use option -DisableBasicAuth to disable basic authentication.
+#
+# Use option -SkipNetworkProfileCheck to skip the network profile check.
+# Without specifying this the script will only run if the device's interfaces
+# are in DOMAIN or PRIVATE zones. Provide this switch if you want to enable
+# WinRM on a device with an interface in PUBLIC zone.
+#
+# Use option -SubjectName to specify the CN name of the certificate. This
+# defaults to the system's hostname and generally should not be specified.
+
+# Written by Trond Hindenes <trond@hindenes.com>
+# Updated by Chris Church <cchurch@ansible.com>
+# Updated by Michael Crilly <mike@autologic.cm>
+# Updated by Anton Ouzounov <Anton.Ouzounov@careerbuilder.com>
+# Updated by Nicolas Simond <contact@nicolas-simond.com>
+# Updated by Dag Wieërs <dag@wieers.com>
+# Updated by Jordan Borean <jborean93@gmail.com>
+# Updated by Erwan Quélin <erwan.quelin@gmail.com>
+# Updated by David Norman <david@dkn.email>
+#
+# Version 1.0 - 2014-07-06
+# Version 1.1 - 2014-11-11
+# Version 1.2 - 2015-05-15
+# Version 1.3 - 2016-04-04
+# Version 1.4 - 2017-01-05
+# Version 1.5 - 2017-02-09
+# Version 1.6 - 2017-04-18
+# Version 1.7 - 2017-11-23
+# Version 1.8 - 2018-02-23
+# Version 1.9 - 2018-09-21
+
+# Support -Verbose option
+[CmdletBinding()]
+
+Param (
+ [string]$SubjectName = $env:COMPUTERNAME,
+ [int]$CertValidityDays = 1095,
+ [switch]$SkipNetworkProfileCheck,
+ $CreateSelfSignedCert = $true,
+ [switch]$ForceNewSSLCert,
+ [switch]$GlobalHttpFirewallAccess,
+ [switch]$DisableBasicAuth = $false,
+ [switch]$EnableCredSSP
+)
+
+Function Write-ProgressLog {
+ $Message = $args[0]
+ Write-EventLog -LogName Application -Source $EventSource -EntryType Information -EventId 1 -Message $Message
+}
+
+Function Write-VerboseLog {
+ $Message = $args[0]
+ Write-Verbose $Message
+ Write-ProgressLog $Message
+}
+
+Function Write-HostLog {
+ $Message = $args[0]
+ Write-Output $Message
+ Write-ProgressLog $Message
+}
+
+Function New-LegacySelfSignedCert {
+ Param (
+ [string]$SubjectName,
+ [int]$ValidDays = 1095
+ )
+
+ $hostnonFQDN = $env:computerName
+ $hostFQDN = [System.Net.Dns]::GetHostByName(($env:computerName)).Hostname
+ $SignatureAlgorithm = "SHA256"
+
+ $name = New-Object -COM "X509Enrollment.CX500DistinguishedName.1"
+ $name.Encode("CN=$SubjectName", 0)
+
+ $key = New-Object -COM "X509Enrollment.CX509PrivateKey.1"
+ $key.ProviderName = "Microsoft Enhanced RSA and AES Cryptographic Provider"
+ $key.KeySpec = 1
+ $key.Length = 4096
+ $key.SecurityDescriptor = "D:PAI(A;;0xd01f01ff;;;SY)(A;;0xd01f01ff;;;BA)(A;;0x80120089;;;NS)"
+ $key.MachineContext = 1
+ $key.Create()
+
+ $serverauthoid = New-Object -COM "X509Enrollment.CObjectId.1"
+ $serverauthoid.InitializeFromValue("1.3.6.1.5.5.7.3.1")
+ $ekuoids = New-Object -COM "X509Enrollment.CObjectIds.1"
+ $ekuoids.Add($serverauthoid)
+ $ekuext = New-Object -COM "X509Enrollment.CX509ExtensionEnhancedKeyUsage.1"
+ $ekuext.InitializeEncode($ekuoids)
+
+ $cert = New-Object -COM "X509Enrollment.CX509CertificateRequestCertificate.1"
+ $cert.InitializeFromPrivateKey(2, $key, "")
+ $cert.Subject = $name
+ $cert.Issuer = $cert.Subject
+ $cert.NotBefore = (Get-Date).AddDays(-1)
+ $cert.NotAfter = $cert.NotBefore.AddDays($ValidDays)
+
+ $SigOID = New-Object -ComObject X509Enrollment.CObjectId
+ $SigOID.InitializeFromValue(([Security.Cryptography.Oid]$SignatureAlgorithm).Value)
+
+ [string[]] $AlternativeName += $hostnonFQDN
+ $AlternativeName += $hostFQDN
+ $IAlternativeNames = New-Object -ComObject X509Enrollment.CAlternativeNames
+
+ foreach ($AN in $AlternativeName) {
+ $AltName = New-Object -ComObject X509Enrollment.CAlternativeName
+ $AltName.InitializeFromString(0x3, $AN)
+ $IAlternativeNames.Add($AltName)
+ }
+
+ $SubjectAlternativeName = New-Object -ComObject X509Enrollment.CX509ExtensionAlternativeNames
+ $SubjectAlternativeName.InitializeEncode($IAlternativeNames)
+
+ [String[]]$KeyUsage = ("DigitalSignature", "KeyEncipherment")
+ $KeyUsageObj = New-Object -ComObject X509Enrollment.CX509ExtensionKeyUsage
+ $KeyUsageObj.InitializeEncode([int][Security.Cryptography.X509Certificates.X509KeyUsageFlags]($KeyUsage))
+ $KeyUsageObj.Critical = $true
+
+ $cert.X509Extensions.Add($KeyUsageObj)
+ $cert.X509Extensions.Add($ekuext)
+ $cert.SignatureInformation.HashAlgorithm = $SigOID
+ $CERT.X509Extensions.Add($SubjectAlternativeName)
+ $cert.Encode()
+
+ $enrollment = New-Object -COM "X509Enrollment.CX509Enrollment.1"
+ $enrollment.InitializeFromRequest($cert)
+ $certdata = $enrollment.CreateRequest(0)
+ $enrollment.InstallResponse(2, $certdata, 0, "")
+
+ # extract/return the thumbprint from the generated cert
+ $parsed_cert = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2
+ $parsed_cert.Import([System.Text.Encoding]::UTF8.GetBytes($certdata))
+
+ return $parsed_cert.Thumbprint
+}
+
+Function Enable-GlobalHttpFirewallAccess {
+ Write-Verbose "Forcing global HTTP firewall access"
+ # this is a fairly naive implementation; could be more sophisticated about rule matching/collapsing
+ $fw = New-Object -ComObject HNetCfg.FWPolicy2
+
+ # try to find/enable the default rule first
+ $add_rule = $false
+ $matching_rules = $fw.Rules | Where-Object { $_.Name -eq "Windows Remote Management (HTTP-In)" }
+ $rule = $null
+ If ($matching_rules) {
+ If ($matching_rules -isnot [Array]) {
+ Write-Verbose "Editing existing single HTTP firewall rule"
+ $rule = $matching_rules
+ }
+ Else {
+ # try to find one with the All or Public profile first
+ Write-Verbose "Found multiple existing HTTP firewall rules..."
+ $rule = $matching_rules | ForEach-Object { $_.Profiles -band 4 }[0]
+
+ If (-not $rule -or $rule -is [Array]) {
+ Write-Verbose "Editing an arbitrary single HTTP firewall rule (multiple existed)"
+ # oh well, just pick the first one
+ $rule = $matching_rules[0]
+ }
+ }
+ }
+
+ If (-not $rule) {
+ Write-Verbose "Creating a new HTTP firewall rule"
+ $rule = New-Object -ComObject HNetCfg.FWRule
+ $rule.Name = "Windows Remote Management (HTTP-In)"
+ $rule.Description = "Inbound rule for Windows Remote Management via WS-Management. [TCP 5985]"
+ $add_rule = $true
+ }
+
+ $rule.Profiles = 0x7FFFFFFF
+ $rule.Protocol = 6
+ $rule.LocalPorts = 5985
+ $rule.RemotePorts = "*"
+ $rule.LocalAddresses = "*"
+ $rule.RemoteAddresses = "*"
+ $rule.Enabled = $true
+ $rule.Direction = 1
+ $rule.Action = 1
+ $rule.Grouping = "Windows Remote Management"
+
+ If ($add_rule) {
+ $fw.Rules.Add($rule)
+ }
+
+ Write-Verbose "HTTP firewall rule $($rule.Name) updated"
+}
+
+# Setup error handling.
+Trap {
+ $_
+ Exit 1
+}
+$ErrorActionPreference = "Stop"
+
+# Get the ID and security principal of the current user account
+$myWindowsID = [System.Security.Principal.WindowsIdentity]::GetCurrent()
+$myWindowsPrincipal = new-object System.Security.Principal.WindowsPrincipal($myWindowsID)
+
+# Get the security principal for the Administrator role
+$adminRole = [System.Security.Principal.WindowsBuiltInRole]::Administrator
+
+# Check to see if we are currently running "as Administrator"
+if (-Not $myWindowsPrincipal.IsInRole($adminRole)) {
+ Write-Output "ERROR: You need elevated Administrator privileges in order to run this script."
+ Write-Output " Start Windows PowerShell by using the Run as Administrator option."
+ Exit 2
+}
+
+$EventSource = $MyInvocation.MyCommand.Name
+If (-Not $EventSource) {
+ $EventSource = "Powershell CLI"
+}
+
+If ([System.Diagnostics.EventLog]::Exists('Application') -eq $False -or [System.Diagnostics.EventLog]::SourceExists($EventSource) -eq $False) {
+ New-EventLog -LogName Application -Source $EventSource
+}
+
+# Detect PowerShell version.
+If ($PSVersionTable.PSVersion.Major -lt 3) {
+ Write-ProgressLog "PowerShell version 3 or higher is required."
+ Throw "PowerShell version 3 or higher is required."
+}
+
+# Find and start the WinRM service.
+Write-Verbose "Verifying WinRM service."
+If (!(Get-Service "WinRM")) {
+ Write-ProgressLog "Unable to find the WinRM service."
+ Throw "Unable to find the WinRM service."
+}
+ElseIf ((Get-Service "WinRM").Status -ne "Running") {
+ Write-Verbose "Setting WinRM service to start automatically on boot."
+ Set-Service -Name "WinRM" -StartupType Automatic
+ Write-ProgressLog "Set WinRM service to start automatically on boot."
+ Write-Verbose "Starting WinRM service."
+ Start-Service -Name "WinRM" -ErrorAction Stop
+ Write-ProgressLog "Started WinRM service."
+
+}
+
+# WinRM should be running; check that we have a PS session config.
+If (!(Get-PSSessionConfiguration -Verbose:$false) -or (!(Get-ChildItem WSMan:\localhost\Listener))) {
+ If ($SkipNetworkProfileCheck) {
+ Write-Verbose "Enabling PS Remoting without checking Network profile."
+ Enable-PSRemoting -SkipNetworkProfileCheck -Force -ErrorAction Stop
+ Write-ProgressLog "Enabled PS Remoting without checking Network profile."
+ }
+ Else {
+ Write-Verbose "Enabling PS Remoting."
+ Enable-PSRemoting -Force -ErrorAction Stop
+ Write-ProgressLog "Enabled PS Remoting."
+ }
+}
+Else {
+ Write-Verbose "PS Remoting is already enabled."
+}
+
+# Ensure LocalAccountTokenFilterPolicy is set to 1
+# https://github.com/ansible/ansible/issues/42978
+$token_path = "HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System"
+$token_prop_name = "LocalAccountTokenFilterPolicy"
+$token_key = Get-Item -Path $token_path
+$token_value = $token_key.GetValue($token_prop_name, $null)
+if ($token_value -ne 1) {
+    Write-Verbose "Setting LocalAccountTokenFilterPolicy to 1"
+ if ($null -ne $token_value) {
+ Remove-ItemProperty -Path $token_path -Name $token_prop_name
+ }
+ New-ItemProperty -Path $token_path -Name $token_prop_name -Value 1 -PropertyType DWORD > $null
+}
+
+# Make sure there is a SSL listener.
+$listeners = Get-ChildItem WSMan:\localhost\Listener
+If (!($listeners | Where-Object { $_.Keys -like "TRANSPORT=HTTPS" })) {
+ # We cannot use New-SelfSignedCertificate on 2012R2 and earlier
+ $thumbprint = New-LegacySelfSignedCert -SubjectName $SubjectName -ValidDays $CertValidityDays
+ Write-HostLog "Self-signed SSL certificate generated; thumbprint: $thumbprint"
+
+ # Create the hashtables of settings to be used.
+ $valueset = @{
+ Hostname = $SubjectName
+ CertificateThumbprint = $thumbprint
+ }
+
+ $selectorset = @{
+ Transport = "HTTPS"
+ Address = "*"
+ }
+
+ Write-Verbose "Enabling SSL listener."
+ New-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset -ValueSet $valueset
+ Write-ProgressLog "Enabled SSL listener."
+}
+Else {
+ Write-Verbose "SSL listener is already active."
+
+    # Force a new SSL cert on the Listener if $ForceNewSSLCert is set
+ If ($ForceNewSSLCert) {
+
+ # We cannot use New-SelfSignedCertificate on 2012R2 and earlier
+ $thumbprint = New-LegacySelfSignedCert -SubjectName $SubjectName -ValidDays $CertValidityDays
+ Write-HostLog "Self-signed SSL certificate generated; thumbprint: $thumbprint"
+
+ $valueset = @{
+ CertificateThumbprint = $thumbprint
+ Hostname = $SubjectName
+ }
+
+ # Delete the listener for SSL
+ $selectorset = @{
+ Address = "*"
+ Transport = "HTTPS"
+ }
+ Remove-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset
+
+ # Add new Listener with new SSL cert
+ New-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset -ValueSet $valueset
+ }
+}
+
+# Check for basic authentication.
+$basicAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object { $_.Name -eq "Basic" }
+
+If ($DisableBasicAuth) {
+ If (($basicAuthSetting.Value) -eq $true) {
+ Write-Verbose "Disabling basic auth support."
+ Set-Item -Path "WSMan:\localhost\Service\Auth\Basic" -Value $false
+ Write-ProgressLog "Disabled basic auth support."
+ }
+ Else {
+ Write-Verbose "Basic auth is already disabled."
+ }
+}
+Else {
+ If (($basicAuthSetting.Value) -eq $false) {
+ Write-Verbose "Enabling basic auth support."
+ Set-Item -Path "WSMan:\localhost\Service\Auth\Basic" -Value $true
+ Write-ProgressLog "Enabled basic auth support."
+ }
+ Else {
+ Write-Verbose "Basic auth is already enabled."
+ }
+}
+
+# If EnableCredSSP is set to true
+If ($EnableCredSSP) {
+ # Check for CredSSP authentication
+ $credsspAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object { $_.Name -eq "CredSSP" }
+ If (($credsspAuthSetting.Value) -eq $false) {
+ Write-Verbose "Enabling CredSSP auth support."
+ Enable-WSManCredSSP -role server -Force
+ Write-ProgressLog "Enabled CredSSP auth support."
+ }
+}
+
+If ($GlobalHttpFirewallAccess) {
+ Enable-GlobalHttpFirewallAccess
+}
+
+# Configure firewall to allow WinRM HTTPS connections.
+$fwtest1 = netsh advfirewall firewall show rule name="Allow WinRM HTTPS"
+$fwtest2 = netsh advfirewall firewall show rule name="Allow WinRM HTTPS" profile=any
+If ($fwtest1.count -lt 5) {
+ Write-Verbose "Adding firewall rule to allow WinRM HTTPS."
+ netsh advfirewall firewall add rule profile=any name="Allow WinRM HTTPS" dir=in localport=5986 protocol=TCP action=allow
+ Write-ProgressLog "Added firewall rule to allow WinRM HTTPS."
+}
+ElseIf (($fwtest1.count -ge 5) -and ($fwtest2.count -lt 5)) {
+ Write-Verbose "Updating firewall rule to allow WinRM HTTPS for any profile."
+ netsh advfirewall firewall set rule name="Allow WinRM HTTPS" new profile=any
+ Write-ProgressLog "Updated firewall rule to allow WinRM HTTPS for any profile."
+}
+Else {
+ Write-Verbose "Firewall rule already exists to allow WinRM HTTPS."
+}
+
+# Test a remoting connection to localhost, which should work.
+$httpResult = Invoke-Command -ComputerName "localhost" -ScriptBlock { $using:env:COMPUTERNAME } -ErrorVariable httpError -ErrorAction SilentlyContinue
+$httpsOptions = New-PSSessionOption -SkipCACheck -SkipCNCheck -SkipRevocationCheck
+
+$httpsResult = New-PSSession -UseSSL -ComputerName "localhost" -SessionOption $httpsOptions -ErrorVariable httpsError -ErrorAction SilentlyContinue
+
+If ($httpResult -and $httpsResult) {
+ Write-Verbose "HTTP: Enabled | HTTPS: Enabled"
+}
+ElseIf ($httpsResult -and !$httpResult) {
+ Write-Verbose "HTTP: Disabled | HTTPS: Enabled"
+}
+ElseIf ($httpResult -and !$httpsResult) {
+ Write-Verbose "HTTP: Enabled | HTTPS: Disabled"
+}
+Else {
+ Write-ProgressLog "Unable to establish an HTTP or HTTPS remoting session."
+ Throw "Unable to establish an HTTP or HTTPS remoting session."
+}
+Write-VerboseLog "PS Remoting has been successfully configured for Ansible."
diff --git a/test/lib/ansible_test/_util/target/setup/bootstrap.sh b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
index 65673da5..ea17dad3 100644
--- a/test/lib/ansible_test/_util/target/setup/bootstrap.sh
+++ b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
@@ -53,7 +53,7 @@ install_pip() {
pip_bootstrap_url="https://ci-files.testing.ansible.com/ansible-test/get-pip-20.3.4.py"
;;
*)
- pip_bootstrap_url="https://ci-files.testing.ansible.com/ansible-test/get-pip-23.1.2.py"
+ pip_bootstrap_url="https://ci-files.testing.ansible.com/ansible-test/get-pip-21.3.1.py"
;;
esac
@@ -111,15 +111,6 @@ bootstrap_remote_alpine()
echo "Failed to install packages. Sleeping before trying again..."
sleep 10
done
-
- # Upgrade the `libexpat` package to ensure that an upgraded Python (`pyexpat`) continues to work.
- while true; do
- # shellcheck disable=SC2086
- apk upgrade -q libexpat \
- && break
- echo "Failed to upgrade libexpat. Sleeping before trying again..."
- sleep 10
- done
}
bootstrap_remote_fedora()
@@ -172,6 +163,8 @@ bootstrap_remote_freebsd()
# Declare platform/python version combinations which do not have supporting OS packages available.
# For these combinations ansible-test will use pip to install the requirements instead.
case "${platform_version}/${python_version}" in
+ "12.4/3.9")
+ ;;
*)
jinja2_pkg="" # not available
cryptography_pkg="" # not available
@@ -268,7 +261,7 @@ bootstrap_remote_rhel_8()
if [ "${python_version}" = "3.6" ]; then
py_pkg_prefix="python3"
else
- py_pkg_prefix="python${python_version}"
+ py_pkg_prefix="python${python_package_version}"
fi
packages="
@@ -276,14 +269,6 @@ bootstrap_remote_rhel_8()
${py_pkg_prefix}-devel
"
- # pip isn't included in the Python devel package under Python 3.11
- if [ "${python_version}" != "3.6" ]; then
- packages="
- ${packages}
- ${py_pkg_prefix}-pip
- "
- fi
-
# Jinja2 is not installed with an OS package since the provided version is too old.
# Instead, ansible-test will install it using pip.
if [ "${controller}" ]; then
@@ -293,19 +278,9 @@ bootstrap_remote_rhel_8()
"
fi
- # Python 3.11 isn't a module like the earlier versions
- if [ "${python_version}" = "3.6" ]; then
- while true; do
- # shellcheck disable=SC2086
- yum module install -q -y "python${python_package_version}" \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
- fi
-
while true; do
# shellcheck disable=SC2086
+ yum module install -q -y "python${python_package_version}" && \
yum install -q -y ${packages} \
&& break
echo "Failed to install packages. Sleeping before trying again..."
@@ -317,34 +292,22 @@ bootstrap_remote_rhel_8()
bootstrap_remote_rhel_9()
{
- if [ "${python_version}" = "3.9" ]; then
- py_pkg_prefix="python3"
- else
- py_pkg_prefix="python${python_version}"
- fi
+ py_pkg_prefix="python3"
packages="
gcc
${py_pkg_prefix}-devel
"
- # pip is not included in the Python devel package under Python 3.11
- if [ "${python_version}" != "3.9" ]; then
- packages="
- ${packages}
- ${py_pkg_prefix}-pip
- "
- fi
-
# Jinja2 is not installed with an OS package since the provided version is too old.
# Instead, ansible-test will install it using pip.
- # packaging and resolvelib are missing for Python 3.11 (and possible later) so we just
- # skip them and let ansible-test install them from PyPI.
if [ "${controller}" ]; then
packages="
${packages}
${py_pkg_prefix}-cryptography
+ ${py_pkg_prefix}-packaging
${py_pkg_prefix}-pyyaml
+ ${py_pkg_prefix}-resolvelib
"
fi
@@ -424,6 +387,14 @@ bootstrap_remote_ubuntu()
echo "Failed to install packages. Sleeping before trying again..."
sleep 10
done
+
+ if [ "${controller}" ]; then
+ if [ "${platform_version}/${python_version}" = "20.04/3.9" ]; then
+ # Install pyyaml using pip so libyaml support is available on Python 3.9.
+ # The OS package install (which is installed by default) only has a .so file for Python 3.8.
+ pip_install "--upgrade pyyaml"
+ fi
+ fi
}
bootstrap_docker()
diff --git a/test/lib/ansible_test/_util/target/setup/quiet_pip.py b/test/lib/ansible_test/_util/target/setup/quiet_pip.py
index 171ff8f3..54f0f860 100644
--- a/test/lib/ansible_test/_util/target/setup/quiet_pip.py
+++ b/test/lib/ansible_test/_util/target/setup/quiet_pip.py
@@ -27,6 +27,10 @@ WARNING_MESSAGE_FILTERS = (
# pip 21.0 will drop support for Python 2.7 in January 2021.
# More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support
'DEPRECATION: Python 2.7 reached the end of its life ',
+
+ # DEPRECATION: Python 3.5 reached the end of its life on September 13th, 2020. Please upgrade your Python as Python 3.5 is no longer maintained.
+ # pip 21.0 will drop support for Python 3.5 in January 2021. pip 21.0 will remove support for this functionality.
+ 'DEPRECATION: Python 3.5 reached the end of its life ',
)
diff --git a/test/lib/ansible_test/config/cloud-config-aws.ini.template b/test/lib/ansible_test/config/cloud-config-aws.ini.template
index 503a14b3..88b9fea6 100644
--- a/test/lib/ansible_test/config/cloud-config-aws.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-aws.ini.template
@@ -6,9 +6,7 @@
# 2) Using the automatically provisioned AWS credentials in ansible-test.
#
# If you do not want to use the automatically provisioned temporary AWS credentials,
-# fill in the @VAR placeholders below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
-# If you need to omit optional fields like security_token, comment out that line.
+# fill in the @VAR placeholders below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration instead of temporary credentials.
#
# NOTE: Automatic provisioning of AWS credentials requires an ansible-core-ci API key.
diff --git a/test/lib/ansible_test/config/cloud-config-azure.ini.template b/test/lib/ansible_test/config/cloud-config-azure.ini.template
index bf7cc022..766553d1 100644
--- a/test/lib/ansible_test/config/cloud-config-azure.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-azure.ini.template
@@ -6,8 +6,7 @@
# 2) Using the automatically provisioned Azure credentials in ansible-test.
#
# If you do not want to use the automatically provisioned temporary Azure credentials,
-# fill in the values below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the values below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration instead of temporary credentials.
#
# NOTE: Automatic provisioning of Azure credentials requires an ansible-core-ci API key in ~/.ansible-core-ci.key
diff --git a/test/lib/ansible_test/config/cloud-config-cloudscale.ini.template b/test/lib/ansible_test/config/cloud-config-cloudscale.ini.template
index 8396e4c8..1c99e9b8 100644
--- a/test/lib/ansible_test/config/cloud-config-cloudscale.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-cloudscale.ini.template
@@ -4,8 +4,6 @@
#
# 1) Running integration tests without using ansible-test.
#
-# Fill in the value below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
[default]
cloudscale_api_token = @API_TOKEN
diff --git a/test/lib/ansible_test/config/cloud-config-cs.ini.template b/test/lib/ansible_test/config/cloud-config-cs.ini.template
index 0589fd5f..f8d8a915 100644
--- a/test/lib/ansible_test/config/cloud-config-cs.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-cs.ini.template
@@ -6,8 +6,7 @@
# 2) Using the automatically provisioned cloudstack-sim docker container in ansible-test.
#
# If you do not want to use the automatically provided CloudStack simulator,
-# fill in the @VAR placeholders below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the @VAR placeholders below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration and not launch the simulator.
#
# It is recommended that you DO NOT use this template unless you cannot use the simulator.
diff --git a/test/lib/ansible_test/config/cloud-config-gcp.ini.template b/test/lib/ansible_test/config/cloud-config-gcp.ini.template
index 626063da..00a20971 100644
--- a/test/lib/ansible_test/config/cloud-config-gcp.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-gcp.ini.template
@@ -6,8 +6,7 @@
# 2) Using the automatically provisioned cloudstack-sim docker container in ansible-test.
#
# If you do not want to use the automatically provided GCP simulator,
-# fill in the @VAR placeholders below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the @VAR placeholders below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration and not launch the simulator.
#
# It is recommended that you DO NOT use this template unless you cannot use the simulator.
diff --git a/test/lib/ansible_test/config/cloud-config-hcloud.ini.template b/test/lib/ansible_test/config/cloud-config-hcloud.ini.template
index 8fc7fa77..8db658db 100644
--- a/test/lib/ansible_test/config/cloud-config-hcloud.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-hcloud.ini.template
@@ -6,8 +6,7 @@
# 2) Using the automatically provisioned Hetzner Cloud credentials in ansible-test.
#
# If you do not want to use the automatically provisioned temporary Hetzner Cloud credentials,
-# fill in the @VAR placeholders below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the @VAR placeholders below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration instead of temporary credentials.
#
# NOTE: Automatic provisioning of Hetzner Cloud credentials requires an ansible-core-ci API key.
diff --git a/test/lib/ansible_test/config/cloud-config-opennebula.ini.template b/test/lib/ansible_test/config/cloud-config-opennebula.ini.template
index f155d987..00c56db1 100644
--- a/test/lib/ansible_test/config/cloud-config-opennebula.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-opennebula.ini.template
@@ -6,8 +6,7 @@
# 2) Running integration tests against previously recorded XMLRPC fixtures
#
# If you want to test against a Live OpenNebula platform,
-# fill in the values below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the values below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration.
#
# If you run with @FIXTURES enabled (true) then you can decide if you want to
@@ -18,4 +17,4 @@ opennebula_url: @URL
opennebula_username: @USERNAME
opennebula_password: @PASSWORD
opennebula_test_fixture: @FIXTURES
-opennebula_test_fixture_replay: @REPLAY
+opennebula_test_fixture_replay: @REPLAY \ No newline at end of file
diff --git a/test/lib/ansible_test/config/cloud-config-openshift.kubeconfig.template b/test/lib/ansible_test/config/cloud-config-openshift.kubeconfig.template
index 5c022cde..0a10f23b 100644
--- a/test/lib/ansible_test/config/cloud-config-openshift.kubeconfig.template
+++ b/test/lib/ansible_test/config/cloud-config-openshift.kubeconfig.template
@@ -6,8 +6,7 @@
# 2) Using the automatically provisioned openshift-origin docker container in ansible-test.
#
# If you do not want to use the automatically provided OpenShift container,
-# place your kubeconfig file next into the tests/integration directory of the collection you're testing,
-# with the same name is this file, but without the .template extension.
+# place your kubeconfig file next to this file, with the same name, but without the .template extension.
# This will cause ansible-test to use the given configuration and not launch the automatically provided container.
#
# It is recommended that you DO NOT use this template unless you cannot use the automatically provided container.
diff --git a/test/lib/ansible_test/config/cloud-config-scaleway.ini.template b/test/lib/ansible_test/config/cloud-config-scaleway.ini.template
index 63e4e48f..f10419e0 100644
--- a/test/lib/ansible_test/config/cloud-config-scaleway.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-scaleway.ini.template
@@ -5,8 +5,7 @@
# 1) Running integration tests without using ansible-test.
#
# If you want to test against the Vultr public API,
-# fill in the values below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the values below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration.
[default]
diff --git a/test/lib/ansible_test/config/cloud-config-vcenter.ini.template b/test/lib/ansible_test/config/cloud-config-vcenter.ini.template
index 4e980137..eff8bf74 100644
--- a/test/lib/ansible_test/config/cloud-config-vcenter.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-vcenter.ini.template
@@ -6,8 +6,7 @@
# 2) Using the automatically provisioned VMware credentials in ansible-test.
#
# If you do not want to use the automatically provisioned temporary VMware credentials,
-# fill in the @VAR placeholders below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the @VAR placeholders below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration instead of temporary credentials.
#
# NOTE: Automatic provisioning of VMware credentials requires an ansible-core-ci API key.
diff --git a/test/lib/ansible_test/config/cloud-config-vultr.ini.template b/test/lib/ansible_test/config/cloud-config-vultr.ini.template
index 4530c326..48b82108 100644
--- a/test/lib/ansible_test/config/cloud-config-vultr.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-vultr.ini.template
@@ -5,8 +5,7 @@
# 1) Running integration tests without using ansible-test.
#
# If you want to test against the Vultr public API,
-# fill in the values below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the values below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration.
[default]
diff --git a/test/lib/ansible_test/config/inventory.networking.template b/test/lib/ansible_test/config/inventory.networking.template
index 40a9f207..a1545684 100644
--- a/test/lib/ansible_test/config/inventory.networking.template
+++ b/test/lib/ansible_test/config/inventory.networking.template
@@ -6,8 +6,7 @@
# 2) Using the `--platform` option to provision temporary network instances on EC2.
#
# If you do not want to use the automatically provisioned temporary network instances,
-# fill in the @VAR placeholders below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the @VAR placeholders below and save this file without the .template extension.
#
# NOTE: Automatic provisioning of network instances on EC2 requires an ansible-core-ci API key.
diff --git a/test/lib/ansible_test/config/inventory.winrm.template b/test/lib/ansible_test/config/inventory.winrm.template
index 3238b22e..34bbee2d 100644
--- a/test/lib/ansible_test/config/inventory.winrm.template
+++ b/test/lib/ansible_test/config/inventory.winrm.template
@@ -6,8 +6,7 @@
# 1) Using the `--windows` option to provision temporary Windows instances on EC2.
#
# If you do not want to use the automatically provisioned temporary Windows instances,
-# fill in the @VAR placeholders below and save this file without the .template extension,
-# into the tests/integration directory of the collection you're testing.
+# fill in the @VAR placeholders below and save this file without the .template extension.
#
# NOTE: Automatic provisioning of Windows instances on EC2 requires an ansible-core-ci API key.
#
diff --git a/test/sanity/code-smell/ansible-requirements.py b/test/sanity/code-smell/ansible-requirements.py
index 25d4ec88..4d1a652f 100644
--- a/test/sanity/code-smell/ansible-requirements.py
+++ b/test/sanity/code-smell/ansible-requirements.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import re
+import sys
def read_file(path):
diff --git a/test/sanity/code-smell/deprecated-config.requirements.in b/test/sanity/code-smell/deprecated-config.requirements.in
index 4e859bb8..859c4ee7 100644
--- a/test/sanity/code-smell/deprecated-config.requirements.in
+++ b/test/sanity/code-smell/deprecated-config.requirements.in
@@ -1,2 +1,2 @@
-jinja2
+jinja2 # ansible-core requirement
pyyaml
diff --git a/test/sanity/code-smell/deprecated-config.requirements.txt b/test/sanity/code-smell/deprecated-config.requirements.txt
index ae96cdf4..338e3f38 100644
--- a/test/sanity/code-smell/deprecated-config.requirements.txt
+++ b/test/sanity/code-smell/deprecated-config.requirements.txt
@@ -1,4 +1,6 @@
# edit "deprecated-config.requirements.in" and generate with: hacking/update-sanity-requirements.py --test deprecated-config
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
Jinja2==3.1.2
-MarkupSafe==2.1.3
-PyYAML==6.0.1
+MarkupSafe==2.1.1
+PyYAML==6.0
diff --git a/test/sanity/code-smell/obsolete-files.json b/test/sanity/code-smell/obsolete-files.json
index 3f69cdd6..02d39204 100644
--- a/test/sanity/code-smell/obsolete-files.json
+++ b/test/sanity/code-smell/obsolete-files.json
@@ -1,8 +1,6 @@
{
"include_symlinks": true,
"prefixes": [
- "docs/",
- "examples/",
"test/runner/",
"test/sanity/ansible-doc/",
"test/sanity/compile/",
diff --git a/test/sanity/code-smell/package-data.requirements.in b/test/sanity/code-smell/package-data.requirements.in
index 81b58bcf..3162feb6 100644
--- a/test/sanity/code-smell/package-data.requirements.in
+++ b/test/sanity/code-smell/package-data.requirements.in
@@ -1,8 +1,8 @@
build # required to build sdist
wheel # required to build wheel
jinja2
-pyyaml
-resolvelib < 1.1.0
-rstcheck < 6 # newer versions have too many dependencies
+pyyaml # ansible-core requirement
+resolvelib < 0.9.0
+rstcheck < 4 # match version used in other sanity tests
antsibull-changelog
-setuptools == 66.1.0 # minimum supported setuptools
+setuptools == 45.2.0 # minimum supported setuptools
diff --git a/test/sanity/code-smell/package-data.requirements.txt b/test/sanity/code-smell/package-data.requirements.txt
index ce0fb9cf..b66079d0 100644
--- a/test/sanity/code-smell/package-data.requirements.txt
+++ b/test/sanity/code-smell/package-data.requirements.txt
@@ -1,17 +1,18 @@
# edit "package-data.requirements.in" and generate with: hacking/update-sanity-requirements.py --test package-data
-antsibull-changelog==0.23.0
-build==1.0.3
-docutils==0.18.1
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
+antsibull-changelog==0.16.0
+build==0.10.0
+docutils==0.17.1
Jinja2==3.1.2
-MarkupSafe==2.1.3
-packaging==23.2
+MarkupSafe==2.1.1
+packaging==21.3
pyproject_hooks==1.0.0
-PyYAML==6.0.1
-resolvelib==1.0.1
-rstcheck==5.0.0
+pyparsing==3.0.9
+PyYAML==6.0
+resolvelib==0.8.1
+rstcheck==3.5.0
semantic-version==2.10.0
-setuptools==66.1.0
+setuptools==45.2.0
tomli==2.0.1
-types-docutils==0.18.3
-typing_extensions==4.8.0
-wheel==0.41.2
+wheel==0.41.0
diff --git a/test/sanity/code-smell/pymarkdown.config.json b/test/sanity/code-smell/pymarkdown.config.json
deleted file mode 100644
index afe83a35..00000000
--- a/test/sanity/code-smell/pymarkdown.config.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "plugins": {
- "line-length": {
- "line_length": 160,
- "code_block_line_length": 160
- },
- "first-line-heading": {
- "enabled": false
- }
- }
-}
diff --git a/test/sanity/code-smell/pymarkdown.json b/test/sanity/code-smell/pymarkdown.json
deleted file mode 100644
index 986848db..00000000
--- a/test/sanity/code-smell/pymarkdown.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "output": "path-line-column-code-message",
- "error_code": "ansible-test",
- "extensions": [
- ".md"
- ]
-}
diff --git a/test/sanity/code-smell/pymarkdown.py b/test/sanity/code-smell/pymarkdown.py
deleted file mode 100644
index 721c8937..00000000
--- a/test/sanity/code-smell/pymarkdown.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""Sanity test for Markdown files."""
-from __future__ import annotations
-
-import pathlib
-import re
-import subprocess
-import sys
-
-import typing as t
-
-
-def main() -> None:
- paths = sys.argv[1:] or sys.stdin.read().splitlines()
-
- cmd = [
- sys.executable,
- '-m', 'pymarkdown',
- '--config', pathlib.Path(__file__).parent / 'pymarkdown.config.json',
- '--strict-config',
- 'scan',
- ] + paths
-
- process = subprocess.run(
- cmd,
- stdin=subprocess.DEVNULL,
- capture_output=True,
- check=False,
- text=True,
- )
-
- if process.stderr:
- print(process.stderr.strip(), file=sys.stderr)
- sys.exit(1)
-
- if not (stdout := process.stdout.strip()):
- return
-
- pattern = re.compile(r'^(?P<path_line_column>[^:]*:[0-9]+:[0-9]+): (?P<code>[^:]*): (?P<message>.*) \((?P<aliases>.*)\)$')
- matches = parse_to_list_of_dict(pattern, stdout)
- results = [f"{match['path_line_column']}: {match['aliases'].split(', ')[0]}: {match['message']}" for match in matches]
-
- print('\n'.join(results))
-
-
-def parse_to_list_of_dict(pattern: re.Pattern, value: str) -> list[dict[str, t.Any]]:
- matched = []
- unmatched = []
-
- for line in value.splitlines():
- match = re.search(pattern, line)
-
- if match:
- matched.append(match.groupdict())
- else:
- unmatched.append(line)
-
- if unmatched:
- raise Exception('Pattern {pattern!r} did not match values:\n' + '\n'.join(unmatched))
-
- return matched
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/sanity/code-smell/pymarkdown.requirements.in b/test/sanity/code-smell/pymarkdown.requirements.in
deleted file mode 100644
index f0077713..00000000
--- a/test/sanity/code-smell/pymarkdown.requirements.in
+++ /dev/null
@@ -1 +0,0 @@
-pymarkdownlnt
diff --git a/test/sanity/code-smell/pymarkdown.requirements.txt b/test/sanity/code-smell/pymarkdown.requirements.txt
deleted file mode 100644
index f906e140..00000000
--- a/test/sanity/code-smell/pymarkdown.requirements.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-# edit "pymarkdown.requirements.in" and generate with: hacking/update-sanity-requirements.py --test pymarkdown
-application-properties==0.8.1
-Columnar==1.4.1
-pymarkdownlnt==0.9.13.4
-PyYAML==6.0.1
-tomli==2.0.1
-toolz==0.12.0
-typing_extensions==4.8.0
-wcwidth==0.2.8
diff --git a/test/sanity/code-smell/release-names.py b/test/sanity/code-smell/release-names.py
index cac3071d..81d90d81 100644
--- a/test/sanity/code-smell/release-names.py
+++ b/test/sanity/code-smell/release-names.py
@@ -22,7 +22,7 @@ Test that the release name is present in the list of used up release names
from __future__ import annotations
-import pathlib
+from yaml import safe_load
from ansible.release import __codename__
@@ -30,7 +30,8 @@ from ansible.release import __codename__
def main():
"""Entrypoint to the script"""
- releases = pathlib.Path('.github/RELEASE_NAMES.txt').read_text().splitlines()
+ with open('.github/RELEASE_NAMES.yml') as f:
+ releases = safe_load(f.read())
# Why this format? The file's sole purpose is to be read by a human when they need to know
# which release names have already been used. So:
@@ -40,7 +41,7 @@ def main():
if __codename__ == name:
break
else:
- print(f'.github/RELEASE_NAMES.txt: Current codename {__codename__!r} not present in the file')
+ print('.github/RELEASE_NAMES.yml: Current codename was not present in the file')
if __name__ == '__main__':
diff --git a/test/sanity/code-smell/release-names.requirements.in b/test/sanity/code-smell/release-names.requirements.in
new file mode 100644
index 00000000..c3726e8b
--- /dev/null
+++ b/test/sanity/code-smell/release-names.requirements.in
@@ -0,0 +1 @@
+pyyaml
diff --git a/test/sanity/code-smell/release-names.requirements.txt b/test/sanity/code-smell/release-names.requirements.txt
new file mode 100644
index 00000000..bb6a130c
--- /dev/null
+++ b/test/sanity/code-smell/release-names.requirements.txt
@@ -0,0 +1,4 @@
+# edit "release-names.requirements.in" and generate with: hacking/update-sanity-requirements.py --test release-names
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
+PyYAML==6.0
diff --git a/test/sanity/code-smell/test-constraints.py b/test/sanity/code-smell/test-constraints.py
index ac5bb4eb..df30fe12 100644
--- a/test/sanity/code-smell/test-constraints.py
+++ b/test/sanity/code-smell/test-constraints.py
@@ -65,6 +65,12 @@ def main():
# keeping constraints for tests other than sanity tests in one file helps avoid conflicts
print('%s:%d:%d: put the constraint (%s%s) in `%s`' % (path, lineno, 1, name, raw_constraints, constraints_path))
+ for name, requirements in frozen_sanity.items():
+ if len(set(req[3].group('constraints').strip() for req in requirements)) != 1:
+ for req in requirements:
+ print('%s:%d:%d: sanity constraint (%s) does not match others for package `%s`' % (
+ req[0], req[1], req[3].start('constraints') + 1, req[3].group('constraints'), name))
+
def check_ansible_test(path: str, requirements: list[tuple[int, str, re.Match]]) -> None:
sys.path.insert(0, str(pathlib.Path(__file__).parent.parent.parent.joinpath('lib')))
diff --git a/test/sanity/code-smell/update-bundled.requirements.txt b/test/sanity/code-smell/update-bundled.requirements.txt
index 53f1e434..d9785e7b 100644
--- a/test/sanity/code-smell/update-bundled.requirements.txt
+++ b/test/sanity/code-smell/update-bundled.requirements.txt
@@ -1,2 +1,3 @@
# edit "update-bundled.requirements.in" and generate with: hacking/update-sanity-requirements.py --test update-bundled
-packaging==23.2
+packaging==21.3
+pyparsing==3.0.9
diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt
index c683fbe7..869522b1 100644
--- a/test/sanity/ignore.txt
+++ b/test/sanity/ignore.txt
@@ -1,21 +1,16 @@
+.azure-pipelines/scripts/publish-codecov.py replace-urlopen
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
+lib/ansible/executor/playbook_executor.py pylint:disallowed-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
-lib/ansible/galaxy/collection/__init__.py mypy-3.10:attr-defined # inline ignore has no effect
-lib/ansible/galaxy/collection/__init__.py mypy-3.11:attr-defined # inline ignore has no effect
-lib/ansible/galaxy/collection/__init__.py mypy-3.12:attr-defined # inline ignore has no effect
-lib/ansible/galaxy/collection/gpg.py mypy-3.10:arg-type
-lib/ansible/galaxy/collection/gpg.py mypy-3.11:arg-type
-lib/ansible/galaxy/collection/gpg.py mypy-3.12:arg-type
-lib/ansible/parsing/yaml/constructor.py mypy-3.10:type-var # too many occurrences to ignore inline
-lib/ansible/parsing/yaml/constructor.py mypy-3.11:type-var # too many occurrences to ignore inline
-lib/ansible/parsing/yaml/constructor.py mypy-3.12:type-var # too many occurrences to ignore inline
+lib/ansible/executor/task_queue_manager.py pylint:disallowed-name
lib/ansible/keyword_desc.yml no-unwanted-files
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
+lib/ansible/modules/async_status.py use-argspec-type-path
lib/ansible/modules/async_status.py validate-modules!skip
lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module
lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required
@@ -26,48 +21,61 @@ lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
+lib/ansible/modules/copy.py pylint:disallowed-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
+lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
lib/ansible/modules/dnf.py validate-modules:parameter-invalid
-lib/ansible/modules/dnf5.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
+lib/ansible/modules/git.py pylint:disallowed-name
lib/ansible/modules/git.py use-argspec-type-path
+lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
+lib/ansible/modules/iptables.py pylint:disallowed-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
+lib/ansible/modules/pip.py pylint:disallowed-name
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
-lib/ansible/modules/replace.py pylint:used-before-assignment # false positive detection by pylint
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
+lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
lib/ansible/modules/stat.py validate-modules:parameter-invalid
+lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
+lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd_service.py validate-modules:parameter-invalid
+lib/ansible/modules/systemd_service.py validate-modules:return-syntax-error
+lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
+lib/ansible/modules/yum.py pylint:disallowed-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
-lib/ansible/module_utils/basic.py pylint:unused-import # deferring resolution to allow enabling the rule now
+lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
+lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
+lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
+lib/ansible/module_utils/compat/_selectors2.py pylint:disallowed-name
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.10!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.11!skip # pass/fail depends on presence of libselinux.so
-lib/ansible/module_utils/compat/selinux.py import-3.12!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
-lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
-lib/ansible/module_utils/distro/_distro.py pylint:undefined-variable # ignore bundled
lib/ansible/module_utils/distro/_distro.py pylint:using-constant-test # bundled code we don't want to modify
+lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
+lib/ansible/module_utils/facts/network/linux.py pylint:disallowed-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
@@ -85,23 +93,33 @@ lib/ansible/module_utils/six/__init__.py no-dict-iteritems
lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
-lib/ansible/module_utils/six/__init__.py pylint:trailing-comma-tuple
lib/ansible/module_utils/six/__init__.py replace-urlopen
+lib/ansible/module_utils/urls.py pylint:arguments-renamed
+lib/ansible/module_utils/urls.py pylint:disallowed-name
lib/ansible/module_utils/urls.py replace-urlopen
+lib/ansible/parsing/vault/__init__.py pylint:disallowed-name
lib/ansible/parsing/yaml/objects.py pylint:arguments-renamed
+lib/ansible/playbook/base.py pylint:disallowed-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
+lib/ansible/playbook/helpers.py pylint:disallowed-name
+lib/ansible/playbook/playbook_include.py pylint:arguments-renamed
lib/ansible/playbook/role/include.py pylint:arguments-renamed
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
lib/ansible/plugins/callback/__init__.py pylint:arguments-renamed
lib/ansible/plugins/inventory/advanced_host_list.py pylint:arguments-renamed
lib/ansible/plugins/inventory/host_list.py pylint:arguments-renamed
+lib/ansible/plugins/lookup/random_choice.py pylint:arguments-renamed
+lib/ansible/plugins/lookup/sequence.py pylint:disallowed-name
+lib/ansible/plugins/shell/cmd.py pylint:arguments-renamed
+lib/ansible/plugins/strategy/__init__.py pylint:disallowed-name
+lib/ansible/plugins/strategy/linear.py pylint:disallowed-name
lib/ansible/utils/collection_loader/_collection_finder.py pylint:deprecated-class
lib/ansible/utils/collection_loader/_collection_meta.py pylint:deprecated-class
+lib/ansible/vars/hostvars.py pylint:disallowed-name
test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
-test/integration/targets/ansible-test-sanity/ansible_collections/ns/col/plugins/plugin_utils/check_pylint.py pylint:disallowed-name # ignore, required for testing
test/integration/targets/ansible-test-integration/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-units/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-units/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
@@ -114,10 +132,8 @@ test/integration/targets/collections_relative_imports/collection_root/ansible_co
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/fork_safe_stdio/vendored_pty.py pep8!skip # vendored code
test/integration/targets/gathering_facts/library/bogus_facts shebang
-test/integration/targets/gathering_facts/library/dummy1 shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
-test/integration/targets/gathering_facts/library/slow shebang
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
@@ -127,6 +143,11 @@ test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
+test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/foo.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:disallowed-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
@@ -144,9 +165,28 @@ test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PS
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
+test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
+test/lib/ansible_test/_util/target/setup/requirements.py replace-urlopen
+test/support/integration/plugins/modules/timezone.py pylint:disallowed-name
+test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
+test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
+test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
+test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
+test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
+test/support/integration/plugins/module_utils/network/common/utils.py pylint:use-a-generator
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py pylint:used-before-assignment
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py pylint:consider-using-dict-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py pylint:use-a-generator
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py pylint:arguments-renamed
+test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py pylint:arguments-renamed
+test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
+test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:disallowed-name
+test/support/windows-integration/plugins/action/win_copy.py pylint:used-before-assignment
test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/module_utils/WebRequest.psm1 pslint!skip
test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.ps1 pslint!skip
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
@@ -167,11 +207,19 @@ test/support/windows-integration/plugins/modules/win_user_right.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_user.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_whoami.ps1 pslint!skip
+test/units/executor/test_play_iterator.py pylint:disallowed-name
+test/units/modules/test_apt.py pylint:disallowed-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
-test/units/module_utils/common/warnings/test_deprecate.py pylint:ansible-deprecated-no-version # testing Display.deprecated call without a version or date
-test/units/module_utils/common/warnings/test_deprecate.py pylint:ansible-deprecated-version # testing Deprecated version found in call to Display.deprecated or AnsibleModule.deprecate
+test/units/module_utils/basic/test_run_command.py pylint:disallowed-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
+test/units/module_utils/urls/test_fetch_url.py replace-urlopen
+test/units/module_utils/urls/test_gzip.py replace-urlopen
+test/units/module_utils/urls/test_Request.py replace-urlopen
+test/units/parsing/vault/test_vault.py pylint:disallowed-name
+test/units/playbook/role/test_role.py pylint:disallowed-name
+test/units/plugins/test_plugins.py pylint:disallowed-name
+test/units/template/test_templar.py pylint:disallowed-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
@@ -179,26 +227,3 @@ test/units/utils/collection_loader/fixtures/collections_masked/ansible_collectio
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
-.github/CONTRIBUTING.md pymarkdown:line-length
-hacking/backport/README.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/bug_internal_api.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/bug_wrong_repo.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/collections.md pymarkdown:line-length
-hacking/ticket_stubs/collections.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/guide_newbie_about_gh_and_contributing_to_ansible.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/no_thanks.md pymarkdown:line-length
-hacking/ticket_stubs/no_thanks.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/pr_duplicate.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/pr_merged.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/proposal.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/question_not_bug.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/resolved.md pymarkdown:no-bare-urls
-hacking/ticket_stubs/wider_discussion.md pymarkdown:no-bare-urls
-lib/ansible/galaxy/data/apb/README.md pymarkdown:line-length
-lib/ansible/galaxy/data/container/README.md pymarkdown:line-length
-lib/ansible/galaxy/data/default/role/README.md pymarkdown:line-length
-lib/ansible/galaxy/data/network/README.md pymarkdown:line-length
-README.md pymarkdown:line-length
-test/integration/targets/ansible-vault/invalid_format/README.md pymarkdown:no-bare-urls
-test/support/README.md pymarkdown:no-bare-urls
-test/units/cli/test_data/role_skeleton/README.md pymarkdown:line-length
diff --git a/test/support/README.md b/test/support/README.md
index d5244823..850bc921 100644
--- a/test/support/README.md
+++ b/test/support/README.md
@@ -1,4 +1,4 @@
-# IMPORTANT
+# IMPORTANT!
Files under this directory are not actual plugins and modules used by Ansible
and as such should **not be modified**. They are used for testing purposes
diff --git a/test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll2/__init__.py b/test/support/integration/plugins/module_utils/compat/__init__.py
index e69de29b..e69de29b 100644
--- a/test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll2/__init__.py
+++ b/test/support/integration/plugins/module_utils/compat/__init__.py
diff --git a/test/support/integration/plugins/module_utils/compat/ipaddress.py b/test/support/integration/plugins/module_utils/compat/ipaddress.py
new file mode 100644
index 00000000..c46ad72a
--- /dev/null
+++ b/test/support/integration/plugins/module_utils/compat/ipaddress.py
@@ -0,0 +1,2476 @@
+# -*- coding: utf-8 -*-
+
+# This code is part of Ansible, but is an independent component.
+# This particular file, and this file only, is based on
+# Lib/ipaddress.py of cpython
+# It is licensed under the PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+#
+# 1. This LICENSE AGREEMENT is between the Python Software Foundation
+# ("PSF"), and the Individual or Organization ("Licensee") accessing and
+# otherwise using this software ("Python") in source or binary form and
+# its associated documentation.
+#
+# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
+# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+# analyze, test, perform and/or display publicly, prepare derivative works,
+# distribute, and otherwise use Python alone or in any derivative version,
+# provided, however, that PSF's License Agreement and PSF's notice of copyright,
+# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved"
+# are retained in Python alone or in any derivative version prepared by Licensee.
+#
+# 3. In the event Licensee prepares a derivative work that is based on
+# or incorporates Python or any part thereof, and wants to make
+# the derivative work available to others as provided herein, then
+# Licensee hereby agrees to include in any such work a brief summary of
+# the changes made to Python.
+#
+# 4. PSF is making Python available to Licensee on an "AS IS"
+# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+# INFRINGE ANY THIRD PARTY RIGHTS.
+#
+# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+#
+# 6. This License Agreement will automatically terminate upon a material
+# breach of its terms and conditions.
+#
+# 7. Nothing in this License Agreement shall be deemed to create any
+# relationship of agency, partnership, or joint venture between PSF and
+# Licensee. This License Agreement does not grant permission to use PSF
+# trademarks or trade name in a trademark sense to endorse or promote
+# products or services of Licensee, or any third party.
+#
+# 8. By copying, installing or otherwise using Python, Licensee
+# agrees to be bound by the terms and conditions of this License
+# Agreement.
+
+# Copyright 2007 Google Inc.
+# Licensed to PSF under a Contributor Agreement.
+
+"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
+
+This library is used to create/poke/manipulate IPv4 and IPv6 addresses
+and networks.
+
+"""
+
+from __future__ import unicode_literals
+
+
+import itertools
+import struct
+
+
+# The following makes it easier for us to script updates of the bundled code and is not part of
+# upstream
+_BUNDLED_METADATA = {"pypi_name": "ipaddress", "version": "1.0.22"}
+
+__version__ = '1.0.22'
+
+# Compatibility functions
+_compat_int_types = (int,)
+try:
+ _compat_int_types = (int, long)
+except NameError:
+ pass
+try:
+ _compat_str = unicode
+except NameError:
+ _compat_str = str
+ assert bytes != str
+if b'\0'[0] == 0: # Python 3 semantics
+ def _compat_bytes_to_byte_vals(byt):
+ return byt
+else:
+ def _compat_bytes_to_byte_vals(byt):
+ return [struct.unpack(b'!B', b)[0] for b in byt]
+try:
+ _compat_int_from_byte_vals = int.from_bytes
+except AttributeError:
+ def _compat_int_from_byte_vals(bytvals, endianess):
+ assert endianess == 'big'
+ res = 0
+ for bv in bytvals:
+ assert isinstance(bv, _compat_int_types)
+ res = (res << 8) + bv
+ return res
+
+
+def _compat_to_bytes(intval, length, endianess):
+ assert isinstance(intval, _compat_int_types)
+ assert endianess == 'big'
+ if length == 4:
+ if intval < 0 or intval >= 2 ** 32:
+ raise struct.error("integer out of range for 'I' format code")
+ return struct.pack(b'!I', intval)
+ elif length == 16:
+ if intval < 0 or intval >= 2 ** 128:
+ raise struct.error("integer out of range for 'QQ' format code")
+ return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)
+ else:
+ raise NotImplementedError()
+
+
+if hasattr(int, 'bit_length'):
+ # Not int.bit_length , since that won't work in 2.7 where long exists
+ def _compat_bit_length(i):
+ return i.bit_length()
+else:
+ def _compat_bit_length(i):
+ for res in itertools.count():
+ if i >> res == 0:
+ return res
+
+
+def _compat_range(start, end, step=1):
+ assert step > 0
+ i = start
+ while i < end:
+ yield i
+ i += step
+
+
+class _TotalOrderingMixin(object):
+ __slots__ = ()
+
+ # Helper that derives the other comparison operations from
+ # __lt__ and __eq__
+ # We avoid functools.total_ordering because it doesn't handle
+ # NotImplemented correctly yet (http://bugs.python.org/issue10042)
+ def __eq__(self, other):
+ raise NotImplementedError
+
+ def __ne__(self, other):
+ equal = self.__eq__(other)
+ if equal is NotImplemented:
+ return NotImplemented
+ return not equal
+
+ def __lt__(self, other):
+ raise NotImplementedError
+
+ def __le__(self, other):
+ less = self.__lt__(other)
+ if less is NotImplemented or not less:
+ return self.__eq__(other)
+ return less
+
+ def __gt__(self, other):
+ less = self.__lt__(other)
+ if less is NotImplemented:
+ return NotImplemented
+ equal = self.__eq__(other)
+ if equal is NotImplemented:
+ return NotImplemented
+ return not (less or equal)
+
+ def __ge__(self, other):
+ less = self.__lt__(other)
+ if less is NotImplemented:
+ return NotImplemented
+ return not less
+
+
+IPV4LENGTH = 32
+IPV6LENGTH = 128
+
+
+class AddressValueError(ValueError):
+ """A Value Error related to the address."""
+
+
+class NetmaskValueError(ValueError):
+ """A Value Error related to the netmask."""
+
+
+def ip_address(address):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Address or IPv6Address object.
+
+ Raises:
+ ValueError: if the *address* passed isn't either a v4 or a v6
+ address
+
+ """
+ try:
+ return IPv4Address(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ try:
+ return IPv6Address(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ if isinstance(address, bytes):
+ raise AddressValueError(
+ '%r does not appear to be an IPv4 or IPv6 address. '
+ 'Did you pass in a bytes (str in Python 2) instead of'
+ ' a unicode object?' % address)
+
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
+ address)
+
+
+def ip_network(address, strict=True):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP network. Either IPv4 or
+ IPv6 networks may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Network or IPv6Network object.
+
+ Raises:
+ ValueError: if the string passed isn't either a v4 or a v6
+ address. Or if the network has host bits set.
+
+ """
+ try:
+ return IPv4Network(address, strict)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ try:
+ return IPv6Network(address, strict)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ if isinstance(address, bytes):
+ raise AddressValueError(
+ '%r does not appear to be an IPv4 or IPv6 network. '
+ 'Did you pass in a bytes (str in Python 2) instead of'
+ ' a unicode object?' % address)
+
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
+ address)
+
+
+def ip_interface(address):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Interface or IPv6Interface object.
+
+ Raises:
+ ValueError: if the string passed isn't either a v4 or a v6
+ address.
+
+ Notes:
+ The IPv?Interface classes describe an Address on a particular
+ Network, so they're basically a combination of both the Address
+ and Network classes.
+
+ """
+ try:
+ return IPv4Interface(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ try:
+ return IPv6Interface(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
+ address)
+
+
+def v4_int_to_packed(address):
+ """Represent an address as 4 packed bytes in network (big-endian) order.
+
+ Args:
+ address: An integer representation of an IPv4 IP address.
+
+ Returns:
+ The integer address packed as 4 bytes in network (big-endian) order.
+
+ Raises:
+ ValueError: If the integer is negative or too large to be an
+ IPv4 IP address.
+
+ """
+ try:
+ return _compat_to_bytes(address, 4, 'big')
+ except (struct.error, OverflowError):
+ raise ValueError("Address negative or too large for IPv4")
+
+
+def v6_int_to_packed(address):
+ """Represent an address as 16 packed bytes in network (big-endian) order.
+
+ Args:
+ address: An integer representation of an IPv6 IP address.
+
+ Returns:
+ The integer address packed as 16 bytes in network (big-endian) order.
+
+ """
+ try:
+ return _compat_to_bytes(address, 16, 'big')
+ except (struct.error, OverflowError):
+ raise ValueError("Address negative or too large for IPv6")
+
+
+def _split_optional_netmask(address):
+ """Helper to split the netmask and raise AddressValueError if needed"""
+ addr = _compat_str(address).split('/')
+ if len(addr) > 2:
+ raise AddressValueError("Only one '/' permitted in %r" % address)
+ return addr
+
+
+def _find_address_range(addresses):
+ """Find a sequence of sorted deduplicated IPv#Address.
+
+ Args:
+ addresses: a list of IPv#Address objects.
+
+ Yields:
+ A tuple containing the first and last IP addresses in the sequence.
+
+ """
+ it = iter(addresses)
+ first = last = next(it) # pylint: disable=stop-iteration-return
+ for ip in it:
+ if ip._ip != last._ip + 1:
+ yield first, last
+ first = ip
+ last = ip
+ yield first, last
+
+
+def _count_righthand_zero_bits(number, bits):
+ """Count the number of zero bits on the right hand side.
+
+ Args:
+ number: an integer.
+ bits: maximum number of bits to count.
+
+ Returns:
+ The number of zero bits on the right hand side of the number.
+
+ """
+ if number == 0:
+ return bits
+ return min(bits, _compat_bit_length(~number & (number - 1)))
+
+
def summarize_address_range(first, last):
    """Summarize a network range given the first and last IP addresses.

    Example:
        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
        ...                              IPv4Address('192.0.2.130')))
        ...                                #doctest: +NORMALIZE_WHITESPACE
        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
         IPv4Network('192.0.2.130/32')]

    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.

    Returns:
        An iterator of the summarized IPv(4|6) network objects.

    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version of the first address is not 4 or 6.

    """
    if (not (isinstance(first, _BaseAddress) and
             isinstance(last, _BaseAddress))):
        raise TypeError('first and last must be IP addresses, not networks')
    if first.version != last.version:
        raise TypeError("%s and %s are not of the same version" % (
            first, last))
    if first > last:
        raise ValueError('last IP address must be greater than first')

    if first.version == 4:
        ip = IPv4Network
    elif first.version == 6:
        ip = IPv6Network
    else:
        raise ValueError('unknown IP version')

    ip_bits = first._max_prefixlen
    first_int = first._ip
    last_int = last._ip
    while first_int <= last_int:
        # The widest CIDR block starting at first_int is limited both by
        # its alignment (number of trailing zero bits) and by how much
        # of the remaining range it may cover without overshooting last.
        nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
                    _compat_bit_length(last_int - first_int + 1) - 1)
        net = ip((first_int, ip_bits - nbits))
        yield net
        first_int += 1 << nbits
        # Stop if the block ended exactly at the all-ones address;
        # advancing further would wrap past the address space.
        if first_int - 1 == ip._ALL_ONES:
            break
+
+
def _collapse_addresses_internal(addresses):
    """Loops through the addresses, collapsing concurrent netblocks.

    Example:

        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')

        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
        [IPv4Network('192.0.2.0/24')]

    This shouldn't be called directly; it is called via
    collapse_addresses([]).

    Args:
        addresses: A list of IPv4Network's or IPv6Network's

    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.

    """
    # First merge
    to_merge = list(addresses)
    subnets = {}
    while to_merge:
        net = to_merge.pop()
        supernet = net.supernet()
        existing = subnets.get(supernet)
        if existing is None:
            # First sibling seen under this supernet; remember it.
            subnets[supernet] = net
        elif existing != net:
            # Merge consecutive subnets: both halves of the supernet are
            # present, so replace them with the supernet itself and
            # re-queue it for possible further merging.
            del subnets[supernet]
            to_merge.append(supernet)
    # Then iterate over resulting networks, skipping subsumed subnets
    last = None
    for net in sorted(subnets.values()):
        if last is not None:
            # Since they are sorted,
            # last.network_address <= net.network_address is a given.
            if last.broadcast_address >= net.broadcast_address:
                continue
        yield net
        last = net
+
+
def collapse_addresses(addresses):
    """Collapse a list of IP objects.

    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
        [IPv4Network('192.0.2.0/24')]

    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.

    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.

    Raises:
        TypeError: If passed a list of mixed version objects.

    """
    addrs = []
    ips = []
    nets = []

    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            # A single-host network is treated as a bare address.
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            try:
                # Interface-like objects expose .ip ...
                ips.append(ip.ip)
            except AttributeError:
                # ... plain network objects expose network_address instead.
                ips.append(ip.network_address)
        else:
            if nets and nets[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, nets[-1]))
            nets.append(ip)

    # sort and dedup
    ips = sorted(set(ips))

    # find consecutive address ranges in the sorted sequence and summarize them
    if ips:
        for first, last in _find_address_range(ips):
            addrs.extend(summarize_address_range(first, last))

    return _collapse_addresses_internal(addrs + nets)
+
+
def get_mixed_type_key(obj):
    """Return a sort key usable across both Network and Address objects.

    Address and Network objects are not sortable by default; they're
    fundamentally different, so an expression such as

        IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')

    doesn't make any sense.  When you nevertheless need a combined
    ordering, pass this function as the key= argument to sorted().

    Args:
        obj: either a Network or Address object.
    Returns:
        appropriate key.

    """
    if isinstance(obj, _BaseNetwork):
        return obj._get_networks_key()
    if isinstance(obj, _BaseAddress):
        return obj._get_address_key()
    # Neither kind of IP object: signal that no key is available.
    return NotImplemented
+
+
class _IPAddressBase(_TotalOrderingMixin):

    """The mother class.

    Shared, version-independent helpers for both address and network
    objects: string forms, integer/packed validation, and conversions
    between prefix lengths and netmask integers.
    """

    __slots__ = ()

    @property
    def exploded(self):
        """Return the longhand version of the IP address as a string."""
        return self._explode_shorthand_ip_string()

    @property
    def compressed(self):
        """Return the shorthand version of the IP address as a string."""
        return _compat_str(self)

    @property
    def reverse_pointer(self):
        """The name of the reverse DNS pointer for the IP address, e.g.:
            >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
            '1.0.0.127.in-addr.arpa'
            >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
            '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'

        """
        return self._reverse_pointer()

    @property
    def version(self):
        # Abstract: concrete subclasses override this with their version.
        msg = '%200s has no version specified' % (type(self),)
        raise NotImplementedError(msg)

    def _check_int_address(self, address):
        # Reject integers outside [0, _ALL_ONES] for this IP version.
        if address < 0:
            msg = "%d (< 0) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._version))
        if address > self._ALL_ONES:
            msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._max_prefixlen,
                                           self._version))

    def _check_packed_address(self, address, expected_len):
        # Reject packed byte strings of the wrong length for this version.
        address_len = len(address)
        if address_len != expected_len:
            msg = (
                '%r (len %d != %d) is not permitted as an IPv%d address. '
                'Did you pass in a bytes (str in Python 2) instead of'
                ' a unicode object?')
            raise AddressValueError(msg % (address, address_len,
                                           expected_len, self._version))

    @classmethod
    def _ip_int_from_prefix(cls, prefixlen):
        """Turn the prefix length into a bitwise netmask

        Args:
            prefixlen: An integer, the prefix length.

        Returns:
            An integer.

        """
        return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)

    @classmethod
    def _prefix_from_ip_int(cls, ip_int):
        """Return prefix length from the bitwise netmask.

        Args:
            ip_int: An integer, the netmask in expanded bitwise format

        Returns:
            An integer, the prefix length.

        Raises:
            ValueError: If the input intermingles zeroes & ones
        """
        trailing_zeroes = _count_righthand_zero_bits(ip_int,
                                                     cls._max_prefixlen)
        prefixlen = cls._max_prefixlen - trailing_zeroes
        # A valid netmask is a run of ones followed by a run of zeroes;
        # after stripping the trailing zeroes, what remains must be all ones.
        leading_ones = ip_int >> trailing_zeroes
        all_ones = (1 << prefixlen) - 1
        if leading_ones != all_ones:
            byteslen = cls._max_prefixlen // 8
            details = _compat_to_bytes(ip_int, byteslen, 'big')
            msg = 'Netmask pattern %r mixes zeroes & ones'
            raise ValueError(msg % details)
        return prefixlen

    @classmethod
    def _report_invalid_netmask(cls, netmask_str):
        # Shared raise helper so all netmask errors share one message form.
        msg = '%r is not a valid netmask' % netmask_str
        raise NetmaskValueError(msg)

    @classmethod
    def _prefix_from_prefix_string(cls, prefixlen_str):
        """Return prefix length from a numeric string

        Args:
            prefixlen_str: The string to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask
        """
        # int allows a leading +/- as well as surrounding whitespace,
        # so we ensure that isn't the case
        if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
            cls._report_invalid_netmask(prefixlen_str)
        try:
            prefixlen = int(prefixlen_str)
        except ValueError:
            cls._report_invalid_netmask(prefixlen_str)
        if not (0 <= prefixlen <= cls._max_prefixlen):
            cls._report_invalid_netmask(prefixlen_str)
        return prefixlen

    @classmethod
    def _prefix_from_ip_string(cls, ip_str):
        """Turn a netmask/hostmask string into a prefix length

        Args:
            ip_str: The netmask/hostmask to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask/hostmask
        """
        # Parse the netmask/hostmask like an IP address.
        try:
            ip_int = cls._ip_int_from_string(ip_str)
        except AddressValueError:
            # _report_invalid_netmask always raises, ending this call.
            cls._report_invalid_netmask(ip_str)

        # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
        # Note that the two ambiguous cases (all-ones and all-zeroes) are
        # treated as netmasks.
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            pass

        # Invert the bits, and try matching a /0+1+/ hostmask instead.
        ip_int ^= cls._ALL_ONES
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            cls._report_invalid_netmask(ip_str)

    def __reduce__(self):
        # Pickle via the string form; the constructor re-parses it.
        return self.__class__, (_compat_str(self),)
+
+
class _BaseAddress(_IPAddressBase):

    """A generic IP object.

    Version-independent behaviour shared by single IP addresses
    (as opposed to networks).
    """

    __slots__ = ()

    def __int__(self):
        return self._ip

    def __eq__(self, other):
        try:
            return self._ip == other._ip and self._version == other._version
        except AttributeError:
            return NotImplemented

    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseAddress):
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                self, other))
        # Same version: the integer values decide the ordering.
        return self._ip < other._ip

    # Shorthand for integer addition and subtraction.  This is not
    # meant to ever support addition/subtraction of addresses.
    def __add__(self, other):
        if isinstance(other, _compat_int_types):
            return self.__class__(int(self) + other)
        return NotImplemented

    def __sub__(self, other):
        if isinstance(other, _compat_int_types):
            return self.__class__(int(self) - other)
        return NotImplemented

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))

    def __str__(self):
        return _compat_str(self._string_from_ip_int(self._ip))

    def __hash__(self):
        return hash(hex(int(self._ip)))

    def _get_address_key(self):
        return (self._version, self)

    def __reduce__(self):
        # Pickle via the integer form for addresses.
        return self.__class__, (self._ip,)
+
+
class _BaseNetwork(_IPAddressBase):

    """A generic IP network object.

    This IP class contains the version independent methods which are
    used by networks.

    """
    def __init__(self, address):
        # NOTE(review): `address` is unused here; subclass constructors
        # (not shown in this chunk) presumably do the parsing. This base
        # only initializes the per-instance property cache.
        self._cache = {}

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))

    def __str__(self):
        return '%s/%d' % (self.network_address, self.prefixlen)

    def hosts(self):
        """Generate Iterator over usable hosts in a network.

        This is like __iter__ except it doesn't return the network
        or broadcast addresses.

        """
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        # Skip the network address (network) and broadcast address
        # (broadcast) by iterating the open interval between them.
        for x in _compat_range(network + 1, broadcast):
            yield self._address_class(x)

    def __iter__(self):
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in _compat_range(network, broadcast + 1):
            yield self._address_class(x)

    def __getitem__(self, n):
        # Index addresses in the network; negative n counts back from
        # the broadcast address, mirroring sequence indexing.
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        if n >= 0:
            if network + n > broadcast:
                raise IndexError('address out of range')
            return self._address_class(network + n)
        else:
            n += 1
            if broadcast + n < network:
                raise IndexError('address out of range')
            return self._address_class(broadcast + n)

    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseNetwork):
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                self, other))
        # Order by network address first, then by netmask.
        if self.network_address != other.network_address:
            return self.network_address < other.network_address
        if self.netmask != other.netmask:
            return self.netmask < other.netmask
        return False

    def __eq__(self, other):
        try:
            return (self._version == other._version and
                    self.network_address == other.network_address and
                    int(self.netmask) == int(other.netmask))
        except AttributeError:
            return NotImplemented

    def __hash__(self):
        return hash(int(self.network_address) ^ int(self.netmask))

    def __contains__(self, other):
        # always false if one is v4 and the other is v6.
        if self._version != other._version:
            return False
        # dealing with another network.
        if isinstance(other, _BaseNetwork):
            # Networks are never "in" a network; use subnet_of for that.
            return False
        # dealing with another address
        else:
            # address
            return (int(self.network_address) <= int(other._ip) <=
                    int(self.broadcast_address))

    def overlaps(self, other):
        """Tell if self is partly contained in other."""
        return self.network_address in other or (
            self.broadcast_address in other or (
                other.network_address in self or (
                    other.broadcast_address in self)))

    @property
    def broadcast_address(self):
        # Computed once, then served from the instance cache.
        x = self._cache.get('broadcast_address')
        if x is None:
            x = self._address_class(int(self.network_address) |
                                    int(self.hostmask))
            self._cache['broadcast_address'] = x
        return x

    @property
    def hostmask(self):
        # The hostmask is the bitwise complement of the netmask; cached.
        x = self._cache.get('hostmask')
        if x is None:
            x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
            self._cache['hostmask'] = x
        return x

    @property
    def with_prefixlen(self):
        return '%s/%d' % (self.network_address, self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self.network_address, self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self.network_address, self.hostmask)

    @property
    def num_addresses(self):
        """Number of hosts in the current subnet."""
        return int(self.broadcast_address) - int(self.network_address) + 1

    @property
    def _address_class(self):
        # Returning bare address objects (rather than interfaces) allows for
        # more consistent behaviour across the network address, broadcast
        # address and individual host addresses.
        msg = '%200s has no associated address class' % (type(self),)
        raise NotImplementedError(msg)

    @property
    def prefixlen(self):
        return self._prefixlen

    def address_exclude(self, other):
        """Remove an address from a larger block.

        For example:

            addr1 = ip_network('192.0.2.0/28')
            addr2 = ip_network('192.0.2.1/32')
            list(addr1.address_exclude(addr2)) =
                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
                 IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]

        or IPv6:

            addr1 = ip_network('2001:db8::1/32')
            addr2 = ip_network('2001:db8::1/128')
            list(addr1.address_exclude(addr2)) =
                [ip_network('2001:db8::1/128'),
                 ip_network('2001:db8::2/127'),
                 ip_network('2001:db8::4/126'),
                 ip_network('2001:db8::8/125'),
                 ...
                 ip_network('2001:db8:8000::/33')]

        Args:
            other: An IPv4Network or IPv6Network object of the same type.

        Returns:
            An iterator of the IPv(4|6)Network objects which is self
            minus other.

        Raises:
            TypeError: If self and other are of differing address
              versions, or if other is not a network object.
            ValueError: If other is not completely contained by self.

        """
        if not self._version == other._version:
            raise TypeError("%s and %s are not of the same version" % (
                self, other))

        if not isinstance(other, _BaseNetwork):
            raise TypeError("%s is not a network object" % other)

        if not other.subnet_of(self):
            raise ValueError('%s not contained in %s' % (other, self))
        if other == self:
            # Excluding the whole of self leaves nothing to yield.
            return

        # Make sure we're comparing the network of other.
        other = other.__class__('%s/%s' % (other.network_address,
                                           other.prefixlen))

        # Repeatedly split self in half; yield the half that does NOT
        # contain `other` and recurse into the half that does, until a
        # half equals `other` exactly.
        s1, s2 = self.subnets()
        while s1 != other and s2 != other:
            if other.subnet_of(s1):
                yield s2
                s1, s2 = s1.subnets()
            elif other.subnet_of(s2):
                yield s1
                s1, s2 = s2.subnets()
            else:
                # If we got here, there's a bug somewhere.
                raise AssertionError('Error performing exclusion: '
                                     's1: %s s2: %s other: %s' %
                                     (s1, s2, other))
        if s1 == other:
            yield s2
        elif s2 == other:
            yield s1
        else:
            # If we got here, there's a bug somewhere.
            raise AssertionError('Error performing exclusion: '
                                 's1: %s s2: %s other: %s' %
                                 (s1, s2, other))

    def compare_networks(self, other):
        """Compare two IP objects.

        This is only concerned about the comparison of the integer
        representation of the network addresses.  This means that the
        host bits aren't considered at all in this method.  If you want
        to compare host bits, you can easily enough do a
        'HostA._ip < HostB._ip'

        Args:
            other: An IP object.

        Returns:
            If the IP versions of self and other are the same, returns:

            -1 if self < other:
              eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
              IPv6Network('2001:db8::1000/124') <
              IPv6Network('2001:db8::2000/124')
            0 if self == other
              eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
              IPv6Network('2001:db8::1000/124') ==
              IPv6Network('2001:db8::1000/124')
            1 if self > other
              eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
              IPv6Network('2001:db8::2000/124') >
              IPv6Network('2001:db8::1000/124')

        Raises:
            TypeError if the IP versions are different.

        """
        # does this need to raise a ValueError?
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        # self._version == other._version below here:
        if self.network_address < other.network_address:
            return -1
        if self.network_address > other.network_address:
            return 1
        # self.network_address == other.network_address below here:
        if self.netmask < other.netmask:
            return -1
        if self.netmask > other.netmask:
            return 1
        return 0

    def _get_networks_key(self):
        """Network-only key function.

        Returns an object that identifies this address' network and
        netmask. This function is a suitable "key" argument for sorted()
        and list.sort().

        """
        return (self._version, self.network_address, self.netmask)

    def subnets(self, prefixlen_diff=1, new_prefix=None):
        """The subnets which join to make the current subnet.

        In the case that self contains only one IP
        (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
        for IPv6), yield an iterator with just ourself.

        Args:
            prefixlen_diff: An integer, the amount the prefix length
              should be increased by. This should not be set if
              new_prefix is also set.
            new_prefix: The desired new prefix length. This must be a
              larger number (smaller prefix) than the existing prefix.
              This should not be set if prefixlen_diff is also set.

        Returns:
            An iterator of IPv(4|6) objects.

        Raises:
            ValueError: The prefixlen_diff is too small or too large.
                OR
            prefixlen_diff and new_prefix are both set or new_prefix
              is a smaller number than the current prefix (smaller
              number means a larger network)

        """
        if self._prefixlen == self._max_prefixlen:
            # A single-address network has no smaller subnets.
            yield self
            return

        if new_prefix is not None:
            if new_prefix < self._prefixlen:
                raise ValueError('new prefix must be longer')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = new_prefix - self._prefixlen

        if prefixlen_diff < 0:
            raise ValueError('prefix length diff must be > 0')
        new_prefixlen = self._prefixlen + prefixlen_diff

        if new_prefixlen > self._max_prefixlen:
            raise ValueError(
                'prefix length diff %d is invalid for netblock %s' % (
                    new_prefixlen, self))

        start = int(self.network_address)
        end = int(self.broadcast_address) + 1
        # Each subnet covers 2**(max_prefixlen - new_prefixlen)
        # addresses; step by that amount across the whole range.
        step = (int(self.hostmask) + 1) >> prefixlen_diff
        for new_addr in _compat_range(start, end, step):
            current = self.__class__((new_addr, new_prefixlen))
            yield current

    def supernet(self, prefixlen_diff=1, new_prefix=None):
        """The supernet containing the current network.

        Args:
            prefixlen_diff: An integer, the amount the prefix length of
              the network should be decreased by.  For example, given a
              /24 network and a prefixlen_diff of 3, a supernet with a
              /21 netmask is returned.

        Returns:
            An IPv4 network object.

        Raises:
            ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
              a negative prefix length.
                OR
            If prefixlen_diff and new_prefix are both set or new_prefix is a
              larger number than the current prefix (larger number means a
              smaller network)

        """
        if self._prefixlen == 0:
            # /0 already covers the whole address space.
            return self

        if new_prefix is not None:
            if new_prefix > self._prefixlen:
                raise ValueError('new prefix must be shorter')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = self._prefixlen - new_prefix

        new_prefixlen = self.prefixlen - prefixlen_diff
        if new_prefixlen < 0:
            raise ValueError(
                'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
                (self.prefixlen, prefixlen_diff))
        # Mask the network address down to the shorter prefix.
        return self.__class__((
            int(self.network_address) & (int(self.netmask) << prefixlen_diff),
            new_prefixlen))

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        """
        return (self.network_address.is_multicast and
                self.broadcast_address.is_multicast)

    @staticmethod
    def _is_subnet_of(a, b):
        try:
            # Always false if one is v4 and the other is v6.
            if a._version != b._version:
                raise TypeError("%s and %s are not of the same version" % (a, b))
            return (b.network_address <= a.network_address and
                    b.broadcast_address >= a.broadcast_address)
        except AttributeError:
            raise TypeError("Unable to test subnet containment "
                            "between %s and %s" % (a, b))

    def subnet_of(self, other):
        """Return True if this network is a subnet of other."""
        return self._is_subnet_of(self, other)

    def supernet_of(self, other):
        """Return True if this network is a supernet of other."""
        return self._is_subnet_of(other, self)

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        """
        return (self.network_address.is_reserved and
                self.broadcast_address.is_reserved)

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        """
        return (self.network_address.is_link_local and
                self.broadcast_address.is_link_local)

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        """
        return (self.network_address.is_private and
                self.broadcast_address.is_private)

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        """
        return not self.is_private

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        """
        return (self.network_address.is_unspecified and
                self.broadcast_address.is_unspecified)

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        """
        return (self.network_address.is_loopback and
                self.broadcast_address.is_loopback)
+
+
class _BaseV4(object):

    """Base IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.

    """

    __slots__ = ()
    _version = 4
    # Equivalent to 255.255.255.255 or 32 bits of 1's.
    _ALL_ONES = (2 ** IPV4LENGTH) - 1
    _DECIMAL_DIGITS = frozenset('0123456789')

    # the valid octets for host and netmasks. only useful for IPv4.
    _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])

    _max_prefixlen = IPV4LENGTH
    # There are only a handful of valid v4 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    def _explode_shorthand_ip_string(self):
        # IPv4 has no shorthand form, so the "exploded" form is just str().
        return _compat_str(self)

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, _compat_int_types):
                prefixlen = arg
            else:
                try:
                    # Check for a netmask in prefix length form
                    prefixlen = cls._prefix_from_prefix_string(arg)
                except NetmaskValueError:
                    # Check for a netmask or hostmask in dotted-quad form.
                    # This may raise NetmaskValueError.
                    prefixlen = cls._prefix_from_ip_string(arg)
            netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.

        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        octets = ip_str.split('.')
        if len(octets) != 4:
            raise AddressValueError("Expected 4 octets in %r" % ip_str)

        try:
            # Each octet becomes one big-endian byte of the 32-bit value.
            return _compat_int_from_byte_vals(
                map(cls._parse_octet, octets), 'big')
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str))

    @classmethod
    def _parse_octet(cls, octet_str):
        """Convert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].

        """
        if not octet_str:
            raise ValueError("Empty octet not permitted")
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not cls._DECIMAL_DIGITS.issuperset(octet_str):
            msg = "Only decimal digits permitted in %r"
            raise ValueError(msg % octet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(octet_str) > 3:
            msg = "At most 3 characters permitted in %r"
            raise ValueError(msg % octet_str)
        # Convert to integer (we know digits are legal)
        octet_int = int(octet_str, 10)
        # Any octets that look like they *might* be written in octal,
        # and which don't look exactly the same in both octal and
        # decimal are rejected as ambiguous
        if octet_int > 7 and octet_str[0] == '0':
            msg = "Ambiguous (octal/decimal) value in %r not permitted"
            raise ValueError(msg % octet_str)
        if octet_int > 255:
            raise ValueError("Octet %d (> 255) not permitted" % octet_int)
        return octet_int

    @classmethod
    def _string_from_ip_int(cls, ip_int):
        """Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.

        """
        # Iterating bytes yields ints on Python 3 but 1-char str on
        # Python 2, hence the unpack fallback per element.
        return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
                                    if isinstance(b, bytes)
                                    else b)
                        for b in _compat_to_bytes(ip_int, 4, 'big'))

    def _is_hostmask(self, ip_str):
        """Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.

        """
        bits = ip_str.split('.')
        try:
            parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
        except ValueError:
            return False
        if len(parts) != len(bits):
            # Some octet was not a valid mask value.
            return False
        if parts[0] < parts[-1]:
            # Hostmasks ascend (e.g. 0.0.0.255); netmasks descend.
            return True
        return False

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv4 address.

        This implements the method described in RFC1035 3.5.

        """
        reverse_octets = _compat_str(self).split('.')[::-1]
        return '.'.join(reverse_octets) + '.in-addr.arpa'

    @property
    def max_prefixlen(self):
        return self._max_prefixlen

    @property
    def version(self):
        return self._version
+
+
class IPv4Address(_BaseV4, _BaseAddress):

    """Represent and manipulate single IPv4 Addresses."""

    __slots__ = ('_ip', '__weakref__')

    def __init__(self, address):

        """
        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv4Address('192.0.2.1') == IPv4Address(3221225985).
              or, more generally
              IPv4Address(int(IPv4Address('192.0.2.1'))) ==
                IPv4Address('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.

        """
        # Efficient constructor from integer.
        if isinstance(address, _compat_int_types):
            self._check_int_address(address)
            self._ip = address
            return

        # Constructing from a packed address
        if isinstance(address, bytes):
            self._check_packed_address(address, 4)
            bvs = _compat_bytes_to_byte_vals(address)
            self._ip = _compat_int_from_byte_vals(bvs, 'big')
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP string.
        addr_str = _compat_str(address)
        if '/' in addr_str:
            # A prefix belongs on IPv4Network/IPv4Interface, not here.
            raise AddressValueError("Unexpected '/' in %r" % address)
        self._ip = self._ip_int_from_string(addr_str)

    @property
    def packed(self):
        """The binary representation of this address."""
        return v4_int_to_packed(self._ip)

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within the
            reserved IPv4 Network range.

        """
        return self in self._constants._reserved_network

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry.

        """
        return any(self in net for net in self._constants._private_networks)

    @property
    def is_global(self):
        # Global means: not in the shared/public special range and not
        # otherwise private per the registry list.
        return (
            self not in self._constants._public_network and
            not self.is_private)

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is multicast.
            See RFC 3171 for details.

        """
        return self in self._constants._multicast_network

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 5735 3.

        """
        return self == self._constants._unspecified_address

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback per RFC 3330.

        """
        return self in self._constants._loopback_network

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is link-local per RFC 3927.

        """
        return self in self._constants._linklocal_network
+
+
+class IPv4Interface(IPv4Address):
+
+ def __init__(self, address):
+ if isinstance(address, (bytes, _compat_int_types)):
+ IPv4Address.__init__(self, address)
+ self.network = IPv4Network(self._ip)
+ self._prefixlen = self._max_prefixlen
+ return
+
+ if isinstance(address, tuple):
+ IPv4Address.__init__(self, address[0])
+ if len(address) > 1:
+ self._prefixlen = int(address[1])
+ else:
+ self._prefixlen = self._max_prefixlen
+
+ self.network = IPv4Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self.hostmask = self.network.hostmask
+ return
+
+ addr = _split_optional_netmask(address)
+ IPv4Address.__init__(self, addr[0])
+
+ self.network = IPv4Network(address, strict=False)
+ self._prefixlen = self.network._prefixlen
+
+ self.netmask = self.network.netmask
+ self.hostmask = self.network.hostmask
+
+ def __str__(self):
+ return '%s/%d' % (self._string_from_ip_int(self._ip),
+ self.network.prefixlen)
+
+ def __eq__(self, other):
+ address_equal = IPv4Address.__eq__(self, other)
+ if not address_equal or address_equal is NotImplemented:
+ return address_equal
+ try:
+ return self.network == other.network
+ except AttributeError:
+ # An interface with an associated network is NOT the
+ # same as an unassociated address. That's why the hash
+ # takes the extra info into account.
+ return False
+
+ def __lt__(self, other):
+ address_less = IPv4Address.__lt__(self, other)
+ if address_less is NotImplemented:
+ return NotImplemented
+ try:
+ return (self.network < other.network or
+ self.network == other.network and address_less)
+ except AttributeError:
+ # We *do* allow addresses and interfaces to be sorted. The
+ # unassociated address is considered less than all interfaces.
+ return False
+
+ def __hash__(self):
+ return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+
+ __reduce__ = _IPAddressBase.__reduce__
+
+ @property
+ def ip(self):
+ return IPv4Address(self._ip)
+
+ @property
+ def with_prefixlen(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self._prefixlen)
+
+ @property
+ def with_netmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.netmask)
+
+ @property
+ def with_hostmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.hostmask)
+
+
+class IPv4Network(_BaseV4, _BaseNetwork):
+
+ """This class represents and manipulates 32-bit IPv4 network + addresses..
+
+ Attributes: [examples for IPv4Network('192.0.2.0/27')]
+ .network_address: IPv4Address('192.0.2.0')
+ .hostmask: IPv4Address('0.0.0.31')
+ .broadcast_address: IPv4Address('192.0.2.31')
+ .netmask: IPv4Address('255.255.255.224')
+ .prefixlen: 27
+
+ """
+ # Class to use when creating address objects
+ _address_class = IPv4Address
+
+ def __init__(self, address, strict=True):
+
+ """Instantiate a new IPv4 network object.
+
+ Args:
+ address: A string or integer representing the IP [& network].
+ '192.0.2.0/24'
+ '192.0.2.0/255.255.255.0'
+ '192.0.2.0/0.0.0.255'
+ are all functionally the same in IPv4. Similarly,
+ '192.0.2.1'
+ '192.0.2.1/255.255.255.255'
+ '192.0.2.1/32'
+ are also functionally equivalent. That is to say, failing to
+ provide a subnetmask will create an object with a mask of /32.
+
+ If the mask (portion after the / in the argument) is given in
+ dotted quad form, it is treated as a netmask if it starts with a
+ non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
+ starts with a zero field (e.g. 0.255.255.255 == /8), with the
+ single exception of an all-zero mask which is treated as a
+ netmask == /0. If no mask is given, a default of /32 is used.
+
+ Additionally, an integer can be passed, so
+ IPv4Network('192.0.2.1') == IPv4Network(3221225985)
+ or, more generally
+ IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
+ IPv4Interface('192.0.2.1')
+
+ Raises:
+ AddressValueError: If ipaddress isn't a valid IPv4 address.
+ NetmaskValueError: If the netmask isn't valid for
+ an IPv4 address.
+ ValueError: If strict is True and a network address is not
+ supplied.
+
+ """
+ _BaseNetwork.__init__(self, address)
+
+ # Constructing from a packed address or integer
+ if isinstance(address, (_compat_int_types, bytes)):
+ self.network_address = IPv4Address(address)
+ self.netmask, self._prefixlen = self._make_netmask(
+ self._max_prefixlen)
+ # fixme: address/network test here.
+ return
+
+ if isinstance(address, tuple):
+ if len(address) > 1:
+ arg = address[1]
+ else:
+ # We weren't given an address[1]
+ arg = self._max_prefixlen
+ self.network_address = IPv4Address(address[0])
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+ packed = int(self.network_address)
+ if packed & int(self.netmask) != packed:
+ if strict:
+ raise ValueError('%s has host bits set' % self)
+ else:
+ self.network_address = IPv4Address(packed &
+ int(self.netmask))
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP prefix string.
+ addr = _split_optional_netmask(address)
+ self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))
+
+ if len(addr) == 2:
+ arg = addr[1]
+ else:
+ arg = self._max_prefixlen
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+
+ if strict:
+ if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
+ self.network_address):
+ raise ValueError('%s has host bits set' % self)
+ self.network_address = IPv4Address(int(self.network_address) &
+ int(self.netmask))
+
+ if self._prefixlen == (self._max_prefixlen - 1):
+ self.hosts = self.__iter__
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+ A boolean, True if the address is not reserved per
+ iana-ipv4-special-registry.
+
+ """
+ return (not (self.network_address in IPv4Network('100.64.0.0/10') and
+ self.broadcast_address in IPv4Network('100.64.0.0/10')) and
+ not self.is_private)
+
+
+class _IPv4Constants(object):
+
+ _linklocal_network = IPv4Network('169.254.0.0/16')
+
+ _loopback_network = IPv4Network('127.0.0.0/8')
+
+ _multicast_network = IPv4Network('224.0.0.0/4')
+
+ _public_network = IPv4Network('100.64.0.0/10')
+
+ _private_networks = [
+ IPv4Network('0.0.0.0/8'),
+ IPv4Network('10.0.0.0/8'),
+ IPv4Network('127.0.0.0/8'),
+ IPv4Network('169.254.0.0/16'),
+ IPv4Network('172.16.0.0/12'),
+ IPv4Network('192.0.0.0/29'),
+ IPv4Network('192.0.0.170/31'),
+ IPv4Network('192.0.2.0/24'),
+ IPv4Network('192.168.0.0/16'),
+ IPv4Network('198.18.0.0/15'),
+ IPv4Network('198.51.100.0/24'),
+ IPv4Network('203.0.113.0/24'),
+ IPv4Network('240.0.0.0/4'),
+ IPv4Network('255.255.255.255/32'),
+ ]
+
+ _reserved_network = IPv4Network('240.0.0.0/4')
+
+ _unspecified_address = IPv4Address('0.0.0.0')
+
+
+IPv4Address._constants = _IPv4Constants
+
+
+class _BaseV6(object):
+
+ """Base IPv6 object.
+
+ The following methods are used by IPv6 objects in both single IP
+ addresses and networks.
+
+ """
+
+ __slots__ = ()
+ _version = 6
+ _ALL_ONES = (2 ** IPV6LENGTH) - 1
+ _HEXTET_COUNT = 8
+ _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
+ _max_prefixlen = IPV6LENGTH
+
+ # There are only a bunch of valid v6 netmasks, so we cache them all
+ # when constructed (see _make_netmask()).
+ _netmask_cache = {}
+
+ @classmethod
+ def _make_netmask(cls, arg):
+ """Make a (netmask, prefix_len) tuple from the given argument.
+
+ Argument can be:
+ - an integer (the prefix length)
+ - a string representing the prefix length (e.g. "24")
+ - a string representing the prefix netmask (e.g. "255.255.255.0")
+ """
+ if arg not in cls._netmask_cache:
+ if isinstance(arg, _compat_int_types):
+ prefixlen = arg
+ else:
+ prefixlen = cls._prefix_from_prefix_string(arg)
+ netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
+ cls._netmask_cache[arg] = netmask, prefixlen
+ return cls._netmask_cache[arg]
+
+ @classmethod
+ def _ip_int_from_string(cls, ip_str):
+ """Turn an IPv6 ip_str into an integer.
+
+ Args:
+ ip_str: A string, the IPv6 ip_str.
+
+ Returns:
+ An int, the IPv6 address
+
+ Raises:
+ AddressValueError: if ip_str isn't a valid IPv6 Address.
+
+ """
+ if not ip_str:
+ raise AddressValueError('Address cannot be empty')
+
+ parts = ip_str.split(':')
+
+ # An IPv6 address needs at least 2 colons (3 parts).
+ _min_parts = 3
+ if len(parts) < _min_parts:
+ msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
+ raise AddressValueError(msg)
+
+ # If the address has an IPv4-style suffix, convert it to hexadecimal.
+ if '.' in parts[-1]:
+ try:
+ ipv4_int = IPv4Address(parts.pop())._ip
+ except AddressValueError as exc:
+ raise AddressValueError("%s in %r" % (exc, ip_str))
+ parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
+ parts.append('%x' % (ipv4_int & 0xFFFF))
+
+ # An IPv6 address can't have more than 8 colons (9 parts).
+ # The extra colon comes from using the "::" notation for a single
+ # leading or trailing zero part.
+ _max_parts = cls._HEXTET_COUNT + 1
+ if len(parts) > _max_parts:
+ msg = "At most %d colons permitted in %r" % (
+ _max_parts - 1, ip_str)
+ raise AddressValueError(msg)
+
+ # Disregarding the endpoints, find '::' with nothing in between.
+ # This indicates that a run of zeroes has been skipped.
+ skip_index = None
+ for i in _compat_range(1, len(parts) - 1):
+ if not parts[i]:
+ if skip_index is not None:
+ # Can't have more than one '::'
+ msg = "At most one '::' permitted in %r" % ip_str
+ raise AddressValueError(msg)
+ skip_index = i
+
+ # parts_hi is the number of parts to copy from above/before the '::'
+ # parts_lo is the number of parts to copy from below/after the '::'
+ if skip_index is not None:
+ # If we found a '::', then check if it also covers the endpoints.
+ parts_hi = skip_index
+ parts_lo = len(parts) - skip_index - 1
+ if not parts[0]:
+ parts_hi -= 1
+ if parts_hi:
+ msg = "Leading ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # ^: requires ^::
+ if not parts[-1]:
+ parts_lo -= 1
+ if parts_lo:
+ msg = "Trailing ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # :$ requires ::$
+ parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
+ if parts_skipped < 1:
+ msg = "Expected at most %d other parts with '::' in %r"
+ raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
+ else:
+ # Otherwise, allocate the entire address to parts_hi. The
+ # endpoints could still be empty, but _parse_hextet() will check
+ # for that.
+ if len(parts) != cls._HEXTET_COUNT:
+ msg = "Exactly %d parts expected without '::' in %r"
+ raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
+ if not parts[0]:
+ msg = "Leading ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # ^: requires ^::
+ if not parts[-1]:
+ msg = "Trailing ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # :$ requires ::$
+ parts_hi = len(parts)
+ parts_lo = 0
+ parts_skipped = 0
+
+ try:
+ # Now, parse the hextets into a 128-bit integer.
+ ip_int = 0
+ for i in range(parts_hi):
+ ip_int <<= 16
+ ip_int |= cls._parse_hextet(parts[i])
+ ip_int <<= 16 * parts_skipped
+ for i in range(-parts_lo, 0):
+ ip_int <<= 16
+ ip_int |= cls._parse_hextet(parts[i])
+ return ip_int
+ except ValueError as exc:
+ raise AddressValueError("%s in %r" % (exc, ip_str))
+
+ @classmethod
+ def _parse_hextet(cls, hextet_str):
+ """Convert an IPv6 hextet string into an integer.
+
+ Args:
+ hextet_str: A string, the number to parse.
+
+ Returns:
+ The hextet as an integer.
+
+ Raises:
+ ValueError: if the input isn't strictly a hex number from
+ [0..FFFF].
+
+ """
+ # Whitelist the characters, since int() allows a lot of bizarre stuff.
+ if not cls._HEX_DIGITS.issuperset(hextet_str):
+ raise ValueError("Only hex digits permitted in %r" % hextet_str)
+ # We do the length check second, since the invalid character error
+ # is likely to be more informative for the user
+ if len(hextet_str) > 4:
+ msg = "At most 4 characters permitted in %r"
+ raise ValueError(msg % hextet_str)
+ # Length check means we can skip checking the integer value
+ return int(hextet_str, 16)
+
+ @classmethod
+ def _compress_hextets(cls, hextets):
+ """Compresses a list of hextets.
+
+ Compresses a list of strings, replacing the longest continuous
+ sequence of "0" in the list with "" and adding empty strings at
+ the beginning or at the end of the string such that subsequently
+ calling ":".join(hextets) will produce the compressed version of
+ the IPv6 address.
+
+ Args:
+ hextets: A list of strings, the hextets to compress.
+
+ Returns:
+ A list of strings.
+
+ """
+ best_doublecolon_start = -1
+ best_doublecolon_len = 0
+ doublecolon_start = -1
+ doublecolon_len = 0
+ for index, hextet in enumerate(hextets):
+ if hextet == '0':
+ doublecolon_len += 1
+ if doublecolon_start == -1:
+ # Start of a sequence of zeros.
+ doublecolon_start = index
+ if doublecolon_len > best_doublecolon_len:
+ # This is the longest sequence of zeros so far.
+ best_doublecolon_len = doublecolon_len
+ best_doublecolon_start = doublecolon_start
+ else:
+ doublecolon_len = 0
+ doublecolon_start = -1
+
+ if best_doublecolon_len > 1:
+ best_doublecolon_end = (best_doublecolon_start +
+ best_doublecolon_len)
+ # For zeros at the end of the address.
+ if best_doublecolon_end == len(hextets):
+ hextets += ['']
+ hextets[best_doublecolon_start:best_doublecolon_end] = ['']
+ # For zeros at the beginning of the address.
+ if best_doublecolon_start == 0:
+ hextets = [''] + hextets
+
+ return hextets
+
+ @classmethod
+ def _string_from_ip_int(cls, ip_int=None):
+ """Turns a 128-bit integer into hexadecimal notation.
+
+ Args:
+ ip_int: An integer, the IP address.
+
+ Returns:
+ A string, the hexadecimal representation of the address.
+
+ Raises:
+ ValueError: The address is bigger than 128 bits of all ones.
+
+ """
+ if ip_int is None:
+ ip_int = int(cls._ip)
+
+ if ip_int > cls._ALL_ONES:
+ raise ValueError('IPv6 address is too large')
+
+ hex_str = '%032x' % ip_int
+ hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]
+
+ hextets = cls._compress_hextets(hextets)
+ return ':'.join(hextets)
+
+ def _explode_shorthand_ip_string(self):
+ """Expand a shortened IPv6 address.
+
+ Args:
+ ip_str: A string, the IPv6 address.
+
+ Returns:
+ A string, the expanded IPv6 address.
+
+ """
+ if isinstance(self, IPv6Network):
+ ip_str = _compat_str(self.network_address)
+ elif isinstance(self, IPv6Interface):
+ ip_str = _compat_str(self.ip)
+ else:
+ ip_str = _compat_str(self)
+
+ ip_int = self._ip_int_from_string(ip_str)
+ hex_str = '%032x' % ip_int
+ parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
+ if isinstance(self, (_BaseNetwork, IPv6Interface)):
+ return '%s/%d' % (':'.join(parts), self._prefixlen)
+ return ':'.join(parts)
+
+ def _reverse_pointer(self):
+ """Return the reverse DNS pointer name for the IPv6 address.
+
+ This implements the method described in RFC3596 2.5.
+
+ """
+ reverse_chars = self.exploded[::-1].replace(':', '')
+ return '.'.join(reverse_chars) + '.ip6.arpa'
+
+ @property
+ def max_prefixlen(self):
+ return self._max_prefixlen
+
+ @property
+ def version(self):
+ return self._version
+
+
+class IPv6Address(_BaseV6, _BaseAddress):
+
+ """Represent and manipulate single IPv6 Addresses."""
+
+ __slots__ = ('_ip', '__weakref__')
+
+ def __init__(self, address):
+ """Instantiate a new IPv6 address object.
+
+ Args:
+ address: A string or integer representing the IP
+
+ Additionally, an integer can be passed, so
+ IPv6Address('2001:db8::') ==
+ IPv6Address(42540766411282592856903984951653826560)
+ or, more generally
+ IPv6Address(int(IPv6Address('2001:db8::'))) ==
+ IPv6Address('2001:db8::')
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+
+ """
+ # Efficient constructor from integer.
+ if isinstance(address, _compat_int_types):
+ self._check_int_address(address)
+ self._ip = address
+ return
+
+ # Constructing from a packed address
+ if isinstance(address, bytes):
+ self._check_packed_address(address, 16)
+ bvs = _compat_bytes_to_byte_vals(address)
+ self._ip = _compat_int_from_byte_vals(bvs, 'big')
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP string.
+ addr_str = _compat_str(address)
+ if '/' in addr_str:
+ raise AddressValueError("Unexpected '/' in %r" % address)
+ self._ip = self._ip_int_from_string(addr_str)
+
+ @property
+ def packed(self):
+ """The binary representation of this address."""
+ return v6_int_to_packed(self._ip)
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
+
+ Returns:
+ A boolean, True if the address is a multicast address.
+ See RFC 2373 2.7 for details.
+
+ """
+ return self in self._constants._multicast_network
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
+
+ Returns:
+ A boolean, True if the address is within one of the
+ reserved IPv6 Network ranges.
+
+ """
+ return any(self in x for x in self._constants._reserved_networks)
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 4291.
+
+ """
+ return self in self._constants._linklocal_network
+
+ @property
+ def is_site_local(self):
+ """Test if the address is reserved for site-local.
+
+ Note that the site-local address space has been deprecated by RFC 3879.
+ Use is_private to test if this address is in the space of unique local
+ addresses as defined by RFC 4193.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 3513 2.5.6.
+
+ """
+ return self in self._constants._sitelocal_network
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per
+ iana-ipv6-special-registry.
+
+ """
+ return any(self in net for net in self._constants._private_networks)
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+ A boolean, true if the address is not reserved per
+ iana-ipv6-special-registry.
+
+ """
+ return not self.is_private
+
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
+
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 2373 2.5.2.
+
+ """
+ return self._ip == 0
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback address as defined in
+ RFC 2373 2.5.3.
+
+ """
+ return self._ip == 1
+
+ @property
+ def ipv4_mapped(self):
+ """Return the IPv4 mapped address.
+
+ Returns:
+ If the IPv6 address is a v4 mapped address, return the
+ IPv4 mapped address. Return None otherwise.
+
+ """
+ if (self._ip >> 32) != 0xFFFF:
+ return None
+ return IPv4Address(self._ip & 0xFFFFFFFF)
+
+ @property
+ def teredo(self):
+ """Tuple of embedded teredo IPs.
+
+ Returns:
+ Tuple of the (server, client) IPs or None if the address
+ doesn't appear to be a teredo address (doesn't start with
+ 2001::/32)
+
+ """
+ if (self._ip >> 96) != 0x20010000:
+ return None
+ return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
+ IPv4Address(~self._ip & 0xFFFFFFFF))
+
+ @property
+ def sixtofour(self):
+ """Return the IPv4 6to4 embedded address.
+
+ Returns:
+ The IPv4 6to4-embedded address if present or None if the
+ address doesn't appear to contain a 6to4 embedded address.
+
+ """
+ if (self._ip >> 112) != 0x2002:
+ return None
+ return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
+
+
+class IPv6Interface(IPv6Address):
+
+ def __init__(self, address):
+ if isinstance(address, (bytes, _compat_int_types)):
+ IPv6Address.__init__(self, address)
+ self.network = IPv6Network(self._ip)
+ self._prefixlen = self._max_prefixlen
+ return
+ if isinstance(address, tuple):
+ IPv6Address.__init__(self, address[0])
+ if len(address) > 1:
+ self._prefixlen = int(address[1])
+ else:
+ self._prefixlen = self._max_prefixlen
+ self.network = IPv6Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self.hostmask = self.network.hostmask
+ return
+
+ addr = _split_optional_netmask(address)
+ IPv6Address.__init__(self, addr[0])
+ self.network = IPv6Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self._prefixlen = self.network._prefixlen
+ self.hostmask = self.network.hostmask
+
+ def __str__(self):
+ return '%s/%d' % (self._string_from_ip_int(self._ip),
+ self.network.prefixlen)
+
+ def __eq__(self, other):
+ address_equal = IPv6Address.__eq__(self, other)
+ if not address_equal or address_equal is NotImplemented:
+ return address_equal
+ try:
+ return self.network == other.network
+ except AttributeError:
+ # An interface with an associated network is NOT the
+ # same as an unassociated address. That's why the hash
+ # takes the extra info into account.
+ return False
+
+ def __lt__(self, other):
+ address_less = IPv6Address.__lt__(self, other)
+ if address_less is NotImplemented:
+ return NotImplemented
+ try:
+ return (self.network < other.network or
+ self.network == other.network and address_less)
+ except AttributeError:
+ # We *do* allow addresses and interfaces to be sorted. The
+ # unassociated address is considered less than all interfaces.
+ return False
+
+ def __hash__(self):
+ return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+
+ __reduce__ = _IPAddressBase.__reduce__
+
+ @property
+ def ip(self):
+ return IPv6Address(self._ip)
+
+ @property
+ def with_prefixlen(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self._prefixlen)
+
+ @property
+ def with_netmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.netmask)
+
+ @property
+ def with_hostmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.hostmask)
+
+ @property
+ def is_unspecified(self):
+ return self._ip == 0 and self.network.is_unspecified
+
+ @property
+ def is_loopback(self):
+ return self._ip == 1 and self.network.is_loopback
+
+
+class IPv6Network(_BaseV6, _BaseNetwork):
+
+ """This class represents and manipulates 128-bit IPv6 networks.
+
+ Attributes: [examples for IPv6Network('2001:db8::1000/124')]
+ .network_address: IPv6Address('2001:db8::1000')
+ .hostmask: IPv6Address('::f')
+ .broadcast_address: IPv6Address('2001:db8::100f')
+ .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
+ .prefixlen: 124
+
+ """
+
+ # Class to use when creating address objects
+ _address_class = IPv6Address
+
+ def __init__(self, address, strict=True):
+ """Instantiate a new IPv6 Network object.
+
+ Args:
+ address: A string or integer representing the IPv6 network or the
+ IP and prefix/netmask.
+ '2001:db8::/128'
+ '2001:db8:0000:0000:0000:0000:0000:0000/128'
+ '2001:db8::'
+ are all functionally the same in IPv6. That is to say,
+ failing to provide a subnetmask will create an object with
+ a mask of /128.
+
+ Additionally, an integer can be passed, so
+ IPv6Network('2001:db8::') ==
+ IPv6Network(42540766411282592856903984951653826560)
+ or, more generally
+ IPv6Network(int(IPv6Network('2001:db8::'))) ==
+ IPv6Network('2001:db8::')
+
+ strict: A boolean. If true, ensure that we have been passed
+ A true network address, eg, 2001:db8::1000/124 and not an
+ IP address on a network, eg, 2001:db8::1/124.
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+ NetmaskValueError: If the netmask isn't valid for
+ an IPv6 address.
+ ValueError: If strict was True and a network address was not
+ supplied.
+
+ """
+ _BaseNetwork.__init__(self, address)
+
+ # Efficient constructor from integer or packed address
+ if isinstance(address, (bytes, _compat_int_types)):
+ self.network_address = IPv6Address(address)
+ self.netmask, self._prefixlen = self._make_netmask(
+ self._max_prefixlen)
+ return
+
+ if isinstance(address, tuple):
+ if len(address) > 1:
+ arg = address[1]
+ else:
+ arg = self._max_prefixlen
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+ self.network_address = IPv6Address(address[0])
+ packed = int(self.network_address)
+ if packed & int(self.netmask) != packed:
+ if strict:
+ raise ValueError('%s has host bits set' % self)
+ else:
+ self.network_address = IPv6Address(packed &
+ int(self.netmask))
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP prefix string.
+ addr = _split_optional_netmask(address)
+
+ self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
+
+ if len(addr) == 2:
+ arg = addr[1]
+ else:
+ arg = self._max_prefixlen
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+
+ if strict:
+ if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
+ self.network_address):
+ raise ValueError('%s has host bits set' % self)
+ self.network_address = IPv6Address(int(self.network_address) &
+ int(self.netmask))
+
+ if self._prefixlen == (self._max_prefixlen - 1):
+ self.hosts = self.__iter__
+
+ def hosts(self):
+ """Generate Iterator over usable hosts in a network.
+
+ This is like __iter__ except it doesn't return the
+ Subnet-Router anycast address.
+
+ """
+ network = int(self.network_address)
+ broadcast = int(self.broadcast_address)
+ for x in _compat_range(network + 1, broadcast + 1):
+ yield self._address_class(x)
+
+ @property
+ def is_site_local(self):
+ """Test if the address is reserved for site-local.
+
+ Note that the site-local address space has been deprecated by RFC 3879.
+ Use is_private to test if this address is in the space of unique local
+ addresses as defined by RFC 4193.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 3513 2.5.6.
+
+ """
+ return (self.network_address.is_site_local and
+ self.broadcast_address.is_site_local)
+
+
+class _IPv6Constants(object):
+
+ _linklocal_network = IPv6Network('fe80::/10')
+
+ _multicast_network = IPv6Network('ff00::/8')
+
+ _private_networks = [
+ IPv6Network('::1/128'),
+ IPv6Network('::/128'),
+ IPv6Network('::ffff:0:0/96'),
+ IPv6Network('100::/64'),
+ IPv6Network('2001::/23'),
+ IPv6Network('2001:2::/48'),
+ IPv6Network('2001:db8::/32'),
+ IPv6Network('2001:10::/28'),
+ IPv6Network('fc00::/7'),
+ IPv6Network('fe80::/10'),
+ ]
+
+ _reserved_networks = [
+ IPv6Network('::/8'), IPv6Network('100::/8'),
+ IPv6Network('200::/7'), IPv6Network('400::/6'),
+ IPv6Network('800::/5'), IPv6Network('1000::/4'),
+ IPv6Network('4000::/3'), IPv6Network('6000::/3'),
+ IPv6Network('8000::/3'), IPv6Network('A000::/3'),
+ IPv6Network('C000::/3'), IPv6Network('E000::/4'),
+ IPv6Network('F000::/5'), IPv6Network('F800::/6'),
+ IPv6Network('FE00::/9'),
+ ]
+
+ _sitelocal_network = IPv6Network('fec0::/10')
+
+
+IPv6Address._constants = _IPv6Constants
diff --git a/test/support/integration/plugins/module_utils/net_tools/__init__.py b/test/support/integration/plugins/module_utils/net_tools/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/support/integration/plugins/module_utils/net_tools/__init__.py
diff --git a/test/support/integration/plugins/module_utils/network/__init__.py b/test/support/integration/plugins/module_utils/network/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/support/integration/plugins/module_utils/network/__init__.py
diff --git a/test/support/integration/plugins/module_utils/network/common/__init__.py b/test/support/integration/plugins/module_utils/network/common/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/support/integration/plugins/module_utils/network/common/__init__.py
diff --git a/test/support/integration/plugins/module_utils/network/common/utils.py b/test/support/integration/plugins/module_utils/network/common/utils.py
new file mode 100644
index 00000000..80317387
--- /dev/null
+++ b/test/support/integration/plugins/module_utils/network/common/utils.py
@@ -0,0 +1,643 @@
+# This code is part of Ansible, but is an independent component.
+# This particular file snippet, and this file snippet only, is BSD licensed.
+# Modules you write using this snippet, which is embedded dynamically by Ansible
+# still belong to the author of the module, and may assign their own license
+# to the complete work.
+#
+# (c) 2016 Red Hat Inc.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+# Networking tools for network modules only
+
+import re
+import ast
+import operator
+import socket
+import json
+
+from itertools import chain
+
+from ansible.module_utils._text import to_text, to_bytes
+from ansible.module_utils.common._collections_compat import Mapping
+from ansible.module_utils.six import iteritems, string_types
+from ansible.module_utils import basic
+from ansible.module_utils.parsing.convert_bool import boolean
+
+# Backwards compatibility for 3rd party modules
+# TODO(pabelanger): With move to ansible.netcommon, we should clean this code
+# up and have modules import directly themselves.
+from ansible.module_utils.common.network import ( # noqa: F401
+ to_bits, is_netmask, is_masklen, to_netmask, to_masklen, to_subnet, to_ipv6_network, VALID_MASKS
+)
+
+try:
+ from jinja2 import Environment, StrictUndefined
+ from jinja2.exceptions import UndefinedError
+ HAS_JINJA2 = True
+except ImportError:
+ HAS_JINJA2 = False
+
+
+OPERATORS = frozenset(['ge', 'gt', 'eq', 'neq', 'lt', 'le'])
+ALIASES = frozenset([('min', 'ge'), ('max', 'le'), ('exactly', 'eq'), ('neq', 'ne')])
+
+
+def to_list(val):
+ if isinstance(val, (list, tuple, set)):
+ return list(val)
+ elif val is not None:
+ return [val]
+ else:
+ return list()
+
+
+def to_lines(stdout):
+ for item in stdout:
+ if isinstance(item, string_types):
+ item = to_text(item).split('\n')
+ yield item
+
+
+def transform_commands(module):
+ transform = ComplexList(dict(
+ command=dict(key=True),
+ output=dict(),
+ prompt=dict(type='list'),
+ answer=dict(type='list'),
+ newline=dict(type='bool', default=True),
+ sendonly=dict(type='bool', default=False),
+ check_all=dict(type='bool', default=False),
+ ), module)
+
+ return transform(module.params['commands'])
+
+
+def sort_list(val):
+ if isinstance(val, list):
+ return sorted(val)
+ return val
+
+
+class Entity(object):
+ """Transforms a dict to with an argument spec
+
+ This class will take a dict and apply an Ansible argument spec to the
+ values. The resulting dict will contain all of the keys in the param
+ with appropriate values set.
+
+ Example::
+
+ argument_spec = dict(
+ command=dict(key=True),
+ display=dict(default='text', choices=['text', 'json']),
+ validate=dict(type='bool')
+ )
+ transform = Entity(module, argument_spec)
+ value = dict(command='foo')
+ result = transform(value)
+ print result
+ {'command': 'foo', 'display': 'text', 'validate': None}
+
+ Supported argument spec:
+ * key - specifies how to map a single value to a dict
+ * read_from - read and apply the argument_spec from the module
+ * required - a value is required
+ * type - type of value (uses AnsibleModule type checker)
+ * fallback - implements fallback function
+ * choices - set of valid options
+ * default - default value
+ """
+
+ def __init__(self, module, attrs=None, args=None, keys=None, from_argspec=False):
+ args = [] if args is None else args
+
+ self._attributes = attrs or {}
+ self._module = module
+
+ for arg in args:
+ self._attributes[arg] = dict()
+ if from_argspec:
+ self._attributes[arg]['read_from'] = arg
+ if keys and arg in keys:
+ self._attributes[arg]['key'] = True
+
+ self.attr_names = frozenset(self._attributes.keys())
+
+ _has_key = False
+
+ for name, attr in iteritems(self._attributes):
+ if attr.get('read_from'):
+ if attr['read_from'] not in self._module.argument_spec:
+ module.fail_json(msg='argument %s does not exist' % attr['read_from'])
+ spec = self._module.argument_spec.get(attr['read_from'])
+ for key, value in iteritems(spec):
+ if key not in attr:
+ attr[key] = value
+
+ if attr.get('key'):
+ if _has_key:
+ module.fail_json(msg='only one key value can be specified')
+ _has_key = True
+ attr['required'] = True
+
+ def serialize(self):
+ return self._attributes
+
+ def to_dict(self, value):
+ obj = {}
+ for name, attr in iteritems(self._attributes):
+ if attr.get('key'):
+ obj[name] = value
+ else:
+ obj[name] = attr.get('default')
+ return obj
+
+ def __call__(self, value, strict=True):
+ if not isinstance(value, dict):
+ value = self.to_dict(value)
+
+ if strict:
+ unknown = set(value).difference(self.attr_names)
+ if unknown:
+ self._module.fail_json(msg='invalid keys: %s' % ','.join(unknown))
+
+ for name, attr in iteritems(self._attributes):
+ if value.get(name) is None:
+ value[name] = attr.get('default')
+
+ if attr.get('fallback') and not value.get(name):
+ fallback = attr.get('fallback', (None,))
+ fallback_strategy = fallback[0]
+ fallback_args = []
+ fallback_kwargs = {}
+ if fallback_strategy is not None:
+ for item in fallback[1:]:
+ if isinstance(item, dict):
+ fallback_kwargs = item
+ else:
+ fallback_args = item
+ try:
+ value[name] = fallback_strategy(*fallback_args, **fallback_kwargs)
+ except basic.AnsibleFallbackNotFound:
+ continue
+
+ if attr.get('required') and value.get(name) is None:
+ self._module.fail_json(msg='missing required attribute %s' % name)
+
+ if 'choices' in attr:
+ if value[name] not in attr['choices']:
+ self._module.fail_json(msg='%s must be one of %s, got %s' % (name, ', '.join(attr['choices']), value[name]))
+
+ if value[name] is not None:
+ value_type = attr.get('type', 'str')
+ type_checker = self._module._CHECK_ARGUMENT_TYPES_DISPATCHER[value_type]
+ type_checker(value[name])
+ elif value.get(name):
+ value[name] = self._module.params[name]
+
+ return value
+
+
+class EntityCollection(Entity):
+ """Extends ```Entity``` to handle a list of dicts """
+
+ def __call__(self, iterable, strict=True):
+ if iterable is None:
+ iterable = [super(EntityCollection, self).__call__(self._module.params, strict)]
+
+ if not isinstance(iterable, (list, tuple)):
+ self._module.fail_json(msg='value must be an iterable')
+
+ return [(super(EntityCollection, self).__call__(i, strict)) for i in iterable]
+
+
+# these two are for backwards compatibility and can be removed once all of the
+# modules that use them are updated
+class ComplexDict(Entity):
+ def __init__(self, attrs, module, *args, **kwargs):
+ super(ComplexDict, self).__init__(module, attrs, *args, **kwargs)
+
+
+class ComplexList(EntityCollection):
+ def __init__(self, attrs, module, *args, **kwargs):
+ super(ComplexList, self).__init__(module, attrs, *args, **kwargs)
+
+
+def dict_diff(base, comparable):
+ """ Generate a dict object of differences
+
+ This function will compare two dict objects and return the difference
+ between them as a dict object. For scalar values, the key will reflect
+ the updated value. If the key does not exist in `comparable`, then no
+ key will be returned. For lists, the value in comparable will wholly replace
+ the value in base for the key. For dicts, the returned value will only
+ return keys that are different.
+
+ :param base: dict object to base the diff on
+ :param comparable: dict object to compare against base
+
+ :returns: new dict object with differences
+ """
+ if not isinstance(base, dict):
+ raise AssertionError("`base` must be of type <dict>")
+ if not isinstance(comparable, dict):
+ if comparable is None:
+ comparable = dict()
+ else:
+ raise AssertionError("`comparable` must be of type <dict>")
+
+ updates = dict()
+
+ for key, value in iteritems(base):
+ if isinstance(value, dict):
+ item = comparable.get(key)
+ if item is not None:
+ sub_diff = dict_diff(value, comparable[key])
+ if sub_diff:
+ updates[key] = sub_diff
+ else:
+ comparable_value = comparable.get(key)
+ if comparable_value is not None:
+ if sort_list(base[key]) != sort_list(comparable_value):
+ updates[key] = comparable_value
+
+ for key in set(comparable.keys()).difference(base.keys()):
+ updates[key] = comparable.get(key)
+
+ return updates
+
+
+def dict_merge(base, other):
+ """ Return a new dict object that combines base and other
+
+ This will create a new dict object that is a combination of the key/value
+ pairs from base and other. When both keys exist, the value will be
+ selected from other. If the value is a list object, the two lists will
+ be combined and duplicate entries removed.
+
+ :param base: dict object to serve as base
+ :param other: dict object to combine with base
+
+ :returns: new combined dict object
+ """
+ if not isinstance(base, dict):
+ raise AssertionError("`base` must be of type <dict>")
+ if not isinstance(other, dict):
+ raise AssertionError("`other` must be of type <dict>")
+
+ combined = dict()
+
+ for key, value in iteritems(base):
+ if isinstance(value, dict):
+ if key in other:
+ item = other.get(key)
+ if item is not None:
+ if isinstance(other[key], Mapping):
+ combined[key] = dict_merge(value, other[key])
+ else:
+ combined[key] = other[key]
+ else:
+ combined[key] = item
+ else:
+ combined[key] = value
+ elif isinstance(value, list):
+ if key in other:
+ item = other.get(key)
+ if item is not None:
+ try:
+ combined[key] = list(set(chain(value, item)))
+ except TypeError:
+ value.extend([i for i in item if i not in value])
+ combined[key] = value
+ else:
+ combined[key] = item
+ else:
+ combined[key] = value
+ else:
+ if key in other:
+ other_value = other.get(key)
+ if other_value is not None:
+ if sort_list(base[key]) != sort_list(other_value):
+ combined[key] = other_value
+ else:
+ combined[key] = value
+ else:
+ combined[key] = other_value
+ else:
+ combined[key] = value
+
+ for key in set(other.keys()).difference(base.keys()):
+ combined[key] = other.get(key)
+
+ return combined
+
+
+def param_list_to_dict(param_list, unique_key="name", remove_key=True):
+ """Rotates a list of dictionaries to be a dictionary of dictionaries.
+
+ :param param_list: The aforementioned list of dictionaries
+ :param unique_key: The name of a key which is present and unique in all of param_list's dictionaries. The value
+ behind this key will be the key each dictionary can be found at in the new root dictionary
+ :param remove_key: If True, remove unique_key from the individual dictionaries before returning.
+ """
+ param_dict = {}
+ for params in param_list:
+ params = params.copy()
+ if remove_key:
+ name = params.pop(unique_key)
+ else:
+ name = params.get(unique_key)
+ param_dict[name] = params
+
+ return param_dict
+
+
+def conditional(expr, val, cast=None):
+ match = re.match(r'^(.+)\((.+)\)$', str(expr), re.I)
+ if match:
+ op, arg = match.groups()
+ else:
+ op = 'eq'
+ if ' ' in str(expr):
+ raise AssertionError('invalid expression: cannot contain spaces')
+ arg = expr
+
+ if cast is None and val is not None:
+ arg = type(val)(arg)
+ elif callable(cast):
+ arg = cast(arg)
+ val = cast(val)
+
+ op = next((oper for alias, oper in ALIASES if op == alias), op)
+
+ if not hasattr(operator, op) and op not in OPERATORS:
+ raise ValueError('unknown operator: %s' % op)
+
+ func = getattr(operator, op)
+ return func(val, arg)
+
+
+def ternary(value, true_val, false_val):
+ ''' value ? true_val : false_val '''
+ if value:
+ return true_val
+ else:
+ return false_val
+
+
+def remove_default_spec(spec):
+ for item in spec:
+ if 'default' in spec[item]:
+ del spec[item]['default']
+
+
+def validate_ip_address(address):
+ try:
+ socket.inet_aton(address)
+ except socket.error:
+ return False
+ return address.count('.') == 3
+
+
+def validate_ip_v6_address(address):
+ try:
+ socket.inet_pton(socket.AF_INET6, address)
+ except socket.error:
+ return False
+ return True
+
+
+def validate_prefix(prefix):
+ if prefix and not 0 <= int(prefix) <= 32:
+ return False
+ return True
+
+
+def load_provider(spec, args):
+ provider = args.get('provider') or {}
+ for key, value in iteritems(spec):
+ if key not in provider:
+ if 'fallback' in value:
+ provider[key] = _fallback(value['fallback'])
+ elif 'default' in value:
+ provider[key] = value['default']
+ else:
+ provider[key] = None
+ if 'authorize' in provider:
+ # Coerce authorize to boolean if a string has somehow snuck in.
+ provider['authorize'] = boolean(provider['authorize'] or False)
+ args['provider'] = provider
+ return provider
+
+
+def _fallback(fallback):
+ strategy = fallback[0]
+ args = []
+ kwargs = {}
+
+ for item in fallback[1:]:
+ if isinstance(item, dict):
+ kwargs = item
+ else:
+ args = item
+ try:
+ return strategy(*args, **kwargs)
+ except basic.AnsibleFallbackNotFound:
+ pass
+
+
+def generate_dict(spec):
+ """
+ Generate dictionary which is in sync with argspec
+
+ :param spec: A dictionary that is the argspec of the module
+ :rtype: A dictionary
+ :returns: A dictionary in sync with argspec with default value
+ """
+ obj = {}
+ if not spec:
+ return obj
+
+ for key, val in iteritems(spec):
+ if 'default' in val:
+ dct = {key: val['default']}
+ elif 'type' in val and val['type'] == 'dict':
+ dct = {key: generate_dict(val['options'])}
+ else:
+ dct = {key: None}
+ obj.update(dct)
+ return obj
+
+
+def parse_conf_arg(cfg, arg):
+ """
+ Parse config based on argument
+
+ :param cfg: A text string which is a line of configuration.
+ :param arg: A text string which is to be matched.
+ :rtype: A text string
+ :returns: A text string if match is found
+ """
+ match = re.search(r'%s (.+)(\n|$)' % arg, cfg, re.M)
+ if match:
+ result = match.group(1).strip()
+ else:
+ result = None
+ return result
+
+
+def parse_conf_cmd_arg(cfg, cmd, res1, res2=None, delete_str='no'):
+ """
+ Parse config based on command
+
+ :param cfg: A text string which is a line of configuration.
+ :param cmd: A text string which is the command to be matched
+ :param res1: A text string to be returned if the command is present
+ :param res2: A text string to be returned if the negate command
+ is present
+ :param delete_str: A text string to identify the start of the
+ negate command
+ :rtype: A text string
+ :returns: A text string if match is found
+ """
+ match = re.search(r'\n\s+%s(\n|$)' % cmd, cfg)
+ if match:
+ return res1
+ if res2 is not None:
+ match = re.search(r'\n\s+%s %s(\n|$)' % (delete_str, cmd), cfg)
+ if match:
+ return res2
+ return None
+
+
+def get_xml_conf_arg(cfg, path, data='text'):
+ """
+ :param cfg: The top level configuration lxml Element tree object
+ :param path: The relative xpath w.r.t to top level element (cfg)
+ to be searched in the xml hierarchy
+ :param data: The type of data to be returned for the matched xml node.
+ Valid values are text, tag, attrib, with default as text.
+ :return: Returns the required type for the matched xml node or else None
+ """
+ match = cfg.xpath(path)
+ if len(match):
+ if data == 'tag':
+ result = getattr(match[0], 'tag')
+ elif data == 'attrib':
+ result = getattr(match[0], 'attrib')
+ else:
+ result = getattr(match[0], 'text')
+ else:
+ result = None
+ return result
+
+
+def remove_empties(cfg_dict):
+ """
+ Generate final config dictionary
+
+ :param cfg_dict: A dictionary parsed in the facts system
+ :rtype: A dictionary
+ :returns: A dictionary by eliminating keys that have null values
+ """
+ final_cfg = {}
+ if not cfg_dict:
+ return final_cfg
+
+ for key, val in iteritems(cfg_dict):
+ dct = None
+ if isinstance(val, dict):
+ child_val = remove_empties(val)
+ if child_val:
+ dct = {key: child_val}
+ elif (isinstance(val, list) and val
+ and all([isinstance(x, dict) for x in val])):
+ child_val = [remove_empties(x) for x in val]
+ if child_val:
+ dct = {key: child_val}
+ elif val not in [None, [], {}, (), '']:
+ dct = {key: val}
+ if dct:
+ final_cfg.update(dct)
+ return final_cfg
+
+
+def validate_config(spec, data):
+ """
+ Validate the input data against the AnsibleModule spec format
+ :param spec: Ansible argument spec
+ :param data: Data to be validated
+ :return:
+ """
+ params = basic._ANSIBLE_ARGS
+ basic._ANSIBLE_ARGS = to_bytes(json.dumps({'ANSIBLE_MODULE_ARGS': data}))
+ validated_data = basic.AnsibleModule(spec).params
+ basic._ANSIBLE_ARGS = params
+ return validated_data
+
+
+def search_obj_in_list(name, lst, key='name'):
+ if not lst:
+ return None
+ else:
+ for item in lst:
+ if item.get(key) == name:
+ return item
+
+
+class Template:
+
+ def __init__(self):
+ if not HAS_JINJA2:
+ raise ImportError("jinja2 is required but does not appear to be installed. "
+ "It can be installed using `pip install jinja2`")
+
+ self.env = Environment(undefined=StrictUndefined)
+ self.env.filters.update({'ternary': ternary})
+
+ def __call__(self, value, variables=None, fail_on_undefined=True):
+ variables = variables or {}
+
+ if not self.contains_vars(value):
+ return value
+
+ try:
+ value = self.env.from_string(value).render(variables)
+ except UndefinedError:
+ if not fail_on_undefined:
+ return None
+ raise
+
+ if value:
+ try:
+ return ast.literal_eval(value)
+ except Exception:
+ return str(value)
+ else:
+ return None
+
+ def contains_vars(self, data):
+ if isinstance(data, string_types):
+ for marker in (self.env.block_start_string, self.env.variable_start_string, self.env.comment_start_string):
+ if marker in data:
+ return True
+ return False
diff --git a/test/support/integration/plugins/modules/sefcontext.py b/test/support/integration/plugins/modules/sefcontext.py
index 946ae880..5574abca 100644
--- a/test/support/integration/plugins/modules/sefcontext.py
+++ b/test/support/integration/plugins/modules/sefcontext.py
@@ -105,11 +105,13 @@ RETURN = r'''
# Default return values
'''
+import os
+import subprocess
import traceback
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
SELINUX_IMP_ERR = None
try:
diff --git a/test/support/integration/plugins/modules/timezone.py b/test/support/integration/plugins/modules/timezone.py
index dd374838..b7439a12 100644
--- a/test/support/integration/plugins/modules/timezone.py
+++ b/test/support/integration/plugins/modules/timezone.py
@@ -121,7 +121,7 @@ class Timezone(object):
# running in the global zone where changing the timezone has no effect.
zonename_cmd = module.get_bin_path('zonename')
if zonename_cmd is not None:
- (rc, stdout, stderr) = module.run_command(zonename_cmd)
+ (rc, stdout, _) = module.run_command(zonename_cmd)
if rc == 0 and stdout.strip() == 'global':
module.fail_json(msg='Adjusting timezone is not supported in Global Zone')
@@ -731,7 +731,7 @@ class BSDTimezone(Timezone):
# Strategy 3:
# (If /etc/localtime is not symlinked)
# Check all files in /usr/share/zoneinfo and return first non-link match.
- for dname, dirs, fnames in sorted(os.walk(zoneinfo_dir)):
+ for dname, _, fnames in sorted(os.walk(zoneinfo_dir)):
for fname in sorted(fnames):
zoneinfo_file = os.path.join(dname, fname)
if not os.path.islink(zoneinfo_file) and filecmp.cmp(zoneinfo_file, localtime_file):
diff --git a/test/support/integration/plugins/modules/zypper.py b/test/support/integration/plugins/modules/zypper.py
index cd67b605..bfb31819 100644
--- a/test/support/integration/plugins/modules/zypper.py
+++ b/test/support/integration/plugins/modules/zypper.py
@@ -41,7 +41,7 @@ options:
- Package name C(name) or package specifier or a list of either.
- Can include a version like C(name=1.0), C(name>3.4) or C(name<=2.7). If a version is given, C(oldpackage) is implied and zypper is allowed to
update the package within the version range given.
- - You can also pass a url or a local path to an rpm file.
+ - You can also pass a url or a local path to a rpm file.
- When using state=latest, this can be '*', which updates all installed packages.
required: true
aliases: [ 'pkg' ]
@@ -202,7 +202,8 @@ EXAMPLES = '''
import xml
import re
from xml.dom.minidom import parseString as parseXML
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.six import iteritems
+from ansible.module_utils._text import to_native
# import module snippets
from ansible.module_utils.basic import AnsibleModule
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_base.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_base.py
new file mode 100644
index 00000000..542dcfef
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_base.py
@@ -0,0 +1,90 @@
+# Copyright: (c) 2015, Ansible Inc,
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+
+from ansible.errors import AnsibleError
+from ansible.plugins.action import ActionBase
+from ansible.utils.display import Display
+
+display = Display()
+
+
+class ActionModule(ActionBase):
+ def run(self, tmp=None, task_vars=None):
+ del tmp # tmp no longer has any effect
+
+ result = {}
+ play_context = copy.deepcopy(self._play_context)
+ play_context.network_os = self._get_network_os(task_vars)
+ new_task = self._task.copy()
+
+ module = self._get_implementation_module(
+ play_context.network_os, self._task.action
+ )
+ if not module:
+ if self._task.args["fail_on_missing_module"]:
+ result["failed"] = True
+ else:
+ result["failed"] = False
+
+ result["msg"] = (
+ "Could not find implementation module %s for %s"
+ % (self._task.action, play_context.network_os)
+ )
+ return result
+
+ new_task.action = module
+
+ action = self._shared_loader_obj.action_loader.get(
+ play_context.network_os,
+ task=new_task,
+ connection=self._connection,
+ play_context=play_context,
+ loader=self._loader,
+ templar=self._templar,
+ shared_loader_obj=self._shared_loader_obj,
+ )
+ display.vvvv("Running implementation module %s" % module)
+ return action.run(task_vars=task_vars)
+
+ def _get_network_os(self, task_vars):
+ if "network_os" in self._task.args and self._task.args["network_os"]:
+ display.vvvv("Getting network OS from task argument")
+ network_os = self._task.args["network_os"]
+ elif self._play_context.network_os:
+ display.vvvv("Getting network OS from inventory")
+ network_os = self._play_context.network_os
+ elif (
+ "network_os" in task_vars.get("ansible_facts", {})
+ and task_vars["ansible_facts"]["network_os"]
+ ):
+ display.vvvv("Getting network OS from fact")
+ network_os = task_vars["ansible_facts"]["network_os"]
+ else:
+ raise AnsibleError(
+ "ansible_network_os must be specified on this host to use platform agnostic modules"
+ )
+
+ return network_os
+
+ def _get_implementation_module(self, network_os, platform_agnostic_module):
+ module_name = (
+ network_os.split(".")[-1]
+ + "_"
+ + platform_agnostic_module.partition("_")[2]
+ )
+ if "." in network_os:
+ fqcn_module = ".".join(network_os.split(".")[0:-1])
+ implementation_module = fqcn_module + "." + module_name
+ else:
+ implementation_module = module_name
+
+ if implementation_module not in self._shared_loader_obj.module_loader:
+ implementation_module = None
+
+ return implementation_module
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_get.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_get.py
index c6dbb2cf..40205a46 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_get.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_get.py
@@ -24,7 +24,7 @@ import uuid
import hashlib
from ansible.errors import AnsibleError
-from ansible.module_utils.common.text.converters import to_text, to_bytes
+from ansible.module_utils._text import to_text, to_bytes
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.plugins.action import ActionBase
from ansible.module_utils.six.moves.urllib.parse import urlsplit
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_put.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_put.py
index 6fa3b8d6..955329d4 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_put.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/net_put.py
@@ -23,7 +23,7 @@ import uuid
import hashlib
from ansible.errors import AnsibleError
-from ansible.module_utils.common.text.converters import to_text, to_bytes
+from ansible.module_utils._text import to_text, to_bytes
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.plugins.action import ActionBase
from ansible.module_utils.six.moves.urllib.parse import urlsplit
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/network.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/network.py
index fbcc9c13..5d05d338 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/network.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/action/network.py
@@ -25,7 +25,7 @@ import time
import re
from ansible.errors import AnsibleError
-from ansible.module_utils.common.text.converters import to_text, to_bytes
+from ansible.module_utils._text import to_text, to_bytes
from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.utils.display import Display
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/become/enable.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/become/enable.py
new file mode 100644
index 00000000..33938fd1
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/become/enable.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Copyright: (c) 2018, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """become: enable
+short_description: Switch to elevated permissions on a network device
+description:
+- This become plugin allows elevated permissions on a remote network device.
+author: ansible (@core)
+options:
+ become_pass:
+ description: password
+ ini:
+ - section: enable_become_plugin
+ key: password
+ vars:
+ - name: ansible_become_password
+ - name: ansible_become_pass
+ - name: ansible_enable_pass
+ env:
+ - name: ANSIBLE_BECOME_PASS
+ - name: ANSIBLE_ENABLE_PASS
+notes:
+- enable is really implemented in the network connection handler and as such can only
+ be used with network connections.
+- This plugin ignores the 'become_exe' and 'become_user' settings as it uses an API
+ and not an executable.
+"""
+
+from ansible.plugins.become import BecomeBase
+
+
+class BecomeModule(BecomeBase):
+
+ name = "ansible.netcommon.enable"
+
+ def build_become_command(self, cmd, shell):
+ # enable is implemented inside the network connection plugins
+ return cmd
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/httpapi.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/httpapi.py
new file mode 100644
index 00000000..b063ef0d
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/httpapi.py
@@ -0,0 +1,324 @@
+# (c) 2018 Red Hat Inc.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """author: Ansible Networking Team
+connection: httpapi
+short_description: Use httpapi to run command on network appliances
+description:
+- This connection plugin provides a connection to remote devices over a HTTP(S)-based
+ api.
+options:
+ host:
+ description:
+ - Specifies the remote device FQDN or IP address to establish the HTTP(S) connection
+ to.
+ default: inventory_hostname
+ vars:
+ - name: ansible_host
+ port:
+ type: int
+ description:
+ - Specifies the port on the remote device that listens for connections when establishing
+ the HTTP(S) connection.
+ - When unspecified, will pick 80 or 443 based on the value of use_ssl.
+ ini:
+ - section: defaults
+ key: remote_port
+ env:
+ - name: ANSIBLE_REMOTE_PORT
+ vars:
+ - name: ansible_httpapi_port
+ network_os:
+ description:
+ - Configures the device platform network operating system. This value is used
+ to load the correct httpapi plugin to communicate with the remote device
+ vars:
+ - name: ansible_network_os
+ remote_user:
+ description:
+ - The username used to authenticate to the remote device when the API connection
+ is first established. If the remote_user is not specified, the connection will
+ use the username of the logged in user.
+ - Can be configured from the CLI via the C(--user) or C(-u) options.
+ ini:
+ - section: defaults
+ key: remote_user
+ env:
+ - name: ANSIBLE_REMOTE_USER
+ vars:
+ - name: ansible_user
+ password:
+ description:
+ - Configures the user password used to authenticate to the remote device when
+ needed for the device API.
+ vars:
+ - name: ansible_password
+ - name: ansible_httpapi_pass
+ - name: ansible_httpapi_password
+ use_ssl:
+ type: boolean
+ description:
+ - Whether to connect using SSL (HTTPS) or not (HTTP).
+ default: false
+ vars:
+ - name: ansible_httpapi_use_ssl
+ validate_certs:
+ type: boolean
+ description:
+ - Whether to validate SSL certificates
+ default: true
+ vars:
+ - name: ansible_httpapi_validate_certs
+ use_proxy:
+ type: boolean
+ description:
+ - Whether to use https_proxy for requests.
+ default: true
+ vars:
+ - name: ansible_httpapi_use_proxy
+ become:
+ type: boolean
+ description:
+ - The become option will instruct the CLI session to attempt privilege escalation
+ on platforms that support it. Normally this means transitioning from user mode
+ to C(enable) mode in the CLI session. If become is set to True and the remote
+ device does not support privilege escalation or the privilege has already been
+ elevated, then this option is silently ignored.
+ - Can be configured from the CLI via the C(--become) or C(-b) options.
+ default: false
+ ini:
+ - section: privilege_escalation
+ key: become
+ env:
+ - name: ANSIBLE_BECOME
+ vars:
+ - name: ansible_become
+ become_method:
+ description:
+ - This option allows the become method to be specified in for handling privilege
+ escalation. Typically the become_method value is set to C(enable) but could
+ be defined as other values.
+ default: sudo
+ ini:
+ - section: privilege_escalation
+ key: become_method
+ env:
+ - name: ANSIBLE_BECOME_METHOD
+ vars:
+ - name: ansible_become_method
+ persistent_connect_timeout:
+ type: int
+ description:
+ - Configures, in seconds, the amount of time to wait when trying to initially
+ establish a persistent connection. If this value expires before the connection
+ to the remote device is completed, the connection will fail.
+ default: 30
+ ini:
+ - section: persistent_connection
+ key: connect_timeout
+ env:
+ - name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT
+ vars:
+ - name: ansible_connect_timeout
+ persistent_command_timeout:
+ type: int
+ description:
+ - Configures, in seconds, the amount of time to wait for a command to return from
+ the remote device. If this timer is exceeded before the command returns, the
+ connection plugin will raise an exception and close.
+ default: 30
+ ini:
+ - section: persistent_connection
+ key: command_timeout
+ env:
+ - name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
+ vars:
+ - name: ansible_command_timeout
+ persistent_log_messages:
+ type: boolean
+ description:
+ - This flag will enable logging the command executed and response received from
+ target device in the ansible log file. For this option to work 'log_path' ansible
+ configuration option is required to be set to a file path with write access.
+ - Be sure to fully understand the security implications of enabling this option
+ as it could create a security vulnerability by logging sensitive information
+ in log file.
+ default: false
+ ini:
+ - section: persistent_connection
+ key: log_messages
+ env:
+ - name: ANSIBLE_PERSISTENT_LOG_MESSAGES
+ vars:
+ - name: ansible_persistent_log_messages
+"""
+
+from io import BytesIO
+
+from ansible.errors import AnsibleConnectionFailure
+from ansible.module_utils._text import to_bytes
+from ansible.module_utils.six import PY3
+from ansible.module_utils.six.moves import cPickle
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import open_url
+from ansible.playbook.play_context import PlayContext
+from ansible.plugins.loader import httpapi_loader
+from ansible.plugins.connection import NetworkConnectionBase, ensure_connect
+
+
+class Connection(NetworkConnectionBase):
+ """Network API connection"""
+
+ transport = "ansible.netcommon.httpapi"
+ has_pipelining = True
+
+ def __init__(self, play_context, new_stdin, *args, **kwargs):
+ super(Connection, self).__init__(
+ play_context, new_stdin, *args, **kwargs
+ )
+
+ self._url = None
+ self._auth = None
+
+ if self._network_os:
+
+ self.httpapi = httpapi_loader.get(self._network_os, self)
+ if self.httpapi:
+ self._sub_plugin = {
+ "type": "httpapi",
+ "name": self.httpapi._load_name,
+ "obj": self.httpapi,
+ }
+ self.queue_message(
+ "vvvv",
+ "loaded API plugin %s from path %s for network_os %s"
+ % (
+ self.httpapi._load_name,
+ self.httpapi._original_path,
+ self._network_os,
+ ),
+ )
+ else:
+ raise AnsibleConnectionFailure(
+ "unable to load API plugin for network_os %s"
+ % self._network_os
+ )
+
+ else:
+ raise AnsibleConnectionFailure(
+ "Unable to automatically determine host network os. Please "
+ "manually configure ansible_network_os value for this host"
+ )
+ self.queue_message("log", "network_os is set to %s" % self._network_os)
+
+ def update_play_context(self, pc_data):
+ """Updates the play context information for the connection"""
+ pc_data = to_bytes(pc_data)
+ if PY3:
+ pc_data = cPickle.loads(pc_data, encoding="bytes")
+ else:
+ pc_data = cPickle.loads(pc_data)
+ play_context = PlayContext()
+ play_context.deserialize(pc_data)
+
+ self.queue_message("vvvv", "updating play_context for connection")
+ if self._play_context.become ^ play_context.become:
+ self.set_become(play_context)
+ if play_context.become is True:
+ self.queue_message("vvvv", "authorizing connection")
+ else:
+ self.queue_message("vvvv", "deauthorizing connection")
+
+ self._play_context = play_context
+
+ def _connect(self):
+ if not self.connected:
+ protocol = "https" if self.get_option("use_ssl") else "http"
+ host = self.get_option("host")
+ port = self.get_option("port") or (
+ 443 if protocol == "https" else 80
+ )
+ self._url = "%s://%s:%s" % (protocol, host, port)
+
+ self.queue_message(
+ "vvv",
+            "ESTABLISH HTTP(S) CONNECT FOR USER: %s TO %s"
+ % (self._play_context.remote_user, self._url),
+ )
+ self.httpapi.set_become(self._play_context)
+ self._connected = True
+
+ self.httpapi.login(
+ self.get_option("remote_user"), self.get_option("password")
+ )
+
+ def close(self):
+ """
+ Close the active session to the device
+ """
+ # only close the connection if its connected.
+ if self._connected:
+ self.queue_message("vvvv", "closing http(s) connection to device")
+ self.logout()
+
+ super(Connection, self).close()
+
+ @ensure_connect
+ def send(self, path, data, **kwargs):
+ """
+ Sends the command to the device over api
+ """
+ url_kwargs = dict(
+ timeout=self.get_option("persistent_command_timeout"),
+ validate_certs=self.get_option("validate_certs"),
+ use_proxy=self.get_option("use_proxy"),
+ headers={},
+ )
+ url_kwargs.update(kwargs)
+ if self._auth:
+ # Avoid modifying passed-in headers
+ headers = dict(kwargs.get("headers", {}))
+ headers.update(self._auth)
+ url_kwargs["headers"] = headers
+ else:
+ url_kwargs["force_basic_auth"] = True
+ url_kwargs["url_username"] = self.get_option("remote_user")
+ url_kwargs["url_password"] = self.get_option("password")
+
+ try:
+ url = self._url + path
+ self._log_messages(
+ "send url '%s' with data '%s' and kwargs '%s'"
+ % (url, data, url_kwargs)
+ )
+ response = open_url(url, data=data, **url_kwargs)
+ except HTTPError as exc:
+ is_handled = self.handle_httperror(exc)
+ if is_handled is True:
+ return self.send(path, data, **kwargs)
+ elif is_handled is False:
+ raise
+ else:
+ response = is_handled
+ except URLError as exc:
+ raise AnsibleConnectionFailure(
+ "Could not connect to {0}: {1}".format(
+ self._url + path, exc.reason
+ )
+ )
+
+ response_buffer = BytesIO()
+ resp_data = response.read()
+ self._log_messages("received response: '%s'" % resp_data)
+ response_buffer.write(resp_data)
+
+ # Try to assign a new auth token if one is given
+ self._auth = self.update_auth(response, response_buffer) or self._auth
+
+ response_buffer.seek(0)
+
+ return response, response_buffer
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/netconf.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/netconf.py
new file mode 100644
index 00000000..1e2d3caa
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/netconf.py
@@ -0,0 +1,404 @@
+# (c) 2016 Red Hat Inc.
+# (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """author: Ansible Networking Team
+connection: netconf
+short_description: Provides a persistent connection using the netconf protocol
+description:
+- This connection plugin provides a connection to remote devices over the SSH NETCONF
+ subsystem. This connection plugin is typically used by network devices for sending
+ and receiving RPC calls over NETCONF.
+- Note this connection plugin requires ncclient to be installed on the local Ansible
+ controller.
+requirements:
+- ncclient
+options:
+ host:
+ description:
+ - Specifies the remote device FQDN or IP address to establish the SSH connection
+ to.
+ default: inventory_hostname
+ vars:
+ - name: ansible_host
+ port:
+ type: int
+ description:
+ - Specifies the port on the remote device that listens for connections when establishing
+ the SSH connection.
+ default: 830
+ ini:
+ - section: defaults
+ key: remote_port
+ env:
+ - name: ANSIBLE_REMOTE_PORT
+ vars:
+ - name: ansible_port
+ network_os:
+ description:
+ - Configures the device platform network operating system. This value is used
+ to load a device specific netconf plugin. If this option is not configured
+ (or set to C(auto)), then Ansible will attempt to guess the correct network_os
+ to use. If it can not guess a network_os correctly it will use C(default).
+ vars:
+ - name: ansible_network_os
+ remote_user:
+ description:
+ - The username used to authenticate to the remote device when the SSH connection
+ is first established. If the remote_user is not specified, the connection will
+ use the username of the logged in user.
+ - Can be configured from the CLI via the C(--user) or C(-u) options.
+ ini:
+ - section: defaults
+ key: remote_user
+ env:
+ - name: ANSIBLE_REMOTE_USER
+ vars:
+ - name: ansible_user
+ password:
+ description:
+ - Configures the user password used to authenticate to the remote device when
+ first establishing the SSH connection.
+ vars:
+ - name: ansible_password
+ - name: ansible_ssh_pass
+ - name: ansible_ssh_password
+ - name: ansible_netconf_password
+ private_key_file:
+ description:
+ - The private SSH key or certificate file used to authenticate to the remote device
+ when first establishing the SSH connection.
+ ini:
+ - section: defaults
+ key: private_key_file
+ env:
+ - name: ANSIBLE_PRIVATE_KEY_FILE
+ vars:
+ - name: ansible_private_key_file
+ look_for_keys:
+ default: true
+ description:
+ - Enables looking for ssh keys in the usual locations for ssh keys (e.g. :file:`~/.ssh/id_*`).
+ env:
+ - name: ANSIBLE_PARAMIKO_LOOK_FOR_KEYS
+ ini:
+ - section: paramiko_connection
+ key: look_for_keys
+ type: boolean
+ host_key_checking:
+ description: Set this to "False" if you want to avoid host key checking by the
+ underlying tools Ansible uses to connect to the host
+ type: boolean
+ default: true
+ env:
+ - name: ANSIBLE_HOST_KEY_CHECKING
+ - name: ANSIBLE_SSH_HOST_KEY_CHECKING
+ - name: ANSIBLE_NETCONF_HOST_KEY_CHECKING
+ ini:
+ - section: defaults
+ key: host_key_checking
+ - section: paramiko_connection
+ key: host_key_checking
+ vars:
+ - name: ansible_host_key_checking
+ - name: ansible_ssh_host_key_checking
+ - name: ansible_netconf_host_key_checking
+ persistent_connect_timeout:
+ type: int
+ description:
+ - Configures, in seconds, the amount of time to wait when trying to initially
+ establish a persistent connection. If this value expires before the connection
+ to the remote device is completed, the connection will fail.
+ default: 30
+ ini:
+ - section: persistent_connection
+ key: connect_timeout
+ env:
+ - name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT
+ vars:
+ - name: ansible_connect_timeout
+ persistent_command_timeout:
+ type: int
+ description:
+ - Configures, in seconds, the amount of time to wait for a command to return from
+ the remote device. If this timer is exceeded before the command returns, the
+ connection plugin will raise an exception and close.
+ default: 30
+ ini:
+ - section: persistent_connection
+ key: command_timeout
+ env:
+ - name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
+ vars:
+ - name: ansible_command_timeout
+ netconf_ssh_config:
+ description:
+ - This variable is used to enable bastion/jump host with netconf connection. If
+ set to True the bastion/jump host ssh settings should be present in ~/.ssh/config
+ file, alternatively it can be set to custom ssh configuration file path to read
+ the bastion/jump host settings.
+ ini:
+ - section: netconf_connection
+ key: ssh_config
+ version_added: '2.7'
+ env:
+ - name: ANSIBLE_NETCONF_SSH_CONFIG
+ vars:
+ - name: ansible_netconf_ssh_config
+ version_added: '2.7'
+ persistent_log_messages:
+ type: boolean
+ description:
+ - This flag will enable logging the command executed and response received from
+ target device in the ansible log file. For this option to work 'log_path' ansible
+ configuration option is required to be set to a file path with write access.
+ - Be sure to fully understand the security implications of enabling this option
+ as it could create a security vulnerability by logging sensitive information
+ in log file.
+ default: false
+ ini:
+ - section: persistent_connection
+ key: log_messages
+ env:
+ - name: ANSIBLE_PERSISTENT_LOG_MESSAGES
+ vars:
+ - name: ansible_persistent_log_messages
+"""
+
+import os
+import logging
+import json
+
+from ansible.errors import AnsibleConnectionFailure, AnsibleError
+from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.basic import missing_required_lib
+from ansible.module_utils.parsing.convert_bool import (
+ BOOLEANS_TRUE,
+ BOOLEANS_FALSE,
+)
+from ansible.plugins.loader import netconf_loader
+from ansible.plugins.connection import NetworkConnectionBase, ensure_connect
+
+try:
+ from ncclient import manager
+ from ncclient.operations import RPCError
+ from ncclient.transport.errors import SSHUnknownHostError
+ from ncclient.xml_ import to_ele, to_xml
+
+ HAS_NCCLIENT = True
+ NCCLIENT_IMP_ERR = None
+except (
+ ImportError,
+ AttributeError,
+) as err: # paramiko and gssapi are incompatible and raise AttributeError not ImportError
+ HAS_NCCLIENT = False
+ NCCLIENT_IMP_ERR = err
+
+logging.getLogger("ncclient").setLevel(logging.INFO)
+
+
+class Connection(NetworkConnectionBase):
+ """NetConf connections"""
+
+ transport = "ansible.netcommon.netconf"
+ has_pipelining = False
+
+ def __init__(self, play_context, new_stdin, *args, **kwargs):
+ super(Connection, self).__init__(
+ play_context, new_stdin, *args, **kwargs
+ )
+
+ # If network_os is not specified then set the network os to auto
+ # This will be used to trigger the use of guess_network_os when connecting.
+ self._network_os = self._network_os or "auto"
+
+ self.netconf = netconf_loader.get(self._network_os, self)
+ if self.netconf:
+ self._sub_plugin = {
+ "type": "netconf",
+ "name": self.netconf._load_name,
+ "obj": self.netconf,
+ }
+ self.queue_message(
+ "vvvv",
+ "loaded netconf plugin %s from path %s for network_os %s"
+ % (
+ self.netconf._load_name,
+ self.netconf._original_path,
+ self._network_os,
+ ),
+ )
+ else:
+ self.netconf = netconf_loader.get("default", self)
+ self._sub_plugin = {
+ "type": "netconf",
+ "name": "default",
+ "obj": self.netconf,
+ }
+ self.queue_message(
+ "display",
+ "unable to load netconf plugin for network_os %s, falling back to default plugin"
+ % self._network_os,
+ )
+
+ self.queue_message("log", "network_os is set to %s" % self._network_os)
+ self._manager = None
+ self.key_filename = None
+ self._ssh_config = None
+
+ def exec_command(self, cmd, in_data=None, sudoable=True):
+ """Sends the request to the node and returns the reply
+ The method accepts two forms of request. The first form is as a byte
+ string that represents xml string be send over netconf session.
+ The second form is a json-rpc (2.0) byte string.
+ """
+ if self._manager:
+ # to_ele operates on native strings
+ request = to_ele(to_native(cmd, errors="surrogate_or_strict"))
+
+ if request is None:
+ return "unable to parse request"
+
+ try:
+ reply = self._manager.rpc(request)
+ except RPCError as exc:
+ error = self.internal_error(
+ data=to_text(to_xml(exc.xml), errors="surrogate_or_strict")
+ )
+ return json.dumps(error)
+
+ return reply.data_xml
+ else:
+ return super(Connection, self).exec_command(cmd, in_data, sudoable)
+
+ @property
+ @ensure_connect
+ def manager(self):
+ return self._manager
+
+ def _connect(self):
+ if not HAS_NCCLIENT:
+ raise AnsibleError(
+ "%s: %s"
+ % (
+ missing_required_lib("ncclient"),
+ to_native(NCCLIENT_IMP_ERR),
+ )
+ )
+
+ self.queue_message("log", "ssh connection done, starting ncclient")
+
+ allow_agent = True
+ if self._play_context.password is not None:
+ allow_agent = False
+ setattr(self._play_context, "allow_agent", allow_agent)
+
+ self.key_filename = (
+ self._play_context.private_key_file
+ or self.get_option("private_key_file")
+ )
+ if self.key_filename:
+ self.key_filename = str(os.path.expanduser(self.key_filename))
+
+ self._ssh_config = self.get_option("netconf_ssh_config")
+ if self._ssh_config in BOOLEANS_TRUE:
+ self._ssh_config = True
+ elif self._ssh_config in BOOLEANS_FALSE:
+ self._ssh_config = None
+
+ # Try to guess the network_os if the network_os is set to auto
+ if self._network_os == "auto":
+ for cls in netconf_loader.all(class_only=True):
+ network_os = cls.guess_network_os(self)
+ if network_os:
+ self.queue_message(
+ "vvv", "discovered network_os %s" % network_os
+ )
+ self._network_os = network_os
+
+ # If we have tried to detect the network_os but were unable to i.e. network_os is still 'auto'
+ # then use default as the network_os
+
+ if self._network_os == "auto":
+ # Network os not discovered. Set it to default
+ self.queue_message(
+ "vvv",
+ "Unable to discover network_os. Falling back to default.",
+ )
+ self._network_os = "default"
+ try:
+ ncclient_device_handler = self.netconf.get_option(
+ "ncclient_device_handler"
+ )
+ except KeyError:
+ ncclient_device_handler = "default"
+ self.queue_message(
+ "vvv",
+ "identified ncclient device handler: %s."
+ % ncclient_device_handler,
+ )
+ device_params = {"name": ncclient_device_handler}
+
+ try:
+ port = self._play_context.port or 830
+ self.queue_message(
+ "vvv",
+ "ESTABLISH NETCONF SSH CONNECTION FOR USER: %s on PORT %s TO %s WITH SSH_CONFIG = %s"
+ % (
+ self._play_context.remote_user,
+ port,
+ self._play_context.remote_addr,
+ self._ssh_config,
+ ),
+ )
+ self._manager = manager.connect(
+ host=self._play_context.remote_addr,
+ port=port,
+ username=self._play_context.remote_user,
+ password=self._play_context.password,
+ key_filename=self.key_filename,
+ hostkey_verify=self.get_option("host_key_checking"),
+ look_for_keys=self.get_option("look_for_keys"),
+ device_params=device_params,
+ allow_agent=self._play_context.allow_agent,
+ timeout=self.get_option("persistent_connect_timeout"),
+ ssh_config=self._ssh_config,
+ )
+
+ self._manager._timeout = self.get_option(
+ "persistent_command_timeout"
+ )
+ except SSHUnknownHostError as exc:
+ raise AnsibleConnectionFailure(to_native(exc))
+ except ImportError:
+ raise AnsibleError(
+ "connection=netconf is not supported on {0}".format(
+ self._network_os
+ )
+ )
+
+ if not self._manager.connected:
+ return 1, b"", b"not connected"
+
+ self.queue_message(
+ "log", "ncclient manager object created successfully"
+ )
+
+ self._connected = True
+
+ super(Connection, self)._connect()
+
+ return (
+ 0,
+ to_bytes(self._manager.session_id, errors="surrogate_or_strict"),
+ b"",
+ )
+
+ def close(self):
+ if self._manager:
+ self._manager.close_session()
+ super(Connection, self).close()
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/network_cli.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/network_cli.py
index d0d977fa..fef40810 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/network_cli.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/network_cli.py
@@ -302,7 +302,7 @@ from functools import wraps
from io import BytesIO
from ansible.errors import AnsibleConnectionFailure, AnsibleError
-from ansible.module_utils.common.text.converters import to_bytes, to_text
+from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.basic import missing_required_lib
from ansible.module_utils.six import PY3
from ansible.module_utils.six.moves import cPickle
@@ -1310,6 +1310,7 @@ class Connection(NetworkConnectionBase):
remote host before triggering timeout exception
:return: None
"""
+ """Fetch file over scp/sftp from remote device"""
ssh = self.ssh_type_conn._connect_uncached()
if self.ssh_type == "libssh":
self.ssh_type_conn.fetch_file(source, destination, proto=proto)
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/persistent.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/persistent.py
index c7379a63..b29b4872 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/persistent.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/persistent.py
@@ -29,7 +29,7 @@ options:
"""
from ansible.executor.task_executor import start_connection
from ansible.plugins.connection import ConnectionBase
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection as SocketConnection
from ansible.utils.display import Display
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py
new file mode 100644
index 00000000..8789075a
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+
+class ModuleDocFragment(object):
+
+ # Standard files documentation fragment
+ DOCUMENTATION = r"""options:
+ host:
+ description:
+ - Specifies the DNS host name or address for connecting to the remote device over
+ the specified transport. The value of host is used as the destination address
+ for the transport.
+ type: str
+ required: true
+ port:
+ description:
+ - Specifies the port to use when building the connection to the remote device. The
+ port value will default to port 830.
+ type: int
+ default: 830
+ username:
+ description:
+ - Configures the username to use to authenticate the connection to the remote
+ device. This value is used to authenticate the SSH session. If the value is
+ not specified in the task, the value of environment variable C(ANSIBLE_NET_USERNAME)
+ will be used instead.
+ type: str
+ password:
+ description:
+ - Specifies the password to use to authenticate the connection to the remote device. This
+ value is used to authenticate the SSH session. If the value is not specified
+ in the task, the value of environment variable C(ANSIBLE_NET_PASSWORD) will
+ be used instead.
+ type: str
+ timeout:
+ description:
+ - Specifies the timeout in seconds for communicating with the network device for
+ either connecting or sending commands. If the timeout is exceeded before the
+ operation is completed, the module will error.
+ type: int
+ default: 10
+ ssh_keyfile:
+ description:
+ - Specifies the SSH key to use to authenticate the connection to the remote device. This
+ value is the path to the key used to authenticate the SSH session. If the value
+ is not specified in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE)
+ will be used instead.
+ type: path
+ hostkey_verify:
+ description:
+ - If set to C(yes), the ssh host key of the device must match a ssh key present
+      on the host; if set to C(no), the ssh host key of the device is not checked.
+ type: bool
+ default: true
+ look_for_keys:
+ description:
+ - Enables looking in the usual locations for the ssh keys (e.g. :file:`~/.ssh/id_*`)
+ type: bool
+ default: true
+notes:
+- For information on using netconf see the :ref:`Platform Options guide using Netconf<netconf_enabled_platform_options>`
+- For more information on using Ansible to manage network devices see the :ref:`Ansible
+ Network Guide <network_guide>`
+"""
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py
new file mode 100644
index 00000000..ad65f6ef
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2019 Ansible, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+
+class ModuleDocFragment(object):
+
+ # Standard files documentation fragment
+ DOCUMENTATION = r"""options: {}
+notes:
+- This module is supported on C(ansible_network_os) network platforms. See the :ref:`Network
+ Platform Options <platform_options>` for details.
+"""
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/ipaddr.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/ipaddr.py
new file mode 100644
index 00000000..6ae47a73
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/ipaddr.py
@@ -0,0 +1,1186 @@
+# (c) 2014, Maciej Delmanowski <drybjed@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from functools import partial
+import types
+
+try:
+ import netaddr
+except ImportError:
+ # in this case, we'll make the filters return error messages (see bottom)
+ netaddr = None
+else:
+
+ class mac_linux(netaddr.mac_unix):
+ pass
+
+ mac_linux.word_fmt = "%.2x"
+
+from ansible import errors
+
+
+# ---- IP address and network query helpers ----
+def _empty_ipaddr_query(v, vtype):
+ # We don't have any query to process, so just check what type the user
+ # expects, and return the IP address in a correct format
+ if v:
+ if vtype == "address":
+ return str(v.ip)
+ elif vtype == "network":
+ return str(v)
+
+
def _first_last(v):
    """Return ``(first_usable, last_usable)`` as ints for network *v*.

    A /31 (size 2) has no distinct network/broadcast address, so both of
    its addresses are usable; larger networks exclude the first and last.
    """
    if v.size == 2:
        return int(netaddr.IPAddress(v.first)), int(netaddr.IPAddress(v.last))
    if v.size > 1:
        return (
            int(netaddr.IPAddress(v.first + 1)),
            int(netaddr.IPAddress(v.last - 1)),
        )
+
+
def _6to4_query(v, vtype, value):
    """Map a public IPv4 address to its 6to4 (2002::/16) prefix, or pass
    through an IPv6 value that already lies in 2002::/16."""
    if v.version == 4:

        # Extract a plain address string; a bare network (ip == network)
        # yields False and is rejected below.
        if v.size == 1:
            ipconv = str(v.ip)
        elif v.size > 1:
            if v.ip != v.network:
                ipconv = str(v.ip)
            else:
                ipconv = False

        # Only public addresses get a 6to4 mapping; note 'numbers' is bound
        # ONLY inside this branch.
        if ipaddr(ipconv, "public"):
            numbers = list(map(int, ipconv.split(".")))

        # If 'numbers' was never bound (non-public address), the NameError
        # raised here is swallowed by the except and False is returned —
        # this fall-through is deliberate, do not narrow the except.
        try:
            return "2002:{:02x}{:02x}:{:02x}{:02x}::1/48".format(*numbers)
        except Exception:
            return False

    elif v.version == 6:
        if vtype == "address":
            if ipaddr(str(v), "2002::/16"):
                return value
        elif vtype == "network":
            if v.ip != v.network:
                if ipaddr(str(v.ip), "2002::/16"):
                    return value
            else:
                return False
+
+
+def _ip_query(v):
+ if v.size == 1:
+ return str(v.ip)
+ if v.size > 1:
+ # /31 networks in netaddr have no broadcast address
+ if v.ip != v.network or not v.broadcast:
+ return str(v.ip)
+
+
+def _gateway_query(v):
+ if v.size > 1:
+ if v.ip != v.network:
+ return str(v.ip) + "/" + str(v.prefixlen)
+
+
+def _address_prefix_query(v):
+ if v.size > 1:
+ if v.ip != v.network:
+ return str(v.ip) + "/" + str(v.prefixlen)
+
+
+def _bool_ipaddr_query(v):
+ if v:
+ return True
+
+
+def _broadcast_query(v):
+ if v.size > 2:
+ return str(v.broadcast)
+
+
+def _cidr_query(v):
+ return str(v)
+
+
+def _cidr_lookup_query(v, iplist, value):
+ try:
+ if v in iplist:
+ return value
+ except Exception:
+ return False
+
+
def _first_usable_query(v, vtype):
    """Return the first usable host address of network *v* as a string.

    :raises AnsibleFilterError: when *v* is a plain address — a lone
        address has no "first usable" host range.
    """
    if vtype == "address":
        raise errors.AnsibleFilterError("Not a network address")
    elif vtype == "network":
        if v.size == 2:
            # /31: both addresses are usable, so start at the network address
            return str(netaddr.IPAddress(int(v.network)))
        elif v.size > 1:
            return str(netaddr.IPAddress(int(v.network) + 1))
+
+
+def _host_query(v):
+ if v.size == 1:
+ return str(v)
+ elif v.size > 1:
+ if v.ip != v.network:
+ return str(v.ip) + "/" + str(v.prefixlen)
+
+
+def _hostmask_query(v):
+ return str(v.hostmask)
+
+
+def _int_query(v, vtype):
+ if vtype == "address":
+ return int(v.ip)
+ elif vtype == "network":
+ return str(int(v.ip)) + "/" + str(int(v.prefixlen))
+
+
+def _ip_prefix_query(v):
+ if v.size == 2:
+ return str(v.ip) + "/" + str(v.prefixlen)
+ elif v.size > 1:
+ if v.ip != v.network:
+ return str(v.ip) + "/" + str(v.prefixlen)
+
+
+def _ip_netmask_query(v):
+ if v.size == 2:
+ return str(v.ip) + " " + str(v.netmask)
+ elif v.size > 1:
+ if v.ip != v.network:
+ return str(v.ip) + " " + str(v.netmask)
+
+
+"""
+def _ip_wildcard_query(v):
+ if v.size == 2:
+ return str(v.ip) + ' ' + str(v.hostmask)
+ elif v.size > 1:
+ if v.ip != v.network:
+ return str(v.ip) + ' ' + str(v.hostmask)
+"""
+
+
+def _ipv4_query(v, value):
+ if v.version == 6:
+ try:
+ return str(v.ipv4())
+ except Exception:
+ return False
+ else:
+ return value
+
+
+def _ipv6_query(v, value):
+ if v.version == 4:
+ return str(v.ipv6())
+ else:
+ return value
+
+
def _last_usable_query(v, vtype):
    """Return the last usable host address of network *v* as a string.

    :raises AnsibleFilterError: when *v* is a plain address — a lone
        address has no usable-host range.
    """
    if vtype == "address":
        raise errors.AnsibleFilterError("Not a network address")
    elif vtype == "network":
        if v.size > 1:
            first_usable, last_usable = _first_last(v)
            return str(netaddr.IPAddress(last_usable))
+
+
+def _link_local_query(v, value):
+ v_ip = netaddr.IPAddress(str(v.ip))
+ if v.version == 4:
+ if ipaddr(str(v_ip), "169.254.0.0/24"):
+ return value
+
+ elif v.version == 6:
+ if ipaddr(str(v_ip), "fe80::/10"):
+ return value
+
+
+def _loopback_query(v, value):
+ v_ip = netaddr.IPAddress(str(v.ip))
+ if v_ip.is_loopback():
+ return value
+
+
+def _multicast_query(v, value):
+ if v.is_multicast():
+ return value
+
+
+def _net_query(v):
+ if v.size > 1:
+ if v.ip == v.network:
+ return str(v.network) + "/" + str(v.prefixlen)
+
+
+def _netmask_query(v):
+ return str(v.netmask)
+
+
+def _network_query(v):
+ """Return the network of a given IP or subnet"""
+ return str(v.network)
+
+
+def _network_id_query(v):
+ """Return the network of a given IP or subnet"""
+ return str(v.network)
+
+
+def _network_netmask_query(v):
+ return str(v.network) + " " + str(v.netmask)
+
+
+def _network_wildcard_query(v):
+ return str(v.network) + " " + str(v.hostmask)
+
+
def _next_usable_query(v, vtype):
    """Return the usable host address after ``v.ip``, or None at range end.

    :raises AnsibleFilterError: when *v* is a plain address rather than
        a network.
    """
    if vtype == "address":
        raise errors.AnsibleFilterError("Not a network address")
    elif vtype == "network":
        if v.size > 1:
            first_usable, last_usable = _first_last(v)
            next_ip = int(netaddr.IPAddress(int(v.ip) + 1))
            if next_ip >= first_usable and next_ip <= last_usable:
                return str(netaddr.IPAddress(int(v.ip) + 1))
+
+
+def _peer_query(v, vtype):
+ if vtype == "address":
+ raise errors.AnsibleFilterError("Not a network address")
+ elif vtype == "network":
+ if v.size == 2:
+ return str(netaddr.IPAddress(int(v.ip) ^ 1))
+ if v.size == 4:
+ if int(v.ip) % 4 == 0:
+ raise errors.AnsibleFilterError(
+ "Network address of /30 has no peer"
+ )
+ if int(v.ip) % 4 == 3:
+ raise errors.AnsibleFilterError(
+ "Broadcast address of /30 has no peer"
+ )
+ return str(netaddr.IPAddress(int(v.ip) ^ 3))
+ raise errors.AnsibleFilterError("Not a point-to-point network")
+
+
+def _prefix_query(v):
+ return int(v.prefixlen)
+
+
def _previous_usable_query(v, vtype):
    """Return the usable host address before ``v.ip``, or None at range start.

    :raises AnsibleFilterError: when *v* is a plain address rather than
        a network.
    """
    if vtype == "address":
        raise errors.AnsibleFilterError("Not a network address")
    elif vtype == "network":
        if v.size > 1:
            first_usable, last_usable = _first_last(v)
            previous_ip = int(netaddr.IPAddress(int(v.ip) - 1))
            if previous_ip >= first_usable and previous_ip <= last_usable:
                return str(netaddr.IPAddress(int(v.ip) - 1))
+
+
+def _private_query(v, value):
+ if v.is_private():
+ return value
+
+
def _public_query(v, value):
    """Return *value* when ``v.ip`` is a globally routable unicast address."""
    v_ip = netaddr.IPAddress(str(v.ip))
    is_public = (
        v_ip.is_unicast()
        and not v_ip.is_private()
        and not v_ip.is_loopback()
        and not v_ip.is_netmask()
        and not v_ip.is_hostmask()
    )
    if is_public:
        return value
+
+
def _range_usable_query(v, vtype):
    """Return the usable host range of network *v* as "first-last".

    :raises AnsibleFilterError: when *v* is a plain address rather than
        a network.
    """
    if vtype == "address":
        raise errors.AnsibleFilterError("Not a network address")
    elif vtype == "network":
        if v.size > 1:
            first_usable, last_usable = _first_last(v)
            first_usable = str(netaddr.IPAddress(first_usable))
            last_usable = str(netaddr.IPAddress(last_usable))
            return "{0}-{1}".format(first_usable, last_usable)
+
+
+def _revdns_query(v):
+ v_ip = netaddr.IPAddress(str(v.ip))
+ return v_ip.reverse_dns
+
+
+def _size_query(v):
+ return v.size
+
+
+def _size_usable_query(v):
+ if v.size == 1:
+ return 0
+ elif v.size == 2:
+ return 2
+ return v.size - 2
+
+
+def _subnet_query(v):
+ return str(v.cidr)
+
+
+def _type_query(v):
+ if v.size == 1:
+ return "address"
+ if v.size > 1:
+ if v.ip != v.network:
+ return "address"
+ else:
+ return "network"
+
+
+def _unicast_query(v, value):
+ if v.is_unicast():
+ return value
+
+
+def _version_query(v):
+ return v.version
+
+
+def _wrap_query(v, vtype, value):
+ if v.version == 6:
+ if vtype == "address":
+ return "[" + str(v.ip) + "]"
+ elif vtype == "network":
+ return "[" + str(v.ip) + "]/" + str(v.prefixlen)
+ else:
+ return value
+
+
+# ---- HWaddr query helpers ----
+def _bare_query(v):
+ v.dialect = netaddr.mac_bare
+ return str(v)
+
+
+def _bool_hwaddr_query(v):
+ if v:
+ return True
+
+
+def _int_hwaddr_query(v):
+ return int(v)
+
+
+def _cisco_query(v):
+ v.dialect = netaddr.mac_cisco
+ return str(v)
+
+
+def _empty_hwaddr_query(v, value):
+ if v:
+ return value
+
+
+def _linux_query(v):
+ v.dialect = mac_linux
+ return str(v)
+
+
+def _postgresql_query(v):
+ v.dialect = netaddr.mac_pgsql
+ return str(v)
+
+
+def _unix_query(v):
+ v.dialect = netaddr.mac_unix
+ return str(v)
+
+
+def _win_query(v):
+ v.dialect = netaddr.mac_eui48
+ return str(v)
+
+
+# ---- IP address and network filters ----
+
+# Returns a minified list of subnets or a single subnet that spans all of
+# the inputs.
def cidr_merge(value, action="merge"):
    """Merge or span a collection of IP networks.

    :param value: iterable of IP addresses/networks (strings).
    :param action: "merge" returns a minified list of subnets covering the
        input; "span" returns the single smallest subnet spanning them all.
    :raises AnsibleFilterError: on non-iterable input, netaddr parse
        errors, or an unknown action.
    """
    if not hasattr(value, "__iter__"):
        raise errors.AnsibleFilterError(
            "cidr_merge: expected iterable, got " + repr(value)
        )
    # Materialize generators up front: the "span" branch needs len() and
    # indexing, which would otherwise fail on a one-shot iterator.
    value = list(value)

    if action == "merge":
        try:
            return [str(ip) for ip in netaddr.cidr_merge(value)]
        except Exception as e:
            raise errors.AnsibleFilterError(
                "cidr_merge: error in netaddr:\n%s" % e
            )

    elif action == "span":
        # spanning_cidr needs at least two values
        if len(value) == 0:
            return None
        elif len(value) == 1:
            try:
                return str(netaddr.IPNetwork(value[0]))
            except Exception as e:
                raise errors.AnsibleFilterError(
                    "cidr_merge: error in netaddr:\n%s" % e
                )
        else:
            try:
                return str(netaddr.spanning_cidr(value))
            except Exception as e:
                raise errors.AnsibleFilterError(
                    "cidr_merge: error in netaddr:\n%s" % e
                )

    else:
        raise errors.AnsibleFilterError(
            "cidr_merge: invalid action '%s'" % action
        )
+
+
+def ipaddr(value, query="", version=False, alias="ipaddr"):
+ """ Check if string is an IP address or network and filter it """
+
+ query_func_extra_args = {
+ "": ("vtype",),
+ "6to4": ("vtype", "value"),
+ "cidr_lookup": ("iplist", "value"),
+ "first_usable": ("vtype",),
+ "int": ("vtype",),
+ "ipv4": ("value",),
+ "ipv6": ("value",),
+ "last_usable": ("vtype",),
+ "link-local": ("value",),
+ "loopback": ("value",),
+ "lo": ("value",),
+ "multicast": ("value",),
+ "next_usable": ("vtype",),
+ "peer": ("vtype",),
+ "previous_usable": ("vtype",),
+ "private": ("value",),
+ "public": ("value",),
+ "unicast": ("value",),
+ "range_usable": ("vtype",),
+ "wrap": ("vtype", "value"),
+ }
+
+ query_func_map = {
+ "": _empty_ipaddr_query,
+ "6to4": _6to4_query,
+ "address": _ip_query,
+ "address/prefix": _address_prefix_query, # deprecate
+ "bool": _bool_ipaddr_query,
+ "broadcast": _broadcast_query,
+ "cidr": _cidr_query,
+ "cidr_lookup": _cidr_lookup_query,
+ "first_usable": _first_usable_query,
+ "gateway": _gateway_query, # deprecate
+ "gw": _gateway_query, # deprecate
+ "host": _host_query,
+ "host/prefix": _address_prefix_query, # deprecate
+ "hostmask": _hostmask_query,
+ "hostnet": _gateway_query, # deprecate
+ "int": _int_query,
+ "ip": _ip_query,
+ "ip/prefix": _ip_prefix_query,
+ "ip_netmask": _ip_netmask_query,
+ # 'ip_wildcard': _ip_wildcard_query, built then could not think of use case
+ "ipv4": _ipv4_query,
+ "ipv6": _ipv6_query,
+ "last_usable": _last_usable_query,
+ "link-local": _link_local_query,
+ "lo": _loopback_query,
+ "loopback": _loopback_query,
+ "multicast": _multicast_query,
+ "net": _net_query,
+ "next_usable": _next_usable_query,
+ "netmask": _netmask_query,
+ "network": _network_query,
+ "network_id": _network_id_query,
+ "network/prefix": _subnet_query,
+ "network_netmask": _network_netmask_query,
+ "network_wildcard": _network_wildcard_query,
+ "peer": _peer_query,
+ "prefix": _prefix_query,
+ "previous_usable": _previous_usable_query,
+ "private": _private_query,
+ "public": _public_query,
+ "range_usable": _range_usable_query,
+ "revdns": _revdns_query,
+ "router": _gateway_query, # deprecate
+ "size": _size_query,
+ "size_usable": _size_usable_query,
+ "subnet": _subnet_query,
+ "type": _type_query,
+ "unicast": _unicast_query,
+ "v4": _ipv4_query,
+ "v6": _ipv6_query,
+ "version": _version_query,
+ "wildcard": _hostmask_query,
+ "wrap": _wrap_query,
+ }
+
+ vtype = None
+
+ if not value:
+ return False
+
+ elif value is True:
+ return False
+
+ # Check if value is a list and parse each element
+ elif isinstance(value, (list, tuple, types.GeneratorType)):
+
+ _ret = []
+ for element in value:
+ if ipaddr(element, str(query), version):
+ _ret.append(ipaddr(element, str(query), version))
+
+ if _ret:
+ return _ret
+ else:
+ return list()
+
+ # Check if value is a number and convert it to an IP address
+ elif str(value).isdigit():
+
+ # We don't know what IP version to assume, so let's check IPv4 first,
+ # then IPv6
+ try:
+ if (not version) or (version and version == 4):
+ v = netaddr.IPNetwork("0.0.0.0/0")
+ v.value = int(value)
+ v.prefixlen = 32
+ elif version and version == 6:
+ v = netaddr.IPNetwork("::/0")
+ v.value = int(value)
+ v.prefixlen = 128
+
+ # IPv4 didn't work the first time, so it definitely has to be IPv6
+ except Exception:
+ try:
+ v = netaddr.IPNetwork("::/0")
+ v.value = int(value)
+ v.prefixlen = 128
+
+ # The value is too big for IPv6. Are you a nanobot?
+ except Exception:
+ return False
+
+ # We got an IP address, let's mark it as such
+ value = str(v)
+ vtype = "address"
+
+ # value has not been recognized, check if it's a valid IP string
+ else:
+ try:
+ v = netaddr.IPNetwork(value)
+
+ # value is a valid IP string, check if user specified
+ # CIDR prefix or just an IP address, this will indicate default
+ # output format
+ try:
+ address, prefix = value.split("/")
+ vtype = "network"
+ except Exception:
+ vtype = "address"
+
+ # value hasn't been recognized, maybe it's a numerical CIDR?
+ except Exception:
+ try:
+ address, prefix = value.split("/")
+ address.isdigit()
+ address = int(address)
+ prefix.isdigit()
+ prefix = int(prefix)
+
+ # It's not numerical CIDR, give up
+ except Exception:
+ return False
+
+ # It is something, so let's try and build a CIDR from the parts
+ try:
+ v = netaddr.IPNetwork("0.0.0.0/0")
+ v.value = address
+ v.prefixlen = prefix
+
+ # It's not a valid IPv4 CIDR
+ except Exception:
+ try:
+ v = netaddr.IPNetwork("::/0")
+ v.value = address
+ v.prefixlen = prefix
+
+ # It's not a valid IPv6 CIDR. Give up.
+ except Exception:
+ return False
+
+ # We have a valid CIDR, so let's write it in correct format
+ value = str(v)
+ vtype = "network"
+
+ # We have a query string but it's not in the known query types. Check if
+ # that string is a valid subnet, if so, we can check later if given IP
+ # address/network is inside that specific subnet
+ try:
+ # ?? 6to4 and link-local were True here before. Should they still?
+ if (
+ query
+ and (query not in query_func_map or query == "cidr_lookup")
+ and not str(query).isdigit()
+ and ipaddr(query, "network")
+ ):
+ iplist = netaddr.IPSet([netaddr.IPNetwork(query)])
+ query = "cidr_lookup"
+ except Exception:
+ pass
+
+ # This code checks if value maches the IP version the user wants, ie. if
+ # it's any version ("ipaddr()"), IPv4 ("ipv4()") or IPv6 ("ipv6()")
+ # If version does not match, return False
+ if version and v.version != version:
+ return False
+
+ extras = []
+ for arg in query_func_extra_args.get(query, tuple()):
+ extras.append(locals()[arg])
+ try:
+ return query_func_map[query](v, *extras)
+ except KeyError:
+ try:
+ float(query)
+ if v.size == 1:
+ if vtype == "address":
+ return str(v.ip)
+ elif vtype == "network":
+ return str(v)
+
+ elif v.size > 1:
+ try:
+ return str(v[query]) + "/" + str(v.prefixlen)
+ except Exception:
+ return False
+
+ else:
+ return value
+
+ except Exception:
+ raise errors.AnsibleFilterError(
+ alias + ": unknown filter type: %s" % query
+ )
+
+ return False
+
+
def ipmath(value, amount):
    """Add *amount* (possibly negative) to an IP address and return the result.

    :param value: an IP address, or a CIDR whose base address is used.
    :param amount: integer offset to add.
    :raises AnsibleFilterError: on an unparseable address or non-int amount.
    """
    try:
        ip = (
            netaddr.IPNetwork(value).ip
            if "/" in value
            else netaddr.IPAddress(value)
        )
    except (netaddr.AddrFormatError, ValueError):
        raise errors.AnsibleFilterError(
            "You must pass a valid IP address; {0} is invalid".format(value)
        )

    if not isinstance(amount, int):
        raise errors.AnsibleFilterError(
            (
                "You must pass an integer for arithmetic; "
                "{0} is not a valid integer"
            ).format(amount)
        )

    return str(ip + amount)
+
+
+def ipwrap(value, query=""):
+ try:
+ if isinstance(value, (list, tuple, types.GeneratorType)):
+ _ret = []
+ for element in value:
+ if ipaddr(element, query, version=False, alias="ipwrap"):
+ _ret.append(ipaddr(element, "wrap"))
+ else:
+ _ret.append(element)
+
+ return _ret
+ else:
+ _ret = ipaddr(value, query, version=False, alias="ipwrap")
+ if _ret:
+ return ipaddr(_ret, "wrap")
+ else:
+ return value
+
+ except Exception:
+ return value
+
+
+def ipv4(value, query=""):
+ return ipaddr(value, query, version=4, alias="ipv4")
+
+
+def ipv6(value, query=""):
+ return ipaddr(value, query, version=6, alias="ipv6")
+
+
+# Split given subnet into smaller subnets or find out the biggest subnet of
+# a given IP address with given CIDR prefix
+# Usage:
+#
+# - address or address/prefix | ipsubnet
+# returns CIDR subnet of a given input
+#
+# - address/prefix | ipsubnet(cidr)
+# returns number of possible subnets for given CIDR prefix
+#
+# - address/prefix | ipsubnet(cidr, index)
+# returns new subnet with given CIDR prefix
+#
+# - address | ipsubnet(cidr)
+# returns biggest subnet with given CIDR prefix that address belongs to
+#
+# - address | ipsubnet(cidr, index)
+# returns next indexed subnet which contains given address
+#
+# - address/prefix | ipsubnet(subnet/prefix)
+# return the index of the subnet in the subnet
+def ipsubnet(value, query="", index="x"):
+ """ Manipulate IPv4/IPv6 subnets """
+
+ try:
+ vtype = ipaddr(value, "type")
+ if vtype == "address":
+ v = ipaddr(value, "cidr")
+ elif vtype == "network":
+ v = ipaddr(value, "subnet")
+
+ value = netaddr.IPNetwork(v)
+ except Exception:
+ return False
+ query_string = str(query)
+ if not query:
+ return str(value)
+
+ elif query_string.isdigit():
+ vsize = ipaddr(v, "size")
+ query = int(query)
+
+ try:
+ float(index)
+ index = int(index)
+
+ if vsize > 1:
+ try:
+ return str(list(value.subnet(query))[index])
+ except Exception:
+ return False
+
+ elif vsize == 1:
+ try:
+ return str(value.supernet(query)[index])
+ except Exception:
+ return False
+
+ except Exception:
+ if vsize > 1:
+ try:
+ return str(len(list(value.subnet(query))))
+ except Exception:
+ return False
+
+ elif vsize == 1:
+ try:
+ return str(value.supernet(query)[0])
+ except Exception:
+ return False
+
+ elif query_string:
+ vtype = ipaddr(query, "type")
+ if vtype == "address":
+ v = ipaddr(query, "cidr")
+ elif vtype == "network":
+ v = ipaddr(query, "subnet")
+ else:
+ msg = "You must pass a valid subnet or IP address; {0} is invalid".format(
+ query_string
+ )
+ raise errors.AnsibleFilterError(msg)
+ query = netaddr.IPNetwork(v)
+ for i, subnet in enumerate(query.subnet(value.prefixlen), 1):
+ if subnet == value:
+ return str(i)
+ msg = "{0} is not in the subnet {1}".format(value.cidr, query.cidr)
+ raise errors.AnsibleFilterError(msg)
+ return False
+
+
+# Returns the nth host within a network described by value.
+# Usage:
+#
+# - address or address/prefix | nthhost(nth)
+# returns the nth host within the given network
+def nthhost(value, query=""):
+ """ Get the nth host within a given network """
+ try:
+ vtype = ipaddr(value, "type")
+ if vtype == "address":
+ v = ipaddr(value, "cidr")
+ elif vtype == "network":
+ v = ipaddr(value, "subnet")
+
+ value = netaddr.IPNetwork(v)
+ except Exception:
+ return False
+
+ if not query:
+ return False
+
+ try:
+ nth = int(query)
+ if value.size > nth:
+ return value[nth]
+
+ except ValueError:
+ return False
+
+ return False
+
+
+# Returns the next nth usable ip within a network described by value.
+def next_nth_usable(value, offset):
+ try:
+ vtype = ipaddr(value, "type")
+ if vtype == "address":
+ v = ipaddr(value, "cidr")
+ elif vtype == "network":
+ v = ipaddr(value, "subnet")
+
+ v = netaddr.IPNetwork(v)
+ except Exception:
+ return False
+
+ if type(offset) != int:
+ raise errors.AnsibleFilterError("Must pass in an integer")
+ if v.size > 1:
+ first_usable, last_usable = _first_last(v)
+ nth_ip = int(netaddr.IPAddress(int(v.ip) + offset))
+ if nth_ip >= first_usable and nth_ip <= last_usable:
+ return str(netaddr.IPAddress(int(v.ip) + offset))
+
+
+# Returns the previous nth usable ip within a network described by value.
+def previous_nth_usable(value, offset):
+ try:
+ vtype = ipaddr(value, "type")
+ if vtype == "address":
+ v = ipaddr(value, "cidr")
+ elif vtype == "network":
+ v = ipaddr(value, "subnet")
+
+ v = netaddr.IPNetwork(v)
+ except Exception:
+ return False
+
+ if type(offset) != int:
+ raise errors.AnsibleFilterError("Must pass in an integer")
+ if v.size > 1:
+ first_usable, last_usable = _first_last(v)
+ nth_ip = int(netaddr.IPAddress(int(v.ip) - offset))
+ if nth_ip >= first_usable and nth_ip <= last_usable:
+ return str(netaddr.IPAddress(int(v.ip) - offset))
+
+
+def _range_checker(ip_check, first, last):
+ """
+ Tests whether an ip address is within the bounds of the first and last address.
+
+ :param ip_check: The ip to test if it is within first and last.
+ :param first: The first IP in the range to test against.
+ :param last: The last IP in the range to test against.
+
+ :return: bool
+ """
+ if ip_check >= first and ip_check <= last:
+ return True
+ else:
+ return False
+
+
+def _address_normalizer(value):
+ """
+ Used to validate an address or network type and return it in a consistent format.
+ This is being used for future use cases not currently available such as an address range.
+
+ :param value: The string representation of an address or network.
+
+ :return: The address or network in the normalized form.
+ """
+ try:
+ vtype = ipaddr(value, "type")
+ if vtype == "address" or vtype == "network":
+ v = ipaddr(value, "subnet")
+ except Exception:
+ return False
+
+ return v
+
+
def network_in_usable(value, test):
    """
    Checks whether 'test' is a usable address or addresses in 'value'

    :param: value: The string representation of an address or network to test against.
    :param test: The string representation of an address or network to validate if it is within the range of 'value'.

    :return: bool
    """
    # normalize value and test variables into an ipaddr
    v = _address_normalizer(value)
    w = _address_normalizer(test)

    # get first and last addresses as integers to compare value and test; or catches value when case is /32
    v_first = ipaddr(ipaddr(v, "first_usable") or ipaddr(v, "address"), "int")
    v_last = ipaddr(ipaddr(v, "last_usable") or ipaddr(v, "address"), "int")
    w_first = ipaddr(ipaddr(w, "network") or ipaddr(w, "address"), "int")
    w_last = ipaddr(ipaddr(w, "broadcast") or ipaddr(w, "address"), "int")

    # Both endpoints of 'test' must fall inside the usable range of 'value'.
    return _range_checker(w_first, v_first, v_last) and _range_checker(
        w_last, v_first, v_last
    )
+
+
def network_in_network(value, test):
    """
    Checks whether the 'test' address or addresses are in 'value', including broadcast and network

    :param: value: The network address or range to test against.
    :param test: The address or network to validate if it is within the range of 'value'.

    :return: bool
    """
    # normalize value and test variables into an ipaddr
    v = _address_normalizer(value)
    w = _address_normalizer(test)

    # get first and last addresses as integers to compare value and test; or catches value when case is /32
    v_first = ipaddr(ipaddr(v, "network") or ipaddr(v, "address"), "int")
    v_last = ipaddr(ipaddr(v, "broadcast") or ipaddr(v, "address"), "int")
    w_first = ipaddr(ipaddr(w, "network") or ipaddr(w, "address"), "int")
    w_last = ipaddr(ipaddr(w, "broadcast") or ipaddr(w, "address"), "int")

    # Both endpoints of 'test' must fall inside the full span of 'value'.
    return _range_checker(w_first, v_first, v_last) and _range_checker(
        w_last, v_first, v_last
    )
+
+
def reduce_on_network(value, network):
    """
    Reduces a list of addresses to only the addresses that match a given network.

    :param: value: The list of addresses to filter on.
    :param: network: The network to validate against.

    :return: The reduced list of addresses.
    """
    # normalize network variable into an ipaddr
    n = _address_normalizer(network)

    # get first and last addresses as integers to compare value and test; or catches value when case is /32
    n_first = ipaddr(ipaddr(n, "network") or ipaddr(n, "address"), "int")
    n_last = ipaddr(ipaddr(n, "broadcast") or ipaddr(n, "address"), "int")

    # create an empty list to fill and return
    r = []

    for address in value:
        # normalize address variables into an ipaddr
        a = _address_normalizer(address)

        # get first and last addresses as integers to compare value and test; or catches value when case is /32
        a_first = ipaddr(ipaddr(a, "network") or ipaddr(a, "address"), "int")
        a_last = ipaddr(ipaddr(a, "broadcast") or ipaddr(a, "address"), "int")

        # keep the address only when its whole span lies inside the network
        if _range_checker(a_first, n_first, n_last) and _range_checker(
            a_last, n_first, n_last
        ):
            r.append(address)

    return r
+
+
+# Returns the SLAAC address within a network for a given HW/MAC address.
+# Usage:
+#
+# - prefix | slaac(mac)
+def slaac(value, query=""):
+ """ Get the SLAAC address within given network """
+ try:
+ vtype = ipaddr(value, "type")
+ if vtype == "address":
+ v = ipaddr(value, "cidr")
+ elif vtype == "network":
+ v = ipaddr(value, "subnet")
+
+ if ipaddr(value, "version") != 6:
+ return False
+
+ value = netaddr.IPNetwork(v)
+ except Exception:
+ return False
+
+ if not query:
+ return False
+
+ try:
+ mac = hwaddr(query, alias="slaac")
+
+ eui = netaddr.EUI(mac)
+ except Exception:
+ return False
+
+ return eui.ipv6(value.network)
+
+
+# ---- HWaddr / MAC address filters ----
def hwaddr(value, query="", alias="hwaddr"):
    """Check if string is a HW/MAC address and filter it.

    :param value: the candidate hardware address.
    :param query: optional output format ("bare", "cisco", "linux", ...)
        or "bool" to test validity.
    :param alias: filter name used in error messages.
    :raises AnsibleFilterError: on an invalid address with a formatting
        query, or on an unknown query type.
    """

    query_func_extra_args = {"": ("value",)}

    query_func_map = {
        "": _empty_hwaddr_query,
        "bare": _bare_query,
        "bool": _bool_hwaddr_query,
        "int": _int_hwaddr_query,
        "cisco": _cisco_query,
        "eui48": _win_query,
        "linux": _linux_query,
        "pgsql": _postgresql_query,
        "postgresql": _postgresql_query,
        "psql": _postgresql_query,
        "unix": _unix_query,
        "win": _win_query,
    }

    try:
        v = netaddr.EUI(value)
    except Exception:
        if query and query != "bool":
            raise errors.AnsibleFilterError(
                alias + ": not a hardware address: %s" % value
            )
        # BUG FIX: previously fell through with 'v' unbound, so an invalid
        # address with no query (or the "bool" query) raised a NameError
        # below; it is simply not a hardware address, so return False.
        return False

    extras = []
    for arg in query_func_extra_args.get(query, tuple()):
        extras.append(locals()[arg])
    try:
        return query_func_map[query](v, *extras)
    except KeyError:
        raise errors.AnsibleFilterError(
            alias + ": unknown filter type: %s" % query
        )
+
+
+def macaddr(value, query=""):
+ return hwaddr(value, query, alias="macaddr")
+
+
def _need_netaddr(f_name, *args, **kwargs):
    """Stand-in filter body used when python-netaddr is not installed."""
    msg = (
        "The %s filter requires python's netaddr be "
        "installed on the ansible controller" % f_name
    )
    raise errors.AnsibleFilterError(msg)
+
+
def ip4_hex(arg, delimiter=""):
    """Convert a dotted-quad IPv4 address to hexadecimal notation.

    ``delimiter`` is inserted between the four two-digit hex octets.
    """
    numbers = [int(octet) for octet in arg.split(".")]
    # Build the same "{0:02x}{sep}{1:02x}..." template the original used.
    template = "{sep}".join(["{%d:02x}" % i for i in range(4)])
    return template.format(*numbers, sep=delimiter)
+
+
+# ---- Ansible filters ----
class FilterModule(object):
    """ IP address and network manipulation filters """

    # Filter-name -> callable mapping exposed to Jinja2.
    filter_map = {
        # IP addresses and networks
        "cidr_merge": cidr_merge,
        "ipaddr": ipaddr,
        "ipmath": ipmath,
        "ipwrap": ipwrap,
        "ip4_hex": ip4_hex,
        "ipv4": ipv4,
        "ipv6": ipv6,
        "ipsubnet": ipsubnet,
        "next_nth_usable": next_nth_usable,
        "network_in_network": network_in_network,
        "network_in_usable": network_in_usable,
        "reduce_on_network": reduce_on_network,
        "nthhost": nthhost,
        "previous_nth_usable": previous_nth_usable,
        "slaac": slaac,
        # MAC / HW addresses
        "hwaddr": hwaddr,
        "macaddr": macaddr,
    }

    def filters(self):
        """Return the filter map, degrading gracefully without netaddr."""
        if not netaddr:
            # netaddr missing: every filter raises a helpful error instead
            return dict(
                (name, partial(_need_netaddr, name))
                for name in self.filter_map
            )
        return self.filter_map
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py
new file mode 100644
index 00000000..72d6c868
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py
@@ -0,0 +1,531 @@
+#
+# (c) 2017 Red Hat, Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import re
+import os
+import traceback
+import string
+
+from collections.abc import Mapping
+from xml.etree.ElementTree import fromstring
+
+from ansible.module_utils._text import to_native, to_text
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import (
+ Template,
+)
+from ansible.module_utils.six import iteritems, string_types
+from ansible.errors import AnsibleError, AnsibleFilterError
+from ansible.utils.display import Display
+from ansible.utils.encrypt import passlib_or_crypt, random_password
+
+try:
+ import yaml
+
+ HAS_YAML = True
+except ImportError:
+ HAS_YAML = False
+
+try:
+ import textfsm
+
+ HAS_TEXTFSM = True
+except ImportError:
+ HAS_TEXTFSM = False
+
+display = Display()
+
+
+def re_matchall(regex, value):
+ objects = list()
+ for match in re.findall(regex.pattern, value, re.M):
+ obj = {}
+ if regex.groupindex:
+ for name, index in iteritems(regex.groupindex):
+ if len(regex.groupindex) == 1:
+ obj[name] = match
+ else:
+ obj[name] = match[index - 1]
+ objects.append(obj)
+ return objects
+
+
+def re_search(regex, value):
+ obj = {}
+ match = regex.search(value, re.M)
+ if match:
+ items = list(match.groups())
+ if regex.groupindex:
+ for name, index in iteritems(regex.groupindex):
+ obj[name] = items[index - 1]
+ return obj
+
+
+def parse_cli(output, tmpl):
+ if not isinstance(output, string_types):
+ raise AnsibleError(
+ "parse_cli input should be a string, but was given a input of %s"
+ % (type(output))
+ )
+
+ if not os.path.exists(tmpl):
+ raise AnsibleError("unable to locate parse_cli template: %s" % tmpl)
+
+ try:
+ template = Template()
+ except ImportError as exc:
+ raise AnsibleError(to_native(exc))
+
+ with open(tmpl) as tmpl_fh:
+ tmpl_content = tmpl_fh.read()
+
+ spec = yaml.safe_load(tmpl_content)
+ obj = {}
+
+ for name, attrs in iteritems(spec["keys"]):
+ value = attrs["value"]
+
+ try:
+ variables = spec.get("vars", {})
+ value = template(value, variables)
+ except Exception:
+ pass
+
+ if "start_block" in attrs and "end_block" in attrs:
+ start_block = re.compile(attrs["start_block"])
+ end_block = re.compile(attrs["end_block"])
+
+ blocks = list()
+ lines = None
+ block_started = False
+
+ for line in output.split("\n"):
+ match_start = start_block.match(line)
+ match_end = end_block.match(line)
+
+ if match_start:
+ lines = list()
+ lines.append(line)
+ block_started = True
+
+ elif match_end:
+ if lines:
+ lines.append(line)
+ blocks.append("\n".join(lines))
+ block_started = False
+
+ elif block_started:
+ if lines:
+ lines.append(line)
+
+ regex_items = [re.compile(r) for r in attrs["items"]]
+ objects = list()
+
+ for block in blocks:
+ if isinstance(value, Mapping) and "key" not in value:
+ items = list()
+ for regex in regex_items:
+ match = regex.search(block)
+ if match:
+ item_values = match.groupdict()
+ item_values["match"] = list(match.groups())
+ items.append(item_values)
+ else:
+ items.append(None)
+
+ obj = {}
+ for k, v in iteritems(value):
+ try:
+ obj[k] = template(
+ v, {"item": items}, fail_on_undefined=False
+ )
+ except Exception:
+ obj[k] = None
+ objects.append(obj)
+
+ elif isinstance(value, Mapping):
+ items = list()
+ for regex in regex_items:
+ match = regex.search(block)
+ if match:
+ item_values = match.groupdict()
+ item_values["match"] = list(match.groups())
+ items.append(item_values)
+ else:
+ items.append(None)
+
+ key = template(value["key"], {"item": items})
+ values = dict(
+ [
+ (k, template(v, {"item": items}))
+ for k, v in iteritems(value["values"])
+ ]
+ )
+ objects.append({key: values})
+
+ return objects
+
+ elif "items" in attrs:
+ regexp = re.compile(attrs["items"])
+ when = attrs.get("when")
+ conditional = (
+ "{%% if %s %%}True{%% else %%}False{%% endif %%}" % when
+ )
+
+ if isinstance(value, Mapping) and "key" not in value:
+ values = list()
+
+ for item in re_matchall(regexp, output):
+ entry = {}
+
+ for item_key, item_value in iteritems(value):
+ entry[item_key] = template(item_value, {"item": item})
+
+ if when:
+ if template(conditional, {"item": entry}):
+ values.append(entry)
+ else:
+ values.append(entry)
+
+ obj[name] = values
+
+ elif isinstance(value, Mapping):
+ values = dict()
+
+ for item in re_matchall(regexp, output):
+ entry = {}
+
+ for item_key, item_value in iteritems(value["values"]):
+ entry[item_key] = template(item_value, {"item": item})
+
+ key = template(value["key"], {"item": item})
+
+ if when:
+ if template(
+ conditional, {"item": {"key": key, "value": entry}}
+ ):
+ values[key] = entry
+ else:
+ values[key] = entry
+
+ obj[name] = values
+
+ else:
+ item = re_search(regexp, output)
+ obj[name] = template(value, {"item": item})
+
+ else:
+ obj[name] = value
+
+ return obj
+
+
+def parse_cli_textfsm(value, template):
+ if not HAS_TEXTFSM:
+ raise AnsibleError(
+ "parse_cli_textfsm filter requires TextFSM library to be installed"
+ )
+
+ if not isinstance(value, string_types):
+ raise AnsibleError(
+ "parse_cli_textfsm input should be a string, but was given a input of %s"
+ % (type(value))
+ )
+
+ if not os.path.exists(template):
+ raise AnsibleError(
+ "unable to locate parse_cli_textfsm template: %s" % template
+ )
+
+ try:
+ template = open(template)
+ except IOError as exc:
+ raise AnsibleError(to_native(exc))
+
+ re_table = textfsm.TextFSM(template)
+ fsm_results = re_table.ParseText(value)
+
+ results = list()
+ for item in fsm_results:
+ results.append(dict(zip(re_table.header, item)))
+
+ return results
+
+
+def _extract_param(template, root, attrs, value):
+
+ key = None
+ when = attrs.get("when")
+ conditional = "{%% if %s %%}True{%% else %%}False{%% endif %%}" % when
+ param_to_xpath_map = attrs["items"]
+
+ if isinstance(value, Mapping):
+ key = value.get("key", None)
+ if key:
+ value = value["values"]
+
+ entries = dict() if key else list()
+
+ for element in root.findall(attrs["top"]):
+ entry = dict()
+ item_dict = dict()
+ for param, param_xpath in iteritems(param_to_xpath_map):
+ fields = None
+ try:
+ fields = element.findall(param_xpath)
+ except Exception:
+ display.warning(
+ "Failed to evaluate value of '%s' with XPath '%s'.\nUnexpected error: %s."
+ % (param, param_xpath, traceback.format_exc())
+ )
+
+ tags = param_xpath.split("/")
+
+        # Check whether the xpath ends with an attribute selector.
+        # If so, assign the matching attribute key/value dict as the param value;
+        # otherwise (a normal xpath), assign the matched element's text value.
+ if len(tags) and tags[-1].endswith("]"):
+ if fields:
+ if len(fields) > 1:
+ item_dict[param] = [field.attrib for field in fields]
+ else:
+ item_dict[param] = fields[0].attrib
+ else:
+ item_dict[param] = {}
+ else:
+ if fields:
+ if len(fields) > 1:
+ item_dict[param] = [field.text for field in fields]
+ else:
+ item_dict[param] = fields[0].text
+ else:
+ item_dict[param] = None
+
+ if isinstance(value, Mapping):
+ for item_key, item_value in iteritems(value):
+ entry[item_key] = template(item_value, {"item": item_dict})
+ else:
+ entry = template(value, {"item": item_dict})
+
+ if key:
+ expanded_key = template(key, {"item": item_dict})
+ if when:
+ if template(
+ conditional,
+ {"item": {"key": expanded_key, "value": entry}},
+ ):
+ entries[expanded_key] = entry
+ else:
+ entries[expanded_key] = entry
+ else:
+ if when:
+ if template(conditional, {"item": entry}):
+ entries.append(entry)
+ else:
+ entries.append(entry)
+
+ return entries
+
+
+def parse_xml(output, tmpl):
+ if not os.path.exists(tmpl):
+ raise AnsibleError("unable to locate parse_xml template: %s" % tmpl)
+
+ if not isinstance(output, string_types):
+ raise AnsibleError(
+ "parse_xml works on string input, but given input of : %s"
+ % type(output)
+ )
+
+ root = fromstring(output)
+ try:
+ template = Template()
+ except ImportError as exc:
+ raise AnsibleError(to_native(exc))
+
+ with open(tmpl) as tmpl_fh:
+ tmpl_content = tmpl_fh.read()
+
+ spec = yaml.safe_load(tmpl_content)
+ obj = {}
+
+ for name, attrs in iteritems(spec["keys"]):
+ value = attrs["value"]
+
+ try:
+ variables = spec.get("vars", {})
+ value = template(value, variables)
+ except Exception:
+ pass
+
+ if "items" in attrs:
+ obj[name] = _extract_param(template, root, attrs, value)
+ else:
+ obj[name] = value
+
+ return obj
+
+
+def type5_pw(password, salt=None):
+ if not isinstance(password, string_types):
+ raise AnsibleFilterError(
+ "type5_pw password input should be a string, but was given a input of %s"
+ % (type(password).__name__)
+ )
+
+ salt_chars = u"".join(
+ (to_text(string.ascii_letters), to_text(string.digits), u"./")
+ )
+ if salt is not None and not isinstance(salt, string_types):
+ raise AnsibleFilterError(
+ "type5_pw salt input should be a string, but was given a input of %s"
+ % (type(salt).__name__)
+ )
+ elif not salt:
+ salt = random_password(length=4, chars=salt_chars)
+ elif not set(salt) <= set(salt_chars):
+ raise AnsibleFilterError(
+ "type5_pw salt used inproper characters, must be one of %s"
+ % (salt_chars)
+ )
+
+ encrypted_password = passlib_or_crypt(password, "md5_crypt", salt=salt)
+
+ return encrypted_password
+
+
+def hash_salt(password):
+
+ split_password = password.split("$")
+ if len(split_password) != 4:
+ raise AnsibleFilterError(
+ "Could not parse salt out password correctly from {0}".format(
+ password
+ )
+ )
+ else:
+ return split_password[2]
+
+
+def comp_type5(
+ unencrypted_password, encrypted_password, return_original=False
+):
+
+ salt = hash_salt(encrypted_password)
+ if type5_pw(unencrypted_password, salt) == encrypted_password:
+ if return_original is True:
+ return encrypted_password
+ else:
+ return True
+ return False
+
+
+def vlan_parser(vlan_list, first_line_len=48, other_line_len=44):
+
+ """
+ Input: Unsorted list of vlan integers
+ Output: Sorted string list of integers according to IOS-like vlan list rules
+
+ 1. Vlans are listed in ascending order
+ 2. Runs of 3 or more consecutive vlans are listed with a dash
+ 3. The first line of the list can be first_line_len characters long
+ 4. Subsequent list lines can be other_line_len characters
+ """
+
+ # Sort and remove duplicates
+ sorted_list = sorted(set(vlan_list))
+
+ if sorted_list[0] < 1 or sorted_list[-1] > 4094:
+ raise AnsibleFilterError("Valid VLAN range is 1-4094")
+
+ parse_list = []
+ idx = 0
+ while idx < len(sorted_list):
+ start = idx
+ end = start
+ while end < len(sorted_list) - 1:
+ if sorted_list[end + 1] - sorted_list[end] == 1:
+ end += 1
+ else:
+ break
+
+ if start == end:
+ # Single VLAN
+ parse_list.append(str(sorted_list[idx]))
+ elif start + 1 == end:
+ # Run of 2 VLANs
+ parse_list.append(str(sorted_list[start]))
+ parse_list.append(str(sorted_list[end]))
+ else:
+ # Run of 3 or more VLANs
+ parse_list.append(
+ str(sorted_list[start]) + "-" + str(sorted_list[end])
+ )
+ idx = end + 1
+
+ line_count = 0
+ result = [""]
+ for vlans in parse_list:
+ # First line (" switchport trunk allowed vlan ")
+ if line_count == 0:
+ if len(result[line_count] + vlans) > first_line_len:
+ result.append("")
+ line_count += 1
+ result[line_count] += vlans + ","
+ else:
+ result[line_count] += vlans + ","
+
+ # Subsequent lines (" switchport trunk allowed vlan add ")
+ else:
+ if len(result[line_count] + vlans) > other_line_len:
+ result.append("")
+ line_count += 1
+ result[line_count] += vlans + ","
+ else:
+ result[line_count] += vlans + ","
+
+ # Remove trailing orphan commas
+ for idx in range(0, len(result)):
+ result[idx] = result[idx].rstrip(",")
+
+ # Sometimes text wraps to next line, but there are no remaining VLANs
+ if "" in result:
+ result.remove("")
+
+ return result
+
+
+class FilterModule(object):
+ """Filters for working with output from network devices"""
+
+ filter_map = {
+ "parse_cli": parse_cli,
+ "parse_cli_textfsm": parse_cli_textfsm,
+ "parse_xml": parse_xml,
+ "type5_pw": type5_pw,
+ "hash_salt": hash_salt,
+ "comp_type5": comp_type5,
+ "vlan_parser": vlan_parser,
+ }
+
+ def filters(self):
+ return self.filter_map
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/httpapi/restconf.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/httpapi/restconf.py
new file mode 100644
index 00000000..8afb3e5e
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/httpapi/restconf.py
@@ -0,0 +1,91 @@
+# Copyright (c) 2018 Cisco and/or its affiliates.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """author: Ansible Networking Team
+httpapi: restconf
+short_description: HttpApi Plugin for devices supporting Restconf API
+description:
+- This HttpApi plugin provides methods to connect to Restconf API endpoints.
+options:
+ root_path:
+ type: str
+ description:
+ - Specifies the location of the Restconf root.
+ default: /restconf
+ vars:
+ - name: ansible_httpapi_restconf_root
+"""
+
+import json
+
+from ansible.module_utils._text import to_text
+from ansible.module_utils.connection import ConnectionError
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible.plugins.httpapi import HttpApiBase
+
+
+CONTENT_TYPE = "application/yang-data+json"
+
+
+class HttpApi(HttpApiBase):
+ def send_request(self, data, **message_kwargs):
+ if data:
+ data = json.dumps(data)
+
+ path = "/".join(
+ [
+ self.get_option("root_path").rstrip("/"),
+ message_kwargs.get("path", "").lstrip("/"),
+ ]
+ )
+
+ headers = {
+ "Content-Type": message_kwargs.get("content_type") or CONTENT_TYPE,
+ "Accept": message_kwargs.get("accept") or CONTENT_TYPE,
+ }
+ response, response_data = self.connection.send(
+ path, data, headers=headers, method=message_kwargs.get("method")
+ )
+
+ return handle_response(response, response_data)
+
+
+def handle_response(response, response_data):
+ try:
+ response_data = json.loads(response_data.read())
+ except ValueError:
+ response_data = response_data.read()
+
+ if isinstance(response, HTTPError):
+ if response_data:
+ if "errors" in response_data:
+ errors = response_data["errors"]["error"]
+ error_text = "\n".join(
+ (error["error-message"] for error in errors)
+ )
+ else:
+ error_text = response_data
+
+ raise ConnectionError(error_text, code=response.code)
+ raise ConnectionError(to_text(response), code=response.code)
+
+ return response_data
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py
index 64150405..bc458eb5 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py
@@ -29,7 +29,7 @@ import re
import hashlib
from ansible.module_utils.six.moves import zip
-from ansible.module_utils.common.text.converters import to_bytes, to_native
+from ansible.module_utils._text import to_bytes, to_native
DEFAULT_COMMENT_TOKENS = ["#", "!", "/*", "*/", "echo"]
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py
index 2afa650e..477d3184 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py
@@ -79,7 +79,7 @@ class FactsBase(object):
self._module.fail_json(
msg="Subset must be one of [%s], got %s"
% (
- ", ".join(sorted(list(valid_subsets))),
+ ", ".join(sorted([item for item in valid_subsets])),
subset,
)
)
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py
index 1857f7df..53a91e8c 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py
@@ -27,7 +27,7 @@
#
import sys
-from ansible.module_utils.common.text.converters import to_text, to_bytes
+from ansible.module_utils._text import to_text, to_bytes
from ansible.module_utils.connection import Connection, ConnectionError
try:
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py
index 149b4413..555fc713 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py
@@ -28,7 +28,7 @@
import traceback
import json
-from ansible.module_utils.common.text.converters import to_text, to_native
+from ansible.module_utils._text import to_text, to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.connection import Connection, ConnectionError
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py
index 4095f594..64eca157 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py
@@ -36,12 +36,26 @@ import json
from itertools import chain
-from ansible.module_utils.common.text.converters import to_text, to_bytes
-from ansible.module_utils.six.moves.collections_abc import Mapping
+from ansible.module_utils._text import to_text, to_bytes
+from ansible.module_utils.common._collections_compat import Mapping
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils import basic
from ansible.module_utils.parsing.convert_bool import boolean
+# Backwards compatibility for 3rd party modules
+# TODO(pabelanger): With move to ansible.netcommon, we should clean this code
+# up and have modules import directly themselves.
+from ansible.module_utils.common.network import ( # noqa: F401
+ to_bits,
+ is_netmask,
+ is_masklen,
+ to_netmask,
+ to_masklen,
+ to_subnet,
+ to_ipv6_network,
+ VALID_MASKS,
+)
+
try:
from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError
@@ -593,7 +607,7 @@ def remove_empties(cfg_dict):
elif (
isinstance(val, list)
and val
- and all(isinstance(x, dict) for x in val)
+ and all([isinstance(x, dict) for x in val])
):
child_val = [remove_empties(x) for x in val]
if child_val:
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py
new file mode 100644
index 00000000..1f03299b
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py
@@ -0,0 +1,147 @@
+#
+# (c) 2018 Red Hat, Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+import json
+
+from copy import deepcopy
+from contextlib import contextmanager
+
+try:
+ from lxml.etree import fromstring, tostring
+except ImportError:
+ from xml.etree.ElementTree import fromstring, tostring
+
+from ansible.module_utils._text import to_text, to_bytes
+from ansible.module_utils.connection import Connection, ConnectionError
+from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.netconf import (
+ NetconfConnection,
+)
+
+
+IGNORE_XML_ATTRIBUTE = ()
+
+
+def get_connection(module):
+ if hasattr(module, "_netconf_connection"):
+ return module._netconf_connection
+
+ capabilities = get_capabilities(module)
+ network_api = capabilities.get("network_api")
+ if network_api == "netconf":
+ module._netconf_connection = NetconfConnection(module._socket_path)
+ else:
+ module.fail_json(msg="Invalid connection type %s" % network_api)
+
+ return module._netconf_connection
+
+
+def get_capabilities(module):
+ if hasattr(module, "_netconf_capabilities"):
+ return module._netconf_capabilities
+
+ capabilities = Connection(module._socket_path).get_capabilities()
+ module._netconf_capabilities = json.loads(capabilities)
+ return module._netconf_capabilities
+
+
+def lock_configuration(module, target=None):
+ conn = get_connection(module)
+ return conn.lock(target=target)
+
+
+def unlock_configuration(module, target=None):
+ conn = get_connection(module)
+ return conn.unlock(target=target)
+
+
+@contextmanager
+def locked_config(module, target=None):
+ try:
+ lock_configuration(module, target=target)
+ yield
+ finally:
+ unlock_configuration(module, target=target)
+
+
+def get_config(module, source, filter=None, lock=False):
+ conn = get_connection(module)
+ try:
+ locked = False
+ if lock:
+ conn.lock(target=source)
+ locked = True
+ response = conn.get_config(source=source, filter=filter)
+
+ except ConnectionError as e:
+ module.fail_json(
+ msg=to_text(e, errors="surrogate_then_replace").strip()
+ )
+
+ finally:
+ if locked:
+ conn.unlock(target=source)
+
+ return response
+
+
+def get(module, filter, lock=False):
+ conn = get_connection(module)
+ try:
+ locked = False
+ if lock:
+ conn.lock(target="running")
+ locked = True
+
+ response = conn.get(filter=filter)
+
+ except ConnectionError as e:
+ module.fail_json(
+ msg=to_text(e, errors="surrogate_then_replace").strip()
+ )
+
+ finally:
+ if locked:
+ conn.unlock(target="running")
+
+ return response
+
+
+def dispatch(module, request):
+ conn = get_connection(module)
+ try:
+ response = conn.dispatch(request)
+ except ConnectionError as e:
+ module.fail_json(
+ msg=to_text(e, errors="surrogate_then_replace").strip()
+ )
+
+ return response
+
+
+def sanitize_xml(data):
+ tree = fromstring(
+ to_bytes(deepcopy(data), errors="surrogate_then_replace")
+ )
+ for element in tree.getiterator():
+ # remove attributes
+ attribute = element.attrib
+ if attribute:
+ for key in list(attribute):
+ if key not in IGNORE_XML_ATTRIBUTE:
+ attribute.pop(key)
+ return to_text(tostring(tree), errors="surrogate_then_replace").strip()
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py
new file mode 100644
index 00000000..fba46be0
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py
@@ -0,0 +1,61 @@
+# This code is part of Ansible, but is an independent component.
+# This particular file snippet, and this file snippet only, is BSD licensed.
+# Modules you write using this snippet, which is embedded dynamically by Ansible
+# still belong to the author of the module, and may assign their own license
+# to the complete work.
+#
+# (c) 2018 Red Hat Inc.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+from ansible.module_utils.connection import Connection
+
+
+def get(module, path=None, content=None, fields=None, output="json"):
+ if path is None:
+ raise ValueError("path value must be provided")
+ if content:
+ path += "?" + "content=%s" % content
+ if fields:
+ path += "?" + "field=%s" % fields
+
+ accept = None
+ if output == "xml":
+ accept = "application/yang-data+xml"
+
+ connection = Connection(module._socket_path)
+ return connection.send_request(
+ None, path=path, method="GET", accept=accept
+ )
+
+
+def edit_config(module, path=None, content=None, method="GET", format="json"):
+ if path is None:
+ raise ValueError("path value must be provided")
+
+ content_type = None
+ if format == "xml":
+ content_type = "application/yang-data+xml"
+
+ connection = Connection(module._socket_path)
+ return connection.send_request(
+ content, path=path, method=method, content_type=content_type
+ )
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/cli_config.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/cli_config.py
index 9d07e856..c1384c1d 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/cli_config.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/cli_config.py
@@ -206,7 +206,7 @@ import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
def validate_args(module, device_operations):
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/net_get.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/net_get.py
new file mode 100644
index 00000000..f0910f52
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/net_get.py
@@ -0,0 +1,71 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# (c) 2018, Ansible by Red Hat, inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+
+ANSIBLE_METADATA = {
+ "metadata_version": "1.1",
+ "status": ["preview"],
+ "supported_by": "network",
+}
+
+
+DOCUMENTATION = """module: net_get
+author: Deepak Agrawal (@dagrawal)
+short_description: Copy a file from a network device to Ansible Controller
+description:
+- This module provides functionality to copy file from network device to ansible controller.
+extends_documentation_fragment:
+- ansible.netcommon.network_agnostic
+options:
+ src:
+ description:
+ - Specifies the source file. The path to the source file can either be the full
+ path on the network device or a relative path as per path supported by destination
+ network device.
+ required: true
+ protocol:
+ description:
+ - Protocol used to transfer file.
+ default: scp
+ choices:
+ - scp
+ - sftp
+ dest:
+ description:
+ - Specifies the destination file. The path to the destination file can either
+ be the full path on the Ansible control host or a relative path from the playbook
+ or role root directory.
+ default:
+ - Same filename as specified in I(src). The path will be playbook root or role
+ root directory if playbook is part of a role.
+requirements:
+- scp
+notes:
+- Some devices need specific configurations to be enabled before scp can work These
+ configuration should be pre-configured before using this module e.g ios - C(ip scp
+ server enable).
+- User privilege to do scp on network device should be pre-configured e.g. ios - need
+ user privilege 15 by default for allowing scp.
+- Default destination of source file.
+"""
+
+EXAMPLES = """
+- name: copy file from the network device to Ansible controller
+ net_get:
+ src: running_cfg_ios1.txt
+
+- name: copy file from ios to common location at /tmp
+ net_get:
+ src: running_cfg_sw1.txt
+ dest : /tmp/ios1.txt
+"""
+
+RETURN = """
+"""
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/net_put.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/net_put.py
new file mode 100644
index 00000000..2fc4a98c
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/net_put.py
@@ -0,0 +1,82 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# (c) 2018, Ansible by Red Hat, inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+
+ANSIBLE_METADATA = {
+ "metadata_version": "1.1",
+ "status": ["preview"],
+ "supported_by": "network",
+}
+
+
+DOCUMENTATION = """module: net_put
+author: Deepak Agrawal (@dagrawal)
+short_description: Copy a file from Ansible Controller to a network device
+description:
+- This module provides functionality to copy file from Ansible controller to network
+ devices.
+extends_documentation_fragment:
+- ansible.netcommon.network_agnostic
+options:
+ src:
+ description:
+ - Specifies the source file. The path to the source file can either be the full
+ path on the Ansible control host or a relative path from the playbook or role
+ root directory.
+ required: true
+ protocol:
+ description:
+ - Protocol used to transfer file.
+ default: scp
+ choices:
+ - scp
+ - sftp
+ dest:
+ description:
+ - Specifies the destination file. The path to destination file can either be the
+ full path or relative path as supported by network_os.
+ default:
+ - Filename from src and at default directory of user shell on network_os.
+ required: false
+ mode:
+ description:
+ - Set the file transfer mode. If mode is set to I(text) then I(src) file will
+ go through Jinja2 template engine to replace any vars if present in the src
+ file. If mode is set to I(binary) then file will be copied as it is to destination
+ device.
+ default: binary
+ choices:
+ - binary
+ - text
+requirements:
+- scp
+notes:
+- Some devices need specific configurations to be enabled before scp can work. These
+  configurations should be pre-configured before using this module, e.g. ios - C(ip scp
+  server enable).
+- User privilege to do scp on network device should be pre-configured e.g. ios - need
+ user privilege 15 by default for allowing scp.
+- Default destination of source file.
+"""
+
+EXAMPLES = """
+- name: copy file from ansible controller to a network device
+ net_put:
+ src: running_cfg_ios1.txt
+
+- name: copy file at root dir of flash in slot 3 of sw1(ios)
+ net_put:
+ src: running_cfg_sw1.txt
+ protocol: sftp
+ dest : flash3:/running_cfg_sw1.txt
+"""
+
+RETURN = """
+"""
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py
new file mode 100644
index 00000000..e9332f26
--- /dev/null
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py
@@ -0,0 +1,70 @@
+#
+# (c) 2017 Red Hat Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = """author: Ansible Networking Team
+netconf: default
+short_description: Use default netconf plugin to run standard netconf commands as
+ per RFC
+description:
+- This default plugin provides low level abstraction apis for sending and receiving
+ netconf commands as per Netconf RFC specification.
+options:
+ ncclient_device_handler:
+ type: str
+ default: default
+ description:
+ - Specifies the ncclient device handler name for network os that support default
+ netconf implementation as per Netconf RFC specification. To identify the ncclient
+ device handler name refer ncclient library documentation.
+"""
+import json
+
+from ansible.module_utils._text import to_text
+from ansible.plugins.netconf import NetconfBase
+
+
+class Netconf(NetconfBase):
+ def get_text(self, ele, tag):
+ try:
+ return to_text(
+ ele.find(tag).text, errors="surrogate_then_replace"
+ ).strip()
+ except AttributeError:
+ pass
+
+ def get_device_info(self):
+ device_info = dict()
+ device_info["network_os"] = "default"
+ return device_info
+
+ def get_capabilities(self):
+ result = dict()
+ result["rpc"] = self.get_base_rpc()
+ result["network_api"] = "netconf"
+ result["device_info"] = self.get_device_info()
+ result["server_capabilities"] = [c for c in self.m.server_capabilities]
+ result["client_capabilities"] = [c for c in self.m.client_capabilities]
+ result["session_id"] = self.m.session_id
+ result["device_operations"] = self.get_device_operations(
+ result["server_capabilities"]
+ )
+ return json.dumps(result)
diff --git a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py
index b9cb19d7..feba971a 100644
--- a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py
+++ b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py
@@ -38,7 +38,7 @@ import json
from collections.abc import Mapping
from ansible.errors import AnsibleConnectionFailure
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.module_utils.six import iteritems
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.config import (
NetworkConfig,
diff --git a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py
index c16d84c6..6818a0ce 100644
--- a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py
+++ b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py
@@ -27,7 +27,7 @@
#
import json
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import (
to_list,
diff --git a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py
index 0b3be2a9..ef383fcc 100644
--- a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py
+++ b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py
@@ -134,7 +134,7 @@ failed_conditions:
"""
import time
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.parsing import (
Conditional,
diff --git a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py
index 5048bbb5..beec5b8d 100644
--- a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py
+++ b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py
@@ -34,8 +34,7 @@ extends_documentation_fragment:
- cisco.ios.ios
notes:
- Tested against IOS 15.6
-- Abbreviated commands are NOT idempotent,
- see L(Network FAQ,../network/user_guide/faq.html#why-do-the-config-modules-always-return-changed-true-with-abbreviated-commands).
+- Abbreviated commands are NOT idempotent, see L(Network FAQ,../network/user_guide/faq.html#why-do-the-config-modules-always-return-changed-true-with-abbreviated-commands).
options:
lines:
description:
@@ -327,7 +326,7 @@ time:
"""
import json
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.module_utils.connection import ConnectionError
from ansible_collections.cisco.ios.plugins.module_utils.network.ios.ios import (
run_commands,
@@ -576,7 +575,6 @@ def main():
)
if running_config.sha1 != base_config.sha1:
- before, after = "", ""
if module.params["diff_against"] == "intended":
before = running_config
after = base_config
diff --git a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/terminal/ios.py b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/terminal/ios.py
index 97169529..29f31b0e 100644
--- a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/terminal/ios.py
+++ b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/terminal/ios.py
@@ -24,7 +24,7 @@ import json
import re
from ansible.errors import AnsibleConnectionFailure
-from ansible.module_utils.common.text.converters import to_text, to_bytes
+from ansible.module_utils._text import to_text, to_bytes
from ansible.plugins.terminal import TerminalBase
from ansible.utils.display import Display
diff --git a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py
index 1f351dc5..3212615f 100644
--- a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py
+++ b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py
@@ -37,7 +37,7 @@ import json
from collections.abc import Mapping
from ansible.errors import AnsibleConnectionFailure
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.config import (
NetworkConfig,
)
diff --git a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py
index 7e8b2048..908395a6 100644
--- a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py
+++ b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py
@@ -27,7 +27,7 @@
#
import json
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.connection import Connection, ConnectionError
diff --git a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py
index 7f7c30c2..18538491 100644
--- a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py
+++ b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py
@@ -133,7 +133,7 @@ warnings:
"""
import time
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.parsing import (
Conditional,
@@ -192,7 +192,7 @@ def main():
interval = module.params["interval"]
match = module.params["match"]
- for dummy in range(retries):
+ for _ in range(retries):
responses = run_commands(module, commands)
for item in list(conditionals):
@@ -213,7 +213,7 @@ def main():
module.fail_json(msg=msg, failed_conditions=failed_conditions)
result.update(
- {"stdout": responses, "stdout_lines": list(to_lines(responses)), }
+ {"stdout": responses, "stdout_lines": list(to_lines(responses)),}
)
module.exit_json(**result)
diff --git a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py
index e65f3ffd..b899045a 100644
--- a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py
+++ b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py
@@ -178,7 +178,7 @@ time:
"""
import re
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import ConnectionError
from ansible_collections.vyos.vyos.plugins.module_utils.network.vyos.vyos import (
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py
index 79f72ef6..adb918be 100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py
@@ -18,7 +18,7 @@ import zipfile
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum
@@ -439,7 +439,7 @@ class ActionModule(ActionBase):
source_full = self._loader.get_real_file(source, decrypt=decrypt)
except AnsibleFileNotFound as e:
result['failed'] = True
- result['msg'] = "could not find src=%s, %s" % (source, to_text(e))
+ result['msg'] = "could not find src=%s, %s" % (source_full, to_text(e))
return result
original_basename = os.path.basename(source)
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_reboot.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_reboot.py
deleted file mode 100644
index f1fad4d8..00000000
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_reboot.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright: (c) 2018, Matt Davis <mdavis@ansible.com>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible.errors import AnsibleError
-from ansible.module_utils.common.text.converters import to_native
-from ansible.module_utils.common.validation import check_type_str, check_type_float
-from ansible.plugins.action import ActionBase
-from ansible.utils.display import Display
-
-from ansible_collections.ansible.windows.plugins.plugin_utils._reboot import reboot_host
-
-display = Display()
-
-
-def _positive_float(val):
- float_val = check_type_float(val)
- if float_val < 0:
- return 0
-
- else:
- return float_val
-
-
-class ActionModule(ActionBase):
- TRANSFERS_FILES = False
- _VALID_ARGS = frozenset((
- 'boot_time_command',
- 'connect_timeout',
- 'connect_timeout_sec',
- 'msg',
- 'post_reboot_delay',
- 'post_reboot_delay_sec',
- 'pre_reboot_delay',
- 'pre_reboot_delay_sec',
- 'reboot_timeout',
- 'reboot_timeout_sec',
- 'shutdown_timeout',
- 'shutdown_timeout_sec',
- 'test_command',
- ))
-
- def run(self, tmp=None, task_vars=None):
- self._supports_check_mode = True
- self._supports_async = True
-
- if self._play_context.check_mode:
- return {'changed': True, 'elapsed': 0, 'rebooted': True}
-
- if task_vars is None:
- task_vars = {}
-
- super(ActionModule, self).run(tmp, task_vars)
-
- parameters = {}
- for names, check_func in [
- (['boot_time_command'], check_type_str),
- (['connect_timeout', 'connect_timeout_sec'], _positive_float),
- (['msg'], check_type_str),
- (['post_reboot_delay', 'post_reboot_delay_sec'], _positive_float),
- (['pre_reboot_delay', 'pre_reboot_delay_sec'], _positive_float),
- (['reboot_timeout', 'reboot_timeout_sec'], _positive_float),
- (['test_command'], check_type_str),
- ]:
- for name in names:
- value = self._task.args.get(name, None)
- if value:
- break
- else:
- value = None
-
- # Defaults are applied in reboot_action so skip adding to kwargs if the input wasn't set (None)
- if value is not None:
- try:
- value = check_func(value)
- except TypeError as e:
- raise AnsibleError("Invalid value given for '%s': %s." % (names[0], to_native(e)))
-
- # Setting a lower value and kill PowerShell when sending the shutdown command. Just use the defaults
- # if this is the case.
- if names[0] == 'pre_reboot_delay' and value < 2:
- continue
-
- parameters[names[0]] = value
-
- result = reboot_host(self._task.action, self._connection, **parameters)
-
- # Not needed for testing and collection_name kwargs causes sanity error
- # Historical behaviour had ignore_errors=True being able to ignore unreachable hosts and not just task errors.
- # This snippet will allow that to continue but state that it will be removed in a future version and to use
- # ignore_unreachable to ignore unreachable hosts.
- # if result['unreachable'] and self._task.ignore_errors and not self._task.ignore_unreachable:
- # dep_msg = "Host was unreachable but is being skipped because ignore_errors=True is set. In the future " \
- # "only ignore_unreachable will be able to ignore an unreachable host for %s" % self._task.action
- # display.deprecated(dep_msg, date="2023-05-01", collection_name="ansible.windows")
- # result['unreachable'] = False
- # result['failed'] = True
-
- return result
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps1
index 9d29d6fc..071eb11c 100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps1
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps1
@@ -95,7 +95,7 @@ If ($null -ne $info) {
isreadonly = ($attributes -contains "ReadOnly")
isreg = $false
isshared = $false
- nlink = 1 # Number of links to the file (hard links), overridden below if islnk
+ nlink = 1 # Number of links to the file (hard links), overridden below if islnk
# lnk_target = islnk or isjunction Target of the symlink. Note that relative paths remain relative
# lnk_source = islnk os isjunction Target of the symlink normalized for the remote filesystem
hlnk_targets = @()
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_quote.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_quote.py
deleted file mode 100644
index 718a0990..00000000
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_quote.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright (c) 2021 Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-"""Quoting helpers for Windows
-
-This contains code to help with quoting values for use in the variable Windows
-shell. Right now it should only be used in ansible.windows as the interface is
-not final and could be subject to change.
-"""
-
-# FOR INTERNAL COLLECTION USE ONLY
-# The interfaces in this file are meant for use within the ansible.windows collection
-# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release.
-# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686
-# Please open an issue if you have questions about this.
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import re
-
-from ansible.module_utils.six import text_type
-
-
-_UNSAFE_C = re.compile(u'[\\s\t"]')
-_UNSAFE_CMD = re.compile(u'[\\s\\(\\)\\^\\|%!"<>&]')
-
-# PowerShell has 5 characters it uses as a single quote, we need to double up on all of them.
-# https://github.com/PowerShell/PowerShell/blob/b7cb335f03fe2992d0cbd61699de9d9aafa1d7c1/src/System.Management.Automation/engine/parser/CharTraits.cs#L265-L272
-# https://github.com/PowerShell/PowerShell/blob/b7cb335f03fe2992d0cbd61699de9d9aafa1d7c1/src/System.Management.Automation/engine/parser/CharTraits.cs#L18-L21
-_UNSAFE_PWSH = re.compile(u"(['\u2018\u2019\u201a\u201b])")
-
-
-def quote_c(s): # type: (text_type) -> text_type
- """Quotes a value for the raw Win32 process command line.
-
- Quotes a value to be safely used by anything that calls the Win32
- CreateProcess API.
-
- Args:
- s: The string to quote.
-
- Returns:
- (text_type): The quoted string value.
- """
- # https://docs.microsoft.com/en-us/archive/blogs/twistylittlepassagesallalike/everyone-quotes-command-line-arguments-the-wrong-way
- if not s:
- return u'""'
-
- if not _UNSAFE_C.search(s):
- return s
-
- # Replace any double quotes in an argument with '\"'.
- s = s.replace('"', '\\"')
-
- # We need to double up on any '\' chars that preceded a double quote (now '\"').
- s = re.sub(r'(\\+)\\"', r'\1\1\"', s)
-
- # Double up '\' at the end of the argument so it doesn't escape out end quote.
- s = re.sub(r'(\\+)$', r'\1\1', s)
-
- # Finally wrap the entire argument in double quotes now we've escaped the double quotes within.
- return u'"{0}"'.format(s)
-
-
-def quote_cmd(s): # type: (text_type) -> text_type
- """Quotes a value for cmd.
-
- Quotes a value to be safely used by a command prompt call.
-
- Args:
- s: The string to quote.
-
- Returns:
- (text_type): The quoted string value.
- """
- # https://docs.microsoft.com/en-us/archive/blogs/twistylittlepassagesallalike/everyone-quotes-command-line-arguments-the-wrong-way#a-better-method-of-quoting
- if not s:
- return u'""'
-
- if not _UNSAFE_CMD.search(s):
- return s
-
- # Escape the metachars as we are quoting the string to stop cmd from interpreting that metachar. For example
- # 'file &whoami.exe' would result in 'whoami.exe' being executed and then that output being used as the argument
- # instead of the literal string.
- # https://stackoverflow.com/questions/3411771/multiple-character-replace-with-python
- for c in u'^()%!"<>&|': # '^' must be the first char that we scan and replace
- if c in s:
- # I can't find any docs that explicitly say this but to escape ", it needs to be prefixed with \^.
- s = s.replace(c, (u"\\^" if c == u'"' else u"^") + c)
-
- return u'^"{0}^"'.format(s)
-
-
-def quote_pwsh(s): # type: (text_type) -> text_type
- """Quotes a value for PowerShell.
-
- Quotes a value to be safely used by a PowerShell expression. The input
- string because something that is safely wrapped in single quotes.
-
- Args:
- s: The string to quote.
-
- Returns:
- (text_type): The quoted string value.
- """
- # https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_quoting_rules?view=powershell-5.1
- if not s:
- return u"''"
-
- # We should always quote values in PowerShell as it has conflicting rules where strings can and can't be quoted.
- # This means we quote the entire arg with single quotes and just double up on the single quote equivalent chars.
- return u"'{0}'".format(_UNSAFE_PWSH.sub(u'\\1\\1', s))
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_reboot.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_reboot.py
deleted file mode 100644
index 2399ee48..00000000
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/plugin_utils/_reboot.py
+++ /dev/null
@@ -1,620 +0,0 @@
-# Copyright: (c) 2021, Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-"""Reboot action for Windows hosts
-
-This contains the code to reboot a Windows host for use by other action plugins
-in this collection. Right now it should only be used in this collection as the
-interface is not final and count be subject to change.
-"""
-
-# FOR INTERNAL COLLECTION USE ONLY
-# The interfaces in this file are meant for use within the ansible.windows collection
-# and may not remain stable to outside uses. Changes may be made in ANY release, even a bugfix release.
-# See also: https://github.com/ansible/community/issues/539#issuecomment-780839686
-# Please open an issue if you have questions about this.
-
-import datetime
-import json
-import random
-import time
-import traceback
-import uuid
-import typing as t
-
-from ansible.errors import AnsibleConnectionFailure, AnsibleError
-from ansible.module_utils.common.text.converters import to_text
-from ansible.plugins.connection import ConnectionBase
-from ansible.utils.display import Display
-
-from ansible_collections.ansible.windows.plugins.plugin_utils._quote import quote_pwsh
-
-
-# This is not ideal but the psrp connection plugin doesn't catch all these exceptions as an AnsibleConnectionFailure.
-# Until we can guarantee we are using a version of psrp that handles all this we try to handle those issues.
-try:
- from requests.exceptions import (
- RequestException,
- )
-except ImportError:
- RequestException = AnsibleConnectionFailure
-
-
-_LOGON_UI_KEY = (
- r"HKLM:\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Winlogon\AutoLogonChecked"
-)
-
-_DEFAULT_BOOT_TIME_COMMAND = (
- "(Get-CimInstance -ClassName Win32_OperatingSystem -Property LastBootUpTime)"
- ".LastBootUpTime.ToFileTime()"
-)
-
-T = t.TypeVar("T")
-
-display = Display()
-
-
-class _ReturnResultException(Exception):
- """Used to sneak results back to the return dict from an exception"""
-
- def __init__(self, msg, **result):
- super().__init__(msg)
- self.result = result
-
-
-class _TestCommandFailure(Exception):
- """Differentiates between a connection failure and just a command assertion failure during the reboot loop"""
-
-
-def reboot_host(
- task_action: str,
- connection: ConnectionBase,
- boot_time_command: str = _DEFAULT_BOOT_TIME_COMMAND,
- connect_timeout: int = 5,
- msg: str = "Reboot initiated by Ansible",
- post_reboot_delay: int = 0,
- pre_reboot_delay: int = 2,
- reboot_timeout: int = 600,
- test_command: t.Optional[str] = None,
-) -> t.Dict[str, t.Any]:
- """Reboot a Windows Host.
-
- Used by action plugins in ansible.windows to reboot a Windows host. It
- takes in the connection plugin so it can run the commands on the targeted
- host and monitor the reboot process. The return dict will have the
- following keys set:
-
- changed: Whether a change occurred (reboot was done)
- elapsed: Seconds elapsed between the reboot and it coming back online
- failed: Whether a failure occurred
- unreachable: Whether it failed to connect to the host on the first cmd
- rebooted: Whether the host was rebooted
-
- When failed=True there may be more keys to give some information around
- the failure like msg, exception. There are other keys that might be
- returned as well but they are dependent on the failure that occurred.
-
- Verbosity levels used:
- 2: Message when each reboot step is completed
- 4: Connection plugin operations and their results
- 5: Raw commands run and the results of those commands
- Debug: Everything, very verbose
-
- Args:
- task_action: The name of the action plugin that is running for logging.
- connection: The connection plugin to run the reboot commands on.
- boot_time_command: The command to run when getting the boot timeout.
- connect_timeout: Override the connection timeout of the connection
- plugin when polling the rebooted host.
- msg: The message to display to interactive users when rebooting the
- host.
- post_reboot_delay: Seconds to wait after sending the reboot command
- before checking to see if it has returned.
- pre_reboot_delay: Seconds to wait when sending the reboot command.
- reboot_timeout: Seconds to wait while polling for the host to come
- back online.
- test_command: Command to run when the host is back online and
- determines the machine is ready for management. When not defined
- the default command should wait until the reboot is complete and
- all pre-login configuration has completed.
-
- Returns:
- (Dict[str, Any]): The return result as a dictionary. Use the 'failed'
- key to determine if there was a failure or not.
- """
- result: t.Dict[str, t.Any] = {
- "changed": False,
- "elapsed": 0,
- "failed": False,
- "unreachable": False,
- "rebooted": False,
- }
- host_context = {"do_close_on_reset": True}
-
- # Get current boot time. A lot of tasks that require a reboot leave the WSMan stack in a bad place. Will try to
- # get the initial boot time 3 times before giving up.
- try:
- previous_boot_time = _do_until_success_or_retry_limit(
- task_action,
- connection,
- host_context,
- "pre-reboot boot time check",
- 3,
- _get_system_boot_time,
- task_action,
- connection,
- boot_time_command,
- )
-
- except Exception as e:
- # Report a the failure based on the last exception received.
- if isinstance(e, _ReturnResultException):
- result.update(e.result)
-
- if isinstance(e, AnsibleConnectionFailure):
- result["unreachable"] = True
- else:
- result["failed"] = True
-
- result["msg"] = str(e)
- result["exception"] = traceback.format_exc()
- return result
-
- # Get the original connection_timeout option var so it can be reset after
- original_connection_timeout: t.Optional[float] = None
- try:
- original_connection_timeout = connection.get_option("connection_timeout")
- display.vvvv(
- f"{task_action}: saving original connection_timeout of {original_connection_timeout}"
- )
- except KeyError:
- display.vvvv(
- f"{task_action}: connection_timeout connection option has not been set"
- )
-
- # Initiate reboot
- # This command may be wrapped in other shells or command making it hard to detect what shutdown.exe actually
- # returned. We use this hackery to return a json that contains the stdout/stderr/rc as a structured object for our
- # code to parse and detect if something went wrong.
- reboot_command = """$ErrorActionPreference = 'Continue'
-
-if ($%s) {
- Remove-Item -LiteralPath '%s' -Force -ErrorAction SilentlyContinue
-}
-
-$stdout = $null
-$stderr = . { shutdown.exe /r /t %s /c %s | Set-Variable stdout } 2>&1 | ForEach-Object ToString
-
-ConvertTo-Json -Compress -InputObject @{
- stdout = (@($stdout) -join "`n")
- stderr = (@($stderr) -join "`n")
- rc = $LASTEXITCODE
-}
-""" % (
- str(not test_command),
- _LOGON_UI_KEY,
- int(pre_reboot_delay),
- quote_pwsh(msg),
- )
-
- expected_test_result = (
- None # We cannot have an expected result if the command is user defined
- )
- if not test_command:
- # It turns out that LogonUI will create this registry key if it does not exist when it's about to show the
- # logon prompt. Normally this is a volatile key but if someone has explicitly created it that might no longer
- # be the case. We ensure it is not present on a reboot so we can wait until LogonUI creates it to determine
- # the host is actually online and ready, e.g. no configurations/updates still to be applied.
- # We echo a known successful statement to catch issues with powershell failing to start but the rc mysteriously
- # being 0 causing it to consider a successful reboot too early (seen on ssh connections).
- expected_test_result = f"success-{uuid.uuid4()}"
- test_command = f"Get-Item -LiteralPath '{_LOGON_UI_KEY}' -ErrorAction Stop; '{expected_test_result}'"
-
- start = None
- try:
- _perform_reboot(task_action, connection, reboot_command)
-
- start = datetime.datetime.utcnow()
- result["changed"] = True
- result["rebooted"] = True
-
- if post_reboot_delay != 0:
- display.vv(
- f"{task_action}: waiting an additional {post_reboot_delay} seconds"
- )
- time.sleep(post_reboot_delay)
-
- # Keep on trying to run the last boot time check until it is successful or the timeout is raised
- display.vv(f"{task_action} validating reboot")
- _do_until_success_or_timeout(
- task_action,
- connection,
- host_context,
- "last boot time check",
- reboot_timeout,
- _check_boot_time,
- task_action,
- connection,
- host_context,
- previous_boot_time,
- boot_time_command,
- connect_timeout,
- )
-
- # Reset the connection plugin connection timeout back to the original
- if original_connection_timeout is not None:
- _set_connection_timeout(
- task_action,
- connection,
- host_context,
- original_connection_timeout,
- )
-
- # Run test command until ti is successful or a timeout occurs
- display.vv(f"{task_action} running post reboot test command")
- _do_until_success_or_timeout(
- task_action,
- connection,
- host_context,
- "post-reboot test command",
- reboot_timeout,
- _run_test_command,
- task_action,
- connection,
- test_command,
- expected=expected_test_result,
- )
-
- display.vv(f"{task_action}: system successfully rebooted")
-
- except Exception as e:
- if isinstance(e, _ReturnResultException):
- result.update(e.result)
-
- result["failed"] = True
- result["msg"] = str(e)
- result["exception"] = traceback.format_exc()
-
- if start:
- elapsed = datetime.datetime.utcnow() - start
- result["elapsed"] = elapsed.seconds
-
- return result
-
-
-def _check_boot_time(
- task_action: str,
- connection: ConnectionBase,
- host_context: t.Dict[str, t.Any],
- previous_boot_time: int,
- boot_time_command: str,
- timeout: int,
-):
- """Checks the system boot time has been changed or not"""
- display.vvvv("%s: attempting to get system boot time" % task_action)
-
- # override connection timeout from defaults to custom value
- if timeout:
- _set_connection_timeout(task_action, connection, host_context, timeout)
-
- # try and get boot time
- current_boot_time = _get_system_boot_time(
- task_action, connection, boot_time_command
- )
- if current_boot_time == previous_boot_time:
- raise _TestCommandFailure("boot time has not changed")
-
-
-def _do_until_success_or_retry_limit(
- task_action: str,
- connection: ConnectionBase,
- host_context: t.Dict[str, t.Any],
- action_desc: str,
- retries: int,
- func: t.Callable[..., T],
- *args: t.Any,
- **kwargs: t.Any,
-) -> t.Optional[T]:
- """Runs the function multiple times ignoring errors until the retry limit is hit"""
-
- def wait_condition(idx):
- return idx < retries
-
- return _do_until_success_or_condition(
- task_action,
- connection,
- host_context,
- action_desc,
- wait_condition,
- func,
- *args,
- **kwargs,
- )
-
-
-def _do_until_success_or_timeout(
- task_action: str,
- connection: ConnectionBase,
- host_context: t.Dict[str, t.Any],
- action_desc: str,
- timeout: float,
- func: t.Callable[..., T],
- *args: t.Any,
- **kwargs: t.Any,
-) -> t.Optional[T]:
- """Runs the function multiple times ignoring errors until a timeout occurs"""
- max_end_time = datetime.datetime.utcnow() + datetime.timedelta(seconds=timeout)
-
- def wait_condition(idx):
- return datetime.datetime.utcnow() < max_end_time
-
- try:
- return _do_until_success_or_condition(
- task_action,
- connection,
- host_context,
- action_desc,
- wait_condition,
- func,
- *args,
- **kwargs,
- )
- except Exception:
- raise Exception(
- "Timed out waiting for %s (timeout=%s)" % (action_desc, timeout)
- )
-
-
-def _do_until_success_or_condition(
- task_action: str,
- connection: ConnectionBase,
- host_context: t.Dict[str, t.Any],
- action_desc: str,
- condition: t.Callable[[int], bool],
- func: t.Callable[..., T],
- *args: t.Any,
- **kwargs: t.Any,
-) -> t.Optional[T]:
- """Runs the function multiple times ignoring errors until the condition is false"""
- fail_count = 0
- max_fail_sleep = 12
- reset_required = False
- last_error = None
-
- while fail_count == 0 or condition(fail_count):
- try:
- if reset_required:
- # Keep on trying the reset until it succeeds.
- _reset_connection(task_action, connection, host_context)
- reset_required = False
-
- else:
- res = func(*args, **kwargs)
- display.vvvvv("%s: %s success" % (task_action, action_desc))
-
- return res
-
- except Exception as e:
- last_error = e
-
- if not isinstance(e, _TestCommandFailure):
- # The error may be due to a connection problem, just reset the connection just in case
- reset_required = True
-
- # Use exponential backoff with a max timeout, plus a little bit of randomness
- random_int = random.randint(0, 1000) / 1000
- fail_sleep = 2**fail_count + random_int
- if fail_sleep > max_fail_sleep:
- fail_sleep = max_fail_sleep + random_int
-
- try:
- error = str(e).splitlines()[-1]
- except IndexError:
- error = str(e)
-
- display.vvvvv(
- "{action}: {desc} fail {e_type} '{err}', retrying in {sleep:.4} seconds...\n{tcb}".format(
- action=task_action,
- desc=action_desc,
- e_type=type(e).__name__,
- err=error,
- sleep=fail_sleep,
- tcb=traceback.format_exc(),
- )
- )
-
- fail_count += 1
- time.sleep(fail_sleep)
-
- if last_error:
- raise last_error
-
- return None
-
-
-def _execute_command(
- task_action: str,
- connection: ConnectionBase,
- command: str,
-) -> t.Tuple[int, str, str]:
- """Runs a command on the Windows host and returned the result"""
- display.vvvvv(f"{task_action}: running command: {command}")
-
- # Need to wrap the command in our PowerShell encoded wrapper. This is done to align the command input to a
- # common shell and to allow the psrp connection plugin to report the correct exit code without manually setting
- # $LASTEXITCODE for just that plugin.
- command = connection._shell._encode_script(command)
-
- try:
- rc, stdout, stderr = connection.exec_command(
- command, in_data=None, sudoable=False
- )
- except RequestException as e:
- # The psrp connection plugin should be doing this but until we can guarantee it does we just convert it here
- # to ensure AnsibleConnectionFailure refers to actual connection errors.
- raise AnsibleConnectionFailure(f"Failed to connect to the host: {e}")
-
- rc = rc or 0
- stdout = to_text(stdout, errors="surrogate_or_strict").strip()
- stderr = to_text(stderr, errors="surrogate_or_strict").strip()
-
- display.vvvvv(
- f"{task_action}: command result - rc: {rc}, stdout: {stdout}, stderr: {stderr}"
- )
-
- return rc, stdout, stderr
-
-
-def _get_system_boot_time(
- task_action: str,
- connection: ConnectionBase,
- boot_time_command: str,
-) -> str:
- """Gets a unique identifier to represent the boot time of the Windows host"""
- display.vvvv(f"{task_action}: getting boot time")
- rc, stdout, stderr = _execute_command(task_action, connection, boot_time_command)
-
- if rc != 0:
- msg = f"{task_action}: failed to get host boot time info"
- raise _ReturnResultException(msg, rc=rc, stdout=stdout, stderr=stderr)
-
- display.vvvv(f"{task_action}: last boot time: {stdout}")
- return stdout
-
-
-def _perform_reboot(
- task_action: str,
- connection: ConnectionBase,
- reboot_command: str,
- handle_abort: bool = True,
-) -> None:
- """Runs the reboot command"""
- display.vv(f"{task_action}: rebooting server...")
-
- stdout = stderr = None
- try:
- rc, stdout, stderr = _execute_command(task_action, connection, reboot_command)
-
- except AnsibleConnectionFailure as e:
- # If the connection is closed too quickly due to the system being shutdown, carry on
- display.vvvv(f"{task_action}: AnsibleConnectionFailure caught and handled: {e}")
- rc = 0
-
- if stdout:
- try:
- reboot_result = json.loads(stdout)
- except getattr(json.decoder, "JSONDecodeError", ValueError):
- # While the reboot command should output json it may have failed for some other reason. We continue
- # reporting with that output instead
- pass
- else:
- stdout = reboot_result.get("stdout", stdout)
- stderr = reboot_result.get("stderr", stderr)
- rc = int(reboot_result.get("rc", rc))
-
- # Test for "A system shutdown has already been scheduled. (1190)" and handle it gracefully
- if handle_abort and (rc == 1190 or (rc != 0 and stderr and "(1190)" in stderr)):
- display.warning("A scheduled reboot was pre-empted by Ansible.")
-
- # Try to abort (this may fail if it was already aborted)
- rc, stdout, stderr = _execute_command(
- task_action, connection, "shutdown.exe /a"
- )
- display.vvvv(
- f"{task_action}: result from trying to abort existing shutdown - rc: {rc}, stdout: {stdout}, stderr: {stderr}"
- )
-
- return _perform_reboot(
- task_action, connection, reboot_command, handle_abort=False
- )
-
- if rc != 0:
- msg = f"{task_action}: Reboot command failed"
- raise _ReturnResultException(msg, rc=rc, stdout=stdout, stderr=stderr)
-
-
-def _reset_connection(
- task_action: str,
- connection: ConnectionBase,
- host_context: t.Dict[str, t.Any],
- ignore_errors: bool = False,
-) -> None:
- """Resets the connection handling any errors"""
-
- def _wrap_conn_err(func, *args, **kwargs):
- try:
- func(*args, **kwargs)
-
- except (AnsibleError, RequestException) as e:
- if ignore_errors:
- return False
-
- raise AnsibleError(e)
-
- return True
-
- # While reset() should probably better handle this some connection plugins don't clear the existing connection on
- # reset() leaving resources still in use on the target (WSMan shells). Instead we try to manually close the
- # connection then call reset. If it fails once we want to skip closing to avoid a perpetual loop and just hope
- # reset() brings us back into a good state. If it's successful we still want to try it again.
- if host_context["do_close_on_reset"]:
- display.vvvv(f"{task_action}: closing connection plugin")
- try:
- success = _wrap_conn_err(connection.close)
-
- except Exception:
- host_context["do_close_on_reset"] = False
- raise
-
- host_context["do_close_on_reset"] = success
-
- # For some connection plugins (ssh) reset actually does something more than close so we also class that
- display.vvvv(f"{task_action}: resetting connection plugin")
- try:
- _wrap_conn_err(connection.reset)
-
- except AttributeError:
- # Not all connection plugins have reset so we just ignore those, close should have done our job.
- pass
-
-
-def _run_test_command(
- task_action: str,
- connection: ConnectionBase,
- command: str,
- expected: t.Optional[str] = None,
-) -> None:
- """Runs the user specified test command until the host is able to run it properly"""
- display.vvvv(f"{task_action}: attempting post-reboot test command")
-
- rc, stdout, stderr = _execute_command(task_action, connection, command)
-
- if rc != 0:
- msg = f"{task_action}: Test command failed - rc: {rc}, stdout: {stdout}, stderr: {stderr}"
- raise _TestCommandFailure(msg)
-
- if expected and expected not in stdout:
- msg = f"{task_action}: Test command failed - '{expected}' was not in stdout: {stdout}"
- raise _TestCommandFailure(msg)
-
-
-def _set_connection_timeout(
- task_action: str,
- connection: ConnectionBase,
- host_context: t.Dict[str, t.Any],
- timeout: float,
-) -> None:
- """Sets the connection plugin connection_timeout option and resets the connection"""
- try:
- current_connection_timeout = connection.get_option("connection_timeout")
- except KeyError:
- # Not all connection plugins implement this, just ignore the setting if it doesn't work
- return
-
- if timeout == current_connection_timeout:
- return
-
- display.vvvv(f"{task_action}: setting connect_timeout {timeout}")
- connection.set_option("connection_timeout", timeout)
-
- _reset_connection(task_action, connection, host_context, ignore_errors=True)
diff --git a/test/support/windows-integration/plugins/action/win_copy.py b/test/support/windows-integration/plugins/action/win_copy.py
index 79f72ef6..adb918be 100644
--- a/test/support/windows-integration/plugins/action/win_copy.py
+++ b/test/support/windows-integration/plugins/action/win_copy.py
@@ -18,7 +18,7 @@ import zipfile
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum
@@ -439,7 +439,7 @@ class ActionModule(ActionBase):
source_full = self._loader.get_real_file(source, decrypt=decrypt)
except AnsibleFileNotFound as e:
result['failed'] = True
- result['msg'] = "could not find src=%s, %s" % (source, to_text(e))
+ result['msg'] = "could not find src=%s, %s" % (source_full, to_text(e))
return result
original_basename = os.path.basename(source)
diff --git a/test/support/windows-integration/plugins/action/win_reboot.py b/test/support/windows-integration/plugins/action/win_reboot.py
index 76f4a66b..c408f4f3 100644
--- a/test/support/windows-integration/plugins/action/win_reboot.py
+++ b/test/support/windows-integration/plugins/action/win_reboot.py
@@ -4,9 +4,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from datetime import datetime, timezone
+from datetime import datetime
-from ansible.module_utils.common.text.converters import to_native
+from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_native
from ansible.plugins.action import ActionBase
from ansible.plugins.action.reboot import ActionModule as RebootActionModule
from ansible.utils.display import Display
@@ -64,7 +65,7 @@ class ActionModule(RebootActionModule, ActionBase):
result = {}
reboot_result = self._low_level_execute_command(reboot_command, sudoable=self.DEFAULT_SUDOABLE)
- result['start'] = datetime.now(timezone.utc)
+ result['start'] = datetime.utcnow()
# Test for "A system shutdown has already been scheduled. (1190)" and handle it gracefully
stdout = reboot_result['stdout']
diff --git a/test/support/windows-integration/plugins/modules/win_stat.ps1 b/test/support/windows-integration/plugins/modules/win_stat.ps1
index 9d29d6fc..071eb11c 100644
--- a/test/support/windows-integration/plugins/modules/win_stat.ps1
+++ b/test/support/windows-integration/plugins/modules/win_stat.ps1
@@ -95,7 +95,7 @@ If ($null -ne $info) {
isreadonly = ($attributes -contains "ReadOnly")
isreg = $false
isshared = $false
- nlink = 1 # Number of links to the file (hard links), overridden below if islnk
+ nlink = 1 # Number of links to the file (hard links), overriden below if islnk
# lnk_target = islnk or isjunction Target of the symlink. Note that relative paths remain relative
# lnk_source = islnk os isjunction Target of the symlink normalized for the remote filesystem
hlnk_targets = @()
diff --git a/test/units/_vendor/test_vendor.py b/test/units/_vendor/test_vendor.py
index 265f5b27..84b850e2 100644
--- a/test/units/_vendor/test_vendor.py
+++ b/test/units/_vendor/test_vendor.py
@@ -1,22 +1,27 @@
# (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
import os
import pkgutil
import pytest
import sys
-from unittest.mock import patch
+from unittest.mock import MagicMock, NonCallableMagicMock, patch
def reset_internal_vendor_package():
import ansible
ansible_vendor_path = os.path.join(os.path.dirname(ansible.__file__), '_vendor')
- list(map(sys.path.remove, [path for path in sys.path if path == ansible_vendor_path]))
+ if ansible_vendor_path in sys.path:
+ sys.path.remove(ansible_vendor_path)
for pkg in ['ansible._vendor', 'ansible']:
- sys.modules.pop(pkg, None)
+ if pkg in sys.modules:
+ del sys.modules[pkg]
def test_package_path_masking():
@@ -45,10 +50,16 @@ def test_vendored(vendored_pkg_names=None):
import ansible
ansible_vendor_path = os.path.join(os.path.dirname(ansible.__file__), '_vendor')
assert sys.path[0] == ansible_vendor_path
+
+ if ansible_vendor_path in previous_path:
+ previous_path.remove(ansible_vendor_path)
+
assert sys.path[1:] == previous_path
def test_vendored_conflict():
with pytest.warns(UserWarning) as w:
+ import pkgutil
+ import sys
test_vendored(vendored_pkg_names=['sys', 'pkgutil']) # pass a real package we know is already loaded
- assert any(list('pkgutil, sys' in str(msg.message) for msg in w)) # ensure both conflicting modules are listed and sorted
+ assert any('pkgutil, sys' in str(msg.message) for msg in w) # ensure both conflicting modules are listed and sorted
diff --git a/test/units/ansible_test/diff/add_binary_file.diff b/test/units/ansible_test/diff/add_binary_file.diff
deleted file mode 100644
index ef8f3628..00000000
--- a/test/units/ansible_test/diff/add_binary_file.diff
+++ /dev/null
@@ -1,4 +0,0 @@
-diff --git a/binary.dat b/binary.dat
-new file mode 100644
-index 0000000000..f76dd238ad
-Binary files /dev/null and b/binary.dat differ
diff --git a/test/units/ansible_test/diff/add_text_file.diff b/test/units/ansible_test/diff/add_text_file.diff
deleted file mode 100644
index 068d0138..00000000
--- a/test/units/ansible_test/diff/add_text_file.diff
+++ /dev/null
@@ -1,8 +0,0 @@
-diff --git a/test.txt b/test.txt
-new file mode 100644
-index 0000000000..814f4a4229
---- /dev/null
-+++ b/test.txt
-@@ -0,0 +1,2 @@
-+one
-+two
diff --git a/test/units/ansible_test/diff/add_trailing_newline.diff b/test/units/ansible_test/diff/add_trailing_newline.diff
deleted file mode 100644
index d83df60f..00000000
--- a/test/units/ansible_test/diff/add_trailing_newline.diff
+++ /dev/null
@@ -1,9 +0,0 @@
-diff --git a/test.txt b/test.txt
-index 9ed40b4425..814f4a4229 100644
---- a/test.txt
-+++ b/test.txt
-@@ -1,2 +1,2 @@
- one
--two
-\ No newline at end of file
-+two
diff --git a/test/units/ansible_test/diff/add_two_text_files.diff b/test/units/ansible_test/diff/add_two_text_files.diff
deleted file mode 100644
index f0c8fb02..00000000
--- a/test/units/ansible_test/diff/add_two_text_files.diff
+++ /dev/null
@@ -1,16 +0,0 @@
-diff --git a/one.txt b/one.txt
-new file mode 100644
-index 0000000000..99b976670b
---- /dev/null
-+++ b/one.txt
-@@ -0,0 +1,2 @@
-+One
-+1
-diff --git a/two.txt b/two.txt
-new file mode 100644
-index 0000000000..da06cc0974
---- /dev/null
-+++ b/two.txt
-@@ -0,0 +1,2 @@
-+Two
-+2
diff --git a/test/units/ansible_test/diff/context_no_trailing_newline.diff b/test/units/ansible_test/diff/context_no_trailing_newline.diff
deleted file mode 100644
index 519d635a..00000000
--- a/test/units/ansible_test/diff/context_no_trailing_newline.diff
+++ /dev/null
@@ -1,8 +0,0 @@
-diff --git a/test.txt b/test.txt
-index 9ed40b4425..64c5e5885a 100644
---- a/test.txt
-+++ b/test.txt
-@@ -1,2 +1 @@
--one
- two
-\ No newline at end of file
diff --git a/test/units/ansible_test/diff/multiple_context_lines.diff b/test/units/ansible_test/diff/multiple_context_lines.diff
deleted file mode 100644
index fd98b7ad..00000000
--- a/test/units/ansible_test/diff/multiple_context_lines.diff
+++ /dev/null
@@ -1,10 +0,0 @@
-diff --git a/test.txt b/test.txt
-index 949a655cb3..08c59a7cf1 100644
---- a/test.txt
-+++ b/test.txt
-@@ -1,5 +1,3 @@
- One
--Two
- Three
--Four
- Five
diff --git a/test/units/ansible_test/diff/parse_delete.diff b/test/units/ansible_test/diff/parse_delete.diff
deleted file mode 100644
index 866d43cc..00000000
--- a/test/units/ansible_test/diff/parse_delete.diff
+++ /dev/null
@@ -1,16 +0,0 @@
-diff --git a/changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml b/changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml
-deleted file mode 100644
-index a5bc88ffe3..0000000000
---- a/changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml
-+++ /dev/null
-@@ -1,10 +0,0 @@
-----
--
--trivial:
-- - >-
-- integration tests — added command invocation logging via ``set -x``
-- to ``runme.sh`` scripts where it was missing and improved failing
-- fast in those scripts that use pipes (via ``set -o pipefail``).
-- See `PR #79263` https://github.com/ansible/ansible/pull/79263>`__.
--
--...
diff --git a/test/units/ansible_test/diff/parse_rename.diff b/test/units/ansible_test/diff/parse_rename.diff
deleted file mode 100644
index 54563727..00000000
--- a/test/units/ansible_test/diff/parse_rename.diff
+++ /dev/null
@@ -1,8 +0,0 @@
-diff --git a/packaging/debian/ansible-base.dirs b/packaging/debian/ansible-core.dirs
-similarity index 100%
-rename from packaging/debian/ansible-base.dirs
-rename to packaging/debian/ansible-core.dirs
-diff --git a/packaging/debian/ansible-base.install b/packaging/debian/ansible-core.install
-similarity index 100%
-rename from packaging/debian/ansible-base.install
-rename to packaging/debian/ansible-core.install
diff --git a/test/units/ansible_test/diff/remove_trailing_newline.diff b/test/units/ansible_test/diff/remove_trailing_newline.diff
deleted file mode 100644
index c0750ae1..00000000
--- a/test/units/ansible_test/diff/remove_trailing_newline.diff
+++ /dev/null
@@ -1,9 +0,0 @@
-diff --git a/test.txt b/test.txt
-index 814f4a4229..9ed40b4425 100644
---- a/test.txt
-+++ b/test.txt
-@@ -1,2 +1,2 @@
- one
--two
-+two
-\ No newline at end of file
diff --git a/test/units/ansible_test/test_diff.py b/test/units/ansible_test/test_diff.py
deleted file mode 100644
index 26ef5226..00000000
--- a/test/units/ansible_test/test_diff.py
+++ /dev/null
@@ -1,178 +0,0 @@
-"""Tests for the diff module."""
-from __future__ import annotations
-
-import pathlib
-import pytest
-import typing as t
-
-if t.TYPE_CHECKING: # pragma: no cover
- # noinspection PyProtectedMember
- from ansible_test._internal.diff import FileDiff
-
-
-@pytest.fixture()
-def diffs(request: pytest.FixtureRequest) -> list[FileDiff]:
- """A fixture which returns the parsed diff associated with the current test."""
- return get_parsed_diff(request.node.name.removeprefix('test_'))
-
-
-def get_parsed_diff(name: str) -> list[FileDiff]:
- """Parse and return the named git diff."""
- cache = pathlib.Path(__file__).parent / 'diff' / f'{name}.diff'
- content = cache.read_text()
- lines = content.splitlines()
-
- assert lines
-
- # noinspection PyProtectedMember
- from ansible_test._internal.diff import parse_diff
-
- diffs = parse_diff(lines)
-
- assert diffs
-
- for item in diffs:
- assert item.headers
- assert item.is_complete
-
- item.old.format_lines()
- item.new.format_lines()
-
- for line_range in item.old.ranges:
- assert line_range[1] >= line_range[0] > 0
-
- for line_range in item.new.ranges:
- assert line_range[1] >= line_range[0] > 0
-
- return diffs
-
-
-def test_add_binary_file(diffs: list[FileDiff]) -> None:
- """Add a binary file."""
- assert len(diffs) == 1
-
- assert diffs[0].old.exists
- assert diffs[0].new.exists
-
- assert diffs[0].old.path == 'binary.dat'
- assert diffs[0].new.path == 'binary.dat'
-
- assert diffs[0].old.eof_newline
- assert diffs[0].new.eof_newline
-
-
-def test_add_text_file(diffs: list[FileDiff]) -> None:
- """Add a new file."""
- assert len(diffs) == 1
-
- assert not diffs[0].old.exists
- assert diffs[0].new.exists
-
- assert diffs[0].old.path == 'test.txt'
- assert diffs[0].new.path == 'test.txt'
-
- assert diffs[0].old.eof_newline
- assert diffs[0].new.eof_newline
-
-
-def test_remove_trailing_newline(diffs: list[FileDiff]) -> None:
- """Remove the trailing newline from a file."""
- assert len(diffs) == 1
-
- assert diffs[0].old.exists
- assert diffs[0].new.exists
-
- assert diffs[0].old.path == 'test.txt'
- assert diffs[0].new.path == 'test.txt'
-
- assert diffs[0].old.eof_newline
- assert not diffs[0].new.eof_newline
-
-
-def test_add_trailing_newline(diffs: list[FileDiff]) -> None:
- """Add a trailing newline to a file."""
- assert len(diffs) == 1
-
- assert diffs[0].old.exists
- assert diffs[0].new.exists
-
- assert diffs[0].old.path == 'test.txt'
- assert diffs[0].new.path == 'test.txt'
-
- assert not diffs[0].old.eof_newline
- assert diffs[0].new.eof_newline
-
-
-def test_add_two_text_files(diffs: list[FileDiff]) -> None:
- """Add two text files."""
- assert len(diffs) == 2
-
- assert not diffs[0].old.exists
- assert diffs[0].new.exists
-
- assert diffs[0].old.path == 'one.txt'
- assert diffs[0].new.path == 'one.txt'
-
- assert diffs[0].old.eof_newline
- assert diffs[0].new.eof_newline
-
- assert not diffs[1].old.exists
- assert diffs[1].new.exists
-
- assert diffs[1].old.path == 'two.txt'
- assert diffs[1].new.path == 'two.txt'
-
- assert diffs[1].old.eof_newline
- assert diffs[1].new.eof_newline
-
-
-def test_context_no_trailing_newline(diffs: list[FileDiff]) -> None:
- """Context without a trailing newline."""
- assert len(diffs) == 1
-
- assert diffs[0].old.exists
- assert diffs[0].new.exists
-
- assert diffs[0].old.path == 'test.txt'
- assert diffs[0].new.path == 'test.txt'
-
- assert not diffs[0].old.eof_newline
- assert not diffs[0].new.eof_newline
-
-
-def test_multiple_context_lines(diffs: list[FileDiff]) -> None:
- """Multiple context lines."""
- assert len(diffs) == 1
-
- assert diffs[0].old.exists
- assert diffs[0].new.exists
-
- assert diffs[0].old.path == 'test.txt'
- assert diffs[0].new.path == 'test.txt'
-
- assert diffs[0].old.eof_newline
- assert diffs[0].new.eof_newline
-
-
-def test_parse_delete(diffs: list[FileDiff]) -> None:
- """Delete files."""
- assert len(diffs) == 1
-
- assert diffs[0].old.exists
- assert not diffs[0].new.exists
-
- assert diffs[0].old.path == 'changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml'
- assert diffs[0].new.path == 'changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml'
-
-
-def test_parse_rename(diffs) -> None:
- """Rename files."""
- assert len(diffs) == 2
-
- assert all(item.old.path != item.new.path and item.old.exists and item.new.exists for item in diffs)
-
- assert diffs[0].old.path == 'packaging/debian/ansible-base.dirs'
- assert diffs[0].new.path == 'packaging/debian/ansible-core.dirs'
-
- assert diffs[1].old.path == 'packaging/debian/ansible-base.install'
- assert diffs[1].new.path == 'packaging/debian/ansible-core.install'
diff --git a/test/units/cli/arguments/test_optparse_helpers.py b/test/units/cli/arguments/test_optparse_helpers.py
index ae8e8d73..082c9be4 100644
--- a/test/units/cli/arguments/test_optparse_helpers.py
+++ b/test/units/cli/arguments/test_optparse_helpers.py
@@ -14,7 +14,10 @@ from ansible.cli.arguments import option_helpers as opt_help
from ansible import __path__ as ansible_path
from ansible.release import __version__ as ansible_version
-cpath = C.DEFAULT_MODULE_PATH
+if C.DEFAULT_MODULE_PATH is None:
+ cpath = u'Default w/o overrides'
+else:
+ cpath = C.DEFAULT_MODULE_PATH
FAKE_PROG = u'ansible-cli-test'
VERSION_OUTPUT = opt_help.version(prog=FAKE_PROG)
diff --git a/test/units/cli/galaxy/test_execute_list_collection.py b/test/units/cli/galaxy/test_execute_list_collection.py
index 5641cb86..e8a834d9 100644
--- a/test/units/cli/galaxy/test_execute_list_collection.py
+++ b/test/units/cli/galaxy/test_execute_list_collection.py
@@ -5,29 +5,37 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-import pathlib
-
import pytest
-from ansible import constants as C
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.galaxy import collection
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
-from ansible.module_utils.common.text.converters import to_native
-from ansible.plugins.loader import init_plugin_loader
+from ansible.module_utils._text import to_native
+
+
+def path_exists(path):
+ if to_native(path) == '/root/.ansible/collections/ansible_collections/sandwiches/ham':
+ return False
+ elif to_native(path) == '/usr/share/ansible/collections/ansible_collections/sandwiches/reuben':
+ return False
+ elif to_native(path) == 'nope':
+ return False
+ else:
+ return True
def isdir(path):
if to_native(path) == 'nope':
return False
- return True
+ else:
+ return True
def cliargs(collections_paths=None, collection_name=None):
if collections_paths is None:
- collections_paths = ['/root/.ansible/collections', '/usr/share/ansible/collections']
+ collections_paths = ['~/root/.ansible/collections', '/usr/share/ansible/collections']
context.CLIARGS._store = {
'collections_path': collections_paths,
@@ -38,61 +46,95 @@ def cliargs(collections_paths=None, collection_name=None):
@pytest.fixture
-def mock_from_path(mocker, monkeypatch):
- collection_args = {
- '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj': (
+def mock_collection_objects(mocker):
+ mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', '/usr/share/ansible/collections'])
+ mocker.patch('ansible.cli.galaxy.validate_collection_path',
+ side_effect=['/root/.ansible/collections/ansible_collections', '/usr/share/ansible/collections/ansible_collections'])
+
+ collection_args_1 = (
+ (
'sandwiches.pbj',
- '1.0.0',
- '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj',
+ '1.5.0',
+ None,
'dir',
None,
),
- '/usr/share/ansible/collections/ansible_collections/sandwiches/ham': (
- 'sandwiches.ham',
- '1.0.0',
- '/usr/share/ansible/collections/ansible_collections/sandwiches/ham',
+ (
+ 'sandwiches.reuben',
+ '2.5.0',
+ None,
'dir',
None,
),
- '/root/.ansible/collections/ansible_collections/sandwiches/pbj': (
+ )
+
+ collection_args_2 = (
+ (
'sandwiches.pbj',
- '1.5.0',
- '/root/.ansible/collections/ansible_collections/sandwiches/pbj',
+ '1.0.0',
+ None,
'dir',
None,
),
- '/root/.ansible/collections/ansible_collections/sandwiches/reuben': (
- 'sandwiches.reuben',
- '2.5.0',
- '/root/.ansible/collections/ansible_collections/sandwiches/reuben',
+ (
+ 'sandwiches.ham',
+ '1.0.0',
+ None,
'dir',
None,
),
- }
-
- def dispatch_requirement(path, am):
- return Requirement(*collection_args[to_native(path)])
-
- files_mock = mocker.MagicMock()
- mocker.patch('ansible.galaxy.collection.files', return_value=files_mock)
- files_mock.glob.return_value = []
-
- mocker.patch.object(pathlib.Path, 'is_dir', return_value=True)
- for path, args in collection_args.items():
- files_mock.glob.return_value.append(pathlib.Path(args[2]))
+ )
- mocker.patch('ansible.galaxy.collection.Candidate.from_dir_path_as_unknown', side_effect=dispatch_requirement)
+ collections_path_1 = [Requirement(*cargs) for cargs in collection_args_1]
+ collections_path_2 = [Requirement(*cargs) for cargs in collection_args_2]
- monkeypatch.setattr(C, 'COLLECTIONS_PATHS', ['/root/.ansible/collections', '/usr/share/ansible/collections'])
+ mocker.patch('ansible.cli.galaxy.find_existing_collections', side_effect=[collections_path_1, collections_path_2])
-def test_execute_list_collection_all(mocker, capsys, mock_from_path, tmp_path_factory):
+@pytest.fixture
+def mock_from_path(mocker):
+ def _from_path(collection_name='pbj'):
+ collection_args = {
+ 'sandwiches.pbj': (
+ (
+ 'sandwiches.pbj',
+ '1.5.0',
+ None,
+ 'dir',
+ None,
+ ),
+ (
+ 'sandwiches.pbj',
+ '1.0.0',
+ None,
+ 'dir',
+ None,
+ ),
+ ),
+ 'sandwiches.ham': (
+ (
+ 'sandwiches.ham',
+ '1.0.0',
+ None,
+ 'dir',
+ None,
+ ),
+ ),
+ }
+
+ from_path_objects = [Requirement(*args) for args in collection_args[collection_name]]
+ mocker.patch('ansible.cli.galaxy.Requirement.from_dir_path_as_unknown', side_effect=from_path_objects)
+
+ return _from_path
+
+
+def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tmp_path_factory):
"""Test listing all collections from multiple paths"""
cliargs()
- init_plugin_loader()
mocker.patch('os.path.exists', return_value=True)
+ mocker.patch('os.path.isdir', return_value=True)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
@@ -110,20 +152,21 @@ def test_execute_list_collection_all(mocker, capsys, mock_from_path, tmp_path_fa
assert out_lines[5] == 'sandwiches.reuben 2.5.0 '
assert out_lines[6] == ''
assert out_lines[7] == '# /usr/share/ansible/collections/ansible_collections'
- assert out_lines[8] == 'Collection Version'
- assert out_lines[9] == '----------------- -------'
- assert out_lines[10] == 'sandwiches.ham 1.0.0 '
- assert out_lines[11] == 'sandwiches.pbj 1.0.0 '
+ assert out_lines[8] == 'Collection Version'
+ assert out_lines[9] == '-------------- -------'
+ assert out_lines[10] == 'sandwiches.ham 1.0.0 '
+ assert out_lines[11] == 'sandwiches.pbj 1.0.0 '
-def test_execute_list_collection_specific(mocker, capsys, mock_from_path, tmp_path_factory):
+def test_execute_list_collection_specific(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory):
"""Test listing a specific collection"""
collection_name = 'sandwiches.ham'
+ mock_from_path(collection_name)
cliargs(collection_name=collection_name)
- init_plugin_loader()
-
+ mocker.patch('os.path.exists', path_exists)
+ mocker.patch('os.path.isdir', return_value=True)
mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5))
@@ -143,14 +186,15 @@ def test_execute_list_collection_specific(mocker, capsys, mock_from_path, tmp_pa
assert out_lines[4] == 'sandwiches.ham 1.0.0 '
-def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_from_path, tmp_path_factory):
+def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory):
"""Test listing a specific collection that exists at multiple paths"""
collection_name = 'sandwiches.pbj'
+ mock_from_path(collection_name)
cliargs(collection_name=collection_name)
- init_plugin_loader()
-
+ mocker.patch('os.path.exists', path_exists)
+ mocker.patch('os.path.isdir', return_value=True)
mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
@@ -177,8 +221,6 @@ def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_from_pa
def test_execute_list_collection_specific_invalid_fqcn(mocker, tmp_path_factory):
"""Test an invalid fully qualified collection name (FQCN)"""
- init_plugin_loader()
-
collection_name = 'no.good.name'
cliargs(collection_name=collection_name)
@@ -196,7 +238,6 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory
"""Test listing collections when no valid paths are given"""
cliargs()
- init_plugin_loader()
mocker.patch('os.path.exists', return_value=True)
mocker.patch('os.path.isdir', return_value=False)
@@ -216,14 +257,13 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory
assert 'exists, but it\nis not a directory.' in err
-def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_from_path, tmp_path_factory):
+def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collection_objects, tmp_path_factory):
"""Test listing all collections when one invalid path is given"""
- cliargs(collections_paths=['nope'])
- init_plugin_loader()
-
+ cliargs()
mocker.patch('os.path.exists', return_value=True)
mocker.patch('os.path.isdir', isdir)
+ mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', 'nope'])
mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False)
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', '-p', 'nope'])
diff --git a/test/units/cli/test_adhoc.py b/test/units/cli/test_adhoc.py
index 7bcca471..18775f5d 100644
--- a/test/units/cli/test_adhoc.py
+++ b/test/units/cli/test_adhoc.py
@@ -93,15 +93,19 @@ def test_run_no_extra_vars():
assert exec_info.value.code == 2
-def test_ansible_version(capsys):
+def test_ansible_version(capsys, mocker):
adhoc_cli = AdHocCLI(args=['/bin/ansible', '--version'])
with pytest.raises(SystemExit):
adhoc_cli.run()
version = capsys.readouterr()
- version_lines = version.out.splitlines()
+ try:
+ version_lines = version.out.splitlines()
+ except AttributeError:
+ # Python 2.6 does return a named tuple, so get the first item
+ version_lines = version[0].splitlines()
assert len(version_lines) == 9, 'Incorrect number of lines in "ansible --version" output'
- assert re.match(r'ansible \[core [0-9.a-z]+\]', version_lines[0]), 'Incorrect ansible version line in "ansible --version" output'
+ assert re.match(r'ansible \[core [0-9.a-z]+\]$', version_lines[0]), 'Incorrect ansible version line in "ansible --version" output'
assert re.match(' config file = .*$', version_lines[1]), 'Incorrect config file line in "ansible --version" output'
assert re.match(' configured module search path = .*$', version_lines[2]), 'Incorrect module search path in "ansible --version" output'
assert re.match(' ansible python module location = .*$', version_lines[3]), 'Incorrect python module location in "ansible --version" output'
diff --git a/test/units/cli/test_data/collection_skeleton/README.md b/test/units/cli/test_data/collection_skeleton/README.md
index 2e3e4ce5..4cfd8afe 100644
--- a/test/units/cli/test_data/collection_skeleton/README.md
+++ b/test/units/cli/test_data/collection_skeleton/README.md
@@ -1 +1 @@
-A readme
+A readme \ No newline at end of file
diff --git a/test/units/cli/test_data/collection_skeleton/docs/My Collection.md b/test/units/cli/test_data/collection_skeleton/docs/My Collection.md
index 0d6781bc..6fa917f2 100644
--- a/test/units/cli/test_data/collection_skeleton/docs/My Collection.md
+++ b/test/units/cli/test_data/collection_skeleton/docs/My Collection.md
@@ -1 +1 @@
-Welcome to my test collection doc for {{ namespace }}.
+Welcome to my test collection doc for {{ namespace }}. \ No newline at end of file
diff --git a/test/units/cli/test_doc.py b/test/units/cli/test_doc.py
index 50b714eb..b10f0888 100644
--- a/test/units/cli/test_doc.py
+++ b/test/units/cli/test_doc.py
@@ -5,7 +5,7 @@ __metaclass__ = type
import pytest
from ansible.cli.doc import DocCLI, RoleMixin
-from ansible.plugins.loader import module_loader, init_plugin_loader
+from ansible.plugins.loader import module_loader
TTY_IFY_DATA = {
@@ -118,7 +118,6 @@ def test_builtin_modules_list():
args = ['ansible-doc', '-l', 'ansible.builtin', '-t', 'module']
obj = DocCLI(args=args)
obj.parse()
- init_plugin_loader()
result = obj._list_plugins('module', module_loader)
assert len(result) > 0
diff --git a/test/units/cli/test_galaxy.py b/test/units/cli/test_galaxy.py
index 80a2dfae..8ff56408 100644
--- a/test/units/cli/test_galaxy.py
+++ b/test/units/cli/test_galaxy.py
@@ -20,8 +20,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import contextlib
-
import ansible
from io import BytesIO
import json
@@ -39,7 +37,7 @@ from ansible.cli.galaxy import GalaxyCLI
from ansible.galaxy import collection
from ansible.galaxy.api import GalaxyAPI
from ansible.errors import AnsibleError
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.utils import context_objects as co
from ansible.utils.display import Display
from units.compat import unittest
@@ -62,7 +60,8 @@ class TestGalaxy(unittest.TestCase):
cls.temp_dir = tempfile.mkdtemp(prefix='ansible-test_galaxy-')
os.chdir(cls.temp_dir)
- shutil.rmtree("./delete_me", ignore_errors=True)
+ if os.path.exists("./delete_me"):
+ shutil.rmtree("./delete_me")
# creating framework for a role
gc = GalaxyCLI(args=["ansible-galaxy", "init", "--offline", "delete_me"])
@@ -72,7 +71,8 @@ class TestGalaxy(unittest.TestCase):
# making a temp dir for role installation
cls.role_path = os.path.join(tempfile.mkdtemp(), "roles")
- os.makedirs(cls.role_path)
+ if not os.path.isdir(cls.role_path):
+ os.makedirs(cls.role_path)
# creating a tar file name for class data
cls.role_tar = './delete_me.tar.gz'
@@ -80,29 +80,37 @@ class TestGalaxy(unittest.TestCase):
# creating a temp file with installation requirements
cls.role_req = './delete_me_requirements.yml'
- with open(cls.role_req, "w") as fd:
- fd.write("- 'src': '%s'\n 'name': '%s'\n 'path': '%s'" % (cls.role_tar, cls.role_name, cls.role_path))
+ fd = open(cls.role_req, "w")
+ fd.write("- 'src': '%s'\n 'name': '%s'\n 'path': '%s'" % (cls.role_tar, cls.role_name, cls.role_path))
+ fd.close()
@classmethod
def makeTar(cls, output_file, source_dir):
''' used for making a tarfile from a role directory '''
# adding directory into a tar file
- with tarfile.open(output_file, "w:gz") as tar:
+ try:
+ tar = tarfile.open(output_file, "w:gz")
tar.add(source_dir, arcname=os.path.basename(source_dir))
+ except AttributeError: # tarfile obj. has no attribute __exit__ prior to python 2. 7
+ pass
+ finally: # ensuring closure of tarfile obj
+ tar.close()
@classmethod
def tearDownClass(cls):
'''After tests are finished removes things created in setUpClass'''
# deleting the temp role directory
- shutil.rmtree(cls.role_dir, ignore_errors=True)
- with contextlib.suppress(FileNotFoundError):
+ if os.path.exists(cls.role_dir):
+ shutil.rmtree(cls.role_dir)
+ if os.path.exists(cls.role_req):
os.remove(cls.role_req)
- with contextlib.suppress(FileNotFoundError):
+ if os.path.exists(cls.role_tar):
os.remove(cls.role_tar)
- shutil.rmtree(cls.role_path, ignore_errors=True)
+ if os.path.isdir(cls.role_path):
+ shutil.rmtree(cls.role_path)
os.chdir('/')
- shutil.rmtree(cls.temp_dir, ignore_errors=True)
+ shutil.rmtree(cls.temp_dir)
def setUp(self):
# Reset the stored command line args
@@ -129,7 +137,8 @@ class TestGalaxy(unittest.TestCase):
role_info = {'name': 'some_role_name',
'galaxy_info': galaxy_info}
display_result = gc._display_role_info(role_info)
- self.assertNotEqual(display_result.find('\n\tgalaxy_info:'), -1, 'Expected galaxy_info to be indented once')
+ if display_result.find('\n\tgalaxy_info:') == -1:
+ self.fail('Expected galaxy_info to be indented once')
def test_run(self):
''' verifies that the GalaxyCLI object's api is created and that execute() is called. '''
@@ -167,9 +176,7 @@ class TestGalaxy(unittest.TestCase):
with patch.object(ansible.utils.display.Display, "display", return_value=None) as mocked_display:
# testing that error expected is raised
self.assertRaises(AnsibleError, gc.run)
- assert mocked_display.call_count == 2
- assert mocked_display.mock_calls[0].args[0] == "Starting galaxy role install process"
- assert "fake_role_name was NOT installed successfully" in mocked_display.mock_calls[1].args[0]
+ self.assertTrue(mocked_display.called_once_with("- downloading role 'fake_role_name', owned by "))
def test_exit_without_ignore_with_flag(self):
''' tests that GalaxyCLI exits without the error specified if the --ignore-errors flag is used '''
@@ -177,9 +184,7 @@ class TestGalaxy(unittest.TestCase):
gc = GalaxyCLI(args=["ansible-galaxy", "install", "--server=None", "fake_role_name", "--ignore-errors"])
with patch.object(ansible.utils.display.Display, "display", return_value=None) as mocked_display:
gc.run()
- assert mocked_display.call_count == 2
- assert mocked_display.mock_calls[0].args[0] == "Starting galaxy role install process"
- assert "fake_role_name was NOT installed successfully" in mocked_display.mock_calls[1].args[0]
+ self.assertTrue(mocked_display.called_once_with("- downloading role 'fake_role_name', owned by "))
def test_parse_no_action(self):
''' testing the options parser when no action is given '''
@@ -272,6 +277,8 @@ class ValidRoleTests(object):
# Make temp directory for testing
cls.test_dir = tempfile.mkdtemp()
+ if not os.path.isdir(cls.test_dir):
+ os.makedirs(cls.test_dir)
cls.role_dir = os.path.join(cls.test_dir, role_name)
cls.role_name = role_name
@@ -290,8 +297,9 @@ class ValidRoleTests(object):
cls.role_skeleton_path = gc.galaxy.default_role_skeleton_path
@classmethod
- def tearDownRole(cls):
- shutil.rmtree(cls.test_dir, ignore_errors=True)
+ def tearDownClass(cls):
+ if os.path.isdir(cls.test_dir):
+ shutil.rmtree(cls.test_dir)
def test_metadata(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
@@ -341,10 +349,6 @@ class TestGalaxyInitDefault(unittest.TestCase, ValidRoleTests):
def setUpClass(cls):
cls.setUpRole(role_name='delete_me')
- @classmethod
- def tearDownClass(cls):
- cls.tearDownRole()
-
def test_metadata_contents(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
metadata = yaml.safe_load(mf)
@@ -357,10 +361,6 @@ class TestGalaxyInitAPB(unittest.TestCase, ValidRoleTests):
def setUpClass(cls):
cls.setUpRole('delete_me_apb', galaxy_args=['--type=apb'])
- @classmethod
- def tearDownClass(cls):
- cls.tearDownRole()
-
def test_metadata_apb_tag(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
metadata = yaml.safe_load(mf)
@@ -391,10 +391,6 @@ class TestGalaxyInitContainer(unittest.TestCase, ValidRoleTests):
def setUpClass(cls):
cls.setUpRole('delete_me_container', galaxy_args=['--type=container'])
- @classmethod
- def tearDownClass(cls):
- cls.tearDownRole()
-
def test_metadata_container_tag(self):
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
metadata = yaml.safe_load(mf)
@@ -426,10 +422,6 @@ class TestGalaxyInitSkeleton(unittest.TestCase, ValidRoleTests):
role_skeleton_path = os.path.join(os.path.split(__file__)[0], 'test_data', 'role_skeleton')
cls.setUpRole('delete_me_skeleton', skeleton_path=role_skeleton_path, use_explicit_type=True)
- @classmethod
- def tearDownClass(cls):
- cls.tearDownRole()
-
def test_empty_files_dir(self):
files_dir = os.path.join(self.role_dir, 'files')
self.assertTrue(os.path.isdir(files_dir))
@@ -771,20 +763,6 @@ def test_collection_install_with_names(collection_install):
assert mock_install.call_args[0][6] is False # force_deps
-def test_collection_install_with_invalid_requirements_format(collection_install):
- output_dir = collection_install[2]
-
- requirements_file = os.path.join(output_dir, 'requirements.yml')
- with open(requirements_file, 'wb') as req_obj:
- req_obj.write(b'"invalid"')
-
- galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
- '--collections-path', output_dir]
-
- with pytest.raises(AnsibleError, match="Expecting requirements yaml to be a list or dictionary but got str"):
- GalaxyCLI(args=galaxy_args).run()
-
-
def test_collection_install_with_requirements_file(collection_install):
mock_install, mock_warning, output_dir = collection_install
@@ -1264,7 +1242,12 @@ def test_install_implicit_role_with_collections(requirements_file, monkeypatch):
assert len(mock_role_install.call_args[0][0]) == 1
assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'
- assert not any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
+ found = False
+ for mock_call in mock_display.mock_calls:
+ if 'contains collections which will be ignored' in mock_call[1][0]:
+ found = True
+ break
+ assert not found
@pytest.mark.parametrize('requirements_file', ['''
@@ -1291,7 +1274,12 @@ def test_install_explicit_role_with_collections(requirements_file, monkeypatch):
assert len(mock_role_install.call_args[0][0]) == 1
assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'
- assert any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
+ found = False
+ for mock_call in mock_display.mock_calls:
+ if 'contains collections which will be ignored' in mock_call[1][0]:
+ found = True
+ break
+ assert found
@pytest.mark.parametrize('requirements_file', ['''
@@ -1318,7 +1306,12 @@ def test_install_role_with_collections_and_path(requirements_file, monkeypatch):
assert len(mock_role_install.call_args[0][0]) == 1
assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'
- assert any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
+ found = False
+ for mock_call in mock_display.mock_calls:
+ if 'contains collections which will be ignored' in mock_call[1][0]:
+ found = True
+ break
+ assert found
@pytest.mark.parametrize('requirements_file', ['''
@@ -1345,4 +1338,9 @@ def test_install_collection_with_roles(requirements_file, monkeypatch):
assert mock_role_install.call_count == 0
- assert any(list('contains roles which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
+ found = False
+ for mock_call in mock_display.mock_calls:
+ if 'contains roles which will be ignored' in mock_call[1][0]:
+ found = True
+ break
+ assert found
diff --git a/test/units/cli/test_vault.py b/test/units/cli/test_vault.py
index f1399c3f..2304f4d5 100644
--- a/test/units/cli/test_vault.py
+++ b/test/units/cli/test_vault.py
@@ -29,7 +29,7 @@ from units.mock.vault_helper import TextVaultSecret
from ansible import context, errors
from ansible.cli.vault import VaultCLI
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.utils import context_objects as co
@@ -171,28 +171,7 @@ class TestVaultCli(unittest.TestCase):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'create', '/dev/null/foo'])
cli.parse()
- self.assertRaisesRegex(errors.AnsibleOptionsError,
- "not a tty, editor cannot be opened",
- cli.run)
-
- @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
- @patch('ansible.cli.vault.VaultEditor')
- def test_create_skip_tty_check(self, mock_vault_editor, mock_setup_vault_secrets):
- mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
- cli = VaultCLI(args=['ansible-vault', 'create', '--skip-tty-check', '/dev/null/foo'])
- cli.parse()
- cli.run()
-
- @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
- @patch('ansible.cli.vault.VaultEditor')
- def test_create_with_tty(self, mock_vault_editor, mock_setup_vault_secrets):
- mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
- self.tty_stdout_patcher = patch('ansible.cli.sys.stdout.isatty', return_value=True)
- self.tty_stdout_patcher.start()
- cli = VaultCLI(args=['ansible-vault', 'create', '/dev/null/foo'])
- cli.parse()
cli.run()
- self.tty_stdout_patcher.stop()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
diff --git a/test/units/compat/mock.py b/test/units/compat/mock.py
index 03154609..58dc78e0 100644
--- a/test/units/compat/mock.py
+++ b/test/units/compat/mock.py
@@ -6,7 +6,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
try:
- from unittest.mock import ( # pylint: disable=unused-import
+ from unittest.mock import (
call,
patch,
mock_open,
diff --git a/test/units/config/manager/test_find_ini_config_file.py b/test/units/config/manager/test_find_ini_config_file.py
index e67eecd9..df411388 100644
--- a/test/units/config/manager/test_find_ini_config_file.py
+++ b/test/units/config/manager/test_find_ini_config_file.py
@@ -13,7 +13,7 @@ import stat
import pytest
from ansible.config.manager import find_ini_config_file
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
real_exists = os.path.exists
real_isdir = os.path.isdir
@@ -28,17 +28,22 @@ cfg_in_homedir = os.path.expanduser('~/.ansible.cfg')
@pytest.fixture
-def setup_env(request, monkeypatch):
+def setup_env(request):
cur_config = os.environ.get('ANSIBLE_CONFIG', None)
cfg_path = request.param[0]
if cfg_path is None and cur_config:
- monkeypatch.delenv('ANSIBLE_CONFIG')
+ del os.environ['ANSIBLE_CONFIG']
else:
- monkeypatch.setenv('ANSIBLE_CONFIG', request.param[0])
+ os.environ['ANSIBLE_CONFIG'] = request.param[0]
yield
+ if cur_config is None and cfg_path:
+ del os.environ['ANSIBLE_CONFIG']
+ else:
+ os.environ['ANSIBLE_CONFIG'] = cur_config
+
@pytest.fixture
def setup_existing_files(request, monkeypatch):
@@ -49,8 +54,10 @@ def setup_existing_files(request, monkeypatch):
return False
def _os_access(path, access):
- assert to_text(path) in (request.param[0])
- return True
+ if to_text(path) in (request.param[0]):
+ return True
+ else:
+ return False
# Enable user and system dirs so that we know cwd takes precedence
monkeypatch.setattr("os.path.exists", _os_path_exists)
@@ -155,11 +162,13 @@ class TestFindIniFile:
real_stat = os.stat
def _os_stat(path):
- assert path == working_dir
- from posix import stat_result
- stat_info = list(real_stat(path))
- stat_info[stat.ST_MODE] |= stat.S_IWOTH
- return stat_result(stat_info)
+ if path == working_dir:
+ from posix import stat_result
+ stat_info = list(real_stat(path))
+ stat_info[stat.ST_MODE] |= stat.S_IWOTH
+ return stat_result(stat_info)
+ else:
+ return real_stat(path)
monkeypatch.setattr('os.stat', _os_stat)
@@ -178,11 +187,13 @@ class TestFindIniFile:
real_stat = os.stat
def _os_stat(path):
- assert path == working_dir
- from posix import stat_result
- stat_info = list(real_stat(path))
- stat_info[stat.ST_MODE] |= stat.S_IWOTH
- return stat_result(stat_info)
+ if path == working_dir:
+ from posix import stat_result
+ stat_info = list(real_stat(path))
+ stat_info[stat.ST_MODE] |= stat.S_IWOTH
+ return stat_result(stat_info)
+ else:
+ return real_stat(path)
monkeypatch.setattr('os.stat', _os_stat)
@@ -204,14 +215,14 @@ class TestFindIniFile:
real_stat = os.stat
def _os_stat(path):
- if path != working_dir:
+ if path == working_dir:
+ from posix import stat_result
+ stat_info = list(real_stat(path))
+ stat_info[stat.ST_MODE] |= stat.S_IWOTH
+ return stat_result(stat_info)
+ else:
return real_stat(path)
- from posix import stat_result
- stat_info = list(real_stat(path))
- stat_info[stat.ST_MODE] |= stat.S_IWOTH
- return stat_result(stat_info)
-
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
@@ -229,11 +240,13 @@ class TestFindIniFile:
real_stat = os.stat
def _os_stat(path):
- assert path == working_dir
- from posix import stat_result
- stat_info = list(real_stat(path))
- stat_info[stat.ST_MODE] |= stat.S_IWOTH
- return stat_result(stat_info)
+ if path == working_dir:
+ from posix import stat_result
+ stat_info = list(real_stat(path))
+ stat_info[stat.ST_MODE] |= stat.S_IWOTH
+ return stat_result(stat_info)
+ else:
+ return real_stat(path)
monkeypatch.setattr('os.stat', _os_stat)
diff --git a/test/units/config/test3.cfg b/test/units/config/test3.cfg
deleted file mode 100644
index dab92956..00000000
--- a/test/units/config/test3.cfg
+++ /dev/null
@@ -1,4 +0,0 @@
-[colors]
-unreachable=bright red
-verbose=rgb013
-debug=gray10
diff --git a/test/units/config/test_manager.py b/test/units/config/test_manager.py
index 0848276c..8ef40437 100644
--- a/test/units/config/test_manager.py
+++ b/test/units/config/test_manager.py
@@ -10,7 +10,7 @@ import os
import os.path
import pytest
-from ansible.config.manager import ConfigManager, ensure_type, resolve_path, get_config_type
+from ansible.config.manager import ConfigManager, Setting, ensure_type, resolve_path, get_config_type
from ansible.errors import AnsibleOptionsError, AnsibleError
from ansible.module_utils.six import integer_types, string_types
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
@@ -18,7 +18,6 @@ from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
curdir = os.path.dirname(__file__)
cfg_file = os.path.join(curdir, 'test.cfg')
cfg_file2 = os.path.join(curdir, 'test2.cfg')
-cfg_file3 = os.path.join(curdir, 'test3.cfg')
ensure_test_data = [
('a,b', 'list', list),
@@ -66,15 +65,6 @@ ensure_test_data = [
('None', 'none', type(None))
]
-ensure_unquoting_test_data = [
- ('"value"', '"value"', 'str', 'env'),
- ('"value"', '"value"', 'str', 'yaml'),
- ('"value"', 'value', 'str', 'ini'),
- ('\'value\'', 'value', 'str', 'ini'),
- ('\'\'value\'\'', '\'value\'', 'str', 'ini'),
- ('""value""', '"value"', 'str', 'ini')
-]
-
class TestConfigManager:
@classmethod
@@ -89,11 +79,6 @@ class TestConfigManager:
def test_ensure_type(self, value, expected_type, python_type):
assert isinstance(ensure_type(value, expected_type), python_type)
- @pytest.mark.parametrize("value, expected_value, value_type, origin", ensure_unquoting_test_data)
- def test_ensure_type_unquoting(self, value, expected_value, value_type, origin):
- actual_value = ensure_type(value, value_type, origin)
- assert actual_value == expected_value
-
def test_resolve_path(self):
assert os.path.join(curdir, 'test.yml') == resolve_path('./test.yml', cfg_file)
@@ -157,16 +142,3 @@ class TestConfigManager:
actual_value = ensure_type(vault_var, value_type)
assert actual_value == "vault text"
-
-
-@pytest.mark.parametrize(("key", "expected_value"), (
- ("COLOR_UNREACHABLE", "bright red"),
- ("COLOR_VERBOSE", "rgb013"),
- ("COLOR_DEBUG", "gray10")))
-def test_256color_support(key, expected_value):
- # GIVEN: a config file containing 256-color values with default definitions
- manager = ConfigManager(cfg_file3)
- # WHEN: get config values
- actual_value = manager.get_config_value(key)
- # THEN: no error
- assert actual_value == expected_value
diff --git a/test/units/executor/module_common/conftest.py b/test/units/executor/module_common/conftest.py
deleted file mode 100644
index f0eef12e..00000000
--- a/test/units/executor/module_common/conftest.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import pytest
-
-
-@pytest.fixture
-def templar():
- class FakeTemplar:
- def template(self, template_string, *args, **kwargs):
- return template_string
-
- return FakeTemplar()
diff --git a/test/units/executor/module_common/test_modify_module.py b/test/units/executor/module_common/test_modify_module.py
index 89e4a163..dceef763 100644
--- a/test/units/executor/module_common/test_modify_module.py
+++ b/test/units/executor/module_common/test_modify_module.py
@@ -8,6 +8,9 @@ __metaclass__ = type
import pytest
from ansible.executor.module_common import modify_module
+from ansible.module_utils.six import PY2
+
+from test_module_common import templar
FAKE_OLD_MODULE = b'''#!/usr/bin/python
@@ -19,7 +22,10 @@ print('{"result": "%s"}' % sys.executable)
@pytest.fixture
def fake_old_module_open(mocker):
m = mocker.mock_open(read_data=FAKE_OLD_MODULE)
- mocker.patch('builtins.open', m)
+ if PY2:
+ mocker.patch('__builtin__.open', m)
+ else:
+ mocker.patch('builtins.open', m)
# this test no longer makes sense, since a Python module will always either have interpreter discovery run or
# an explicit interpreter passed (so we'll never default to the module shebang)
diff --git a/test/units/executor/module_common/test_module_common.py b/test/units/executor/module_common/test_module_common.py
index 6e2a4956..fa6add8c 100644
--- a/test/units/executor/module_common/test_module_common.py
+++ b/test/units/executor/module_common/test_module_common.py
@@ -27,6 +27,7 @@ import ansible.errors
from ansible.executor import module_common as amc
from ansible.executor.interpreter_discovery import InterpreterDiscoveryRequiredError
+from ansible.module_utils.six import PY2
class TestStripComments:
@@ -43,16 +44,15 @@ class TestStripComments:
assert amc._strip_comments(all_comments) == u""
def test_all_whitespace(self):
- all_whitespace = (
- '\n'
- ' \n'
- '\n'
- ' \n'
- '\t\t\r\n'
- '\n'
- ' '
- )
-
+ # Note: Do not remove the spaces on the blank lines below. They're
+ # test data to show that the lines get removed despite having spaces
+ # on them
+ all_whitespace = u"""
+
+
+
+\t\t\r\n
+ """ # nopep8
assert amc._strip_comments(all_whitespace) == u""
def test_somewhat_normal(self):
@@ -80,16 +80,31 @@ class TestSlurp:
def test_slurp_file(self, mocker):
mocker.patch('os.path.exists', side_effect=lambda x: True)
m = mocker.mock_open(read_data='This is a test')
- mocker.patch('builtins.open', m)
+ if PY2:
+ mocker.patch('__builtin__.open', m)
+ else:
+ mocker.patch('builtins.open', m)
assert amc._slurp('some_file') == 'This is a test'
def test_slurp_file_with_newlines(self, mocker):
mocker.patch('os.path.exists', side_effect=lambda x: True)
m = mocker.mock_open(read_data='#!/usr/bin/python\ndef test(args):\nprint("hi")\n')
- mocker.patch('builtins.open', m)
+ if PY2:
+ mocker.patch('__builtin__.open', m)
+ else:
+ mocker.patch('builtins.open', m)
assert amc._slurp('some_file') == '#!/usr/bin/python\ndef test(args):\nprint("hi")\n'
+@pytest.fixture
+def templar():
+ class FakeTemplar:
+ def template(self, template_string, *args, **kwargs):
+ return template_string
+
+ return FakeTemplar()
+
+
class TestGetShebang:
"""Note: We may want to change the API of this function in the future. It isn't a great API"""
def test_no_interpreter_set(self, templar):
diff --git a/test/units/executor/module_common/test_recursive_finder.py b/test/units/executor/module_common/test_recursive_finder.py
index 95b49d35..8136a006 100644
--- a/test/units/executor/module_common/test_recursive_finder.py
+++ b/test/units/executor/module_common/test_recursive_finder.py
@@ -29,7 +29,7 @@ from io import BytesIO
import ansible.errors
from ansible.executor.module_common import recursive_finder
-from ansible.plugins.loader import init_plugin_loader
+
# These are the modules that are brought in by module_utils/basic.py This may need to be updated
# when basic.py gains new imports
@@ -42,6 +42,7 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
'ansible/module_utils/basic.py',
'ansible/module_utils/six/__init__.py',
'ansible/module_utils/_text.py',
+ 'ansible/module_utils/common/_collections_compat.py',
'ansible/module_utils/common/_json_compat.py',
'ansible/module_utils/common/collections.py',
'ansible/module_utils/common/parameters.py',
@@ -78,8 +79,6 @@ ANSIBLE_LIB = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.pa
@pytest.fixture
def finder_containers():
- init_plugin_loader()
-
FinderContainers = namedtuple('FinderContainers', ['zf'])
zipoutput = BytesIO()
diff --git a/test/units/executor/test_interpreter_discovery.py b/test/units/executor/test_interpreter_discovery.py
index 10fc64be..43db5950 100644
--- a/test/units/executor/test_interpreter_discovery.py
+++ b/test/units/executor/test_interpreter_discovery.py
@@ -9,7 +9,7 @@ __metaclass__ = type
from unittest.mock import MagicMock
from ansible.executor.interpreter_discovery import discover_interpreter
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
mock_ubuntu_platform_res = to_text(
r'{"osrelease_content": "NAME=\"Ubuntu\"\nVERSION=\"16.04.5 LTS (Xenial Xerus)\"\nID=ubuntu\nID_LIKE=debian\n'
@@ -20,7 +20,7 @@ mock_ubuntu_platform_res = to_text(
def test_discovery_interpreter_linux_auto_legacy():
- res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3\nENDFOUND'
+ res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3.5\n/usr/bin/python3\nENDFOUND'
mock_action = MagicMock()
mock_action._low_level_execute_command.side_effect = [{'stdout': res1}, {'stdout': mock_ubuntu_platform_res}]
@@ -35,7 +35,7 @@ def test_discovery_interpreter_linux_auto_legacy():
def test_discovery_interpreter_linux_auto_legacy_silent():
- res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3\nENDFOUND'
+ res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3.5\n/usr/bin/python3\nENDFOUND'
mock_action = MagicMock()
mock_action._low_level_execute_command.side_effect = [{'stdout': res1}, {'stdout': mock_ubuntu_platform_res}]
@@ -47,7 +47,7 @@ def test_discovery_interpreter_linux_auto_legacy_silent():
def test_discovery_interpreter_linux_auto():
- res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3\nENDFOUND'
+ res1 = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python\n/usr/bin/python3.5\n/usr/bin/python3\nENDFOUND'
mock_action = MagicMock()
mock_action._low_level_execute_command.side_effect = [{'stdout': res1}, {'stdout': mock_ubuntu_platform_res}]
diff --git a/test/units/executor/test_play_iterator.py b/test/units/executor/test_play_iterator.py
index 0fc59756..6670888e 100644
--- a/test/units/executor/test_play_iterator.py
+++ b/test/units/executor/test_play_iterator.py
@@ -25,7 +25,6 @@ from unittest.mock import patch, MagicMock
from ansible.executor.play_iterator import HostState, PlayIterator, IteratingStates, FailedStates
from ansible.playbook import Playbook
from ansible.playbook.play_context import PlayContext
-from ansible.plugins.loader import init_plugin_loader
from units.mock.loader import DictDataLoader
from units.mock.path import mock_unfrackpath_noop
@@ -86,8 +85,7 @@ class TestPlayIterator(unittest.TestCase):
always:
- name: role always task
debug: msg="always task in block in role"
- - name: role include_tasks
- include_tasks: foo.yml
+ - include: foo.yml
- name: role task after include
debug: msg="after include in role"
- block:
@@ -172,12 +170,12 @@ class TestPlayIterator(unittest.TestCase):
self.assertIsNotNone(task)
self.assertEqual(task.name, "role always task")
self.assertIsNotNone(task._role)
- # role include_tasks
- (host_state, task) = itr.get_next_task_for_host(hosts[0])
- self.assertIsNotNone(task)
- self.assertEqual(task.action, 'include_tasks')
- self.assertEqual(task.name, "role include_tasks")
- self.assertIsNotNone(task._role)
+ # role include task
+ # (host_state, task) = itr.get_next_task_for_host(hosts[0])
+ # self.assertIsNotNone(task)
+ # self.assertEqual(task.action, 'debug')
+ # self.assertEqual(task.name, "role included task")
+ # self.assertIsNotNone(task._role)
# role task after include
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
@@ -288,7 +286,6 @@ class TestPlayIterator(unittest.TestCase):
self.assertNotIn(hosts[0], failed_hosts)
def test_play_iterator_nested_blocks(self):
- init_plugin_loader()
fake_loader = DictDataLoader({
"test_play.yml": """
- hosts: all
@@ -430,11 +427,12 @@ class TestPlayIterator(unittest.TestCase):
)
# iterate past first task
- dummy, task = itr.get_next_task_for_host(hosts[0])
+ _, task = itr.get_next_task_for_host(hosts[0])
while (task and task.action != 'debug'):
- dummy, task = itr.get_next_task_for_host(hosts[0])
+ _, task = itr.get_next_task_for_host(hosts[0])
- self.assertIsNotNone(task, 'iterated past end of play while looking for place to insert tasks')
+ if task is None:
+ raise Exception("iterated past end of play while looking for place to insert tasks")
# get the current host state and copy it so we can mutate it
s = itr.get_host_state(hosts[0])
diff --git a/test/units/executor/test_task_executor.py b/test/units/executor/test_task_executor.py
index 66ab0036..315d26ae 100644
--- a/test/units/executor/test_task_executor.py
+++ b/test/units/executor/test_task_executor.py
@@ -25,7 +25,7 @@ from units.compat import unittest
from unittest.mock import patch, MagicMock
from ansible.errors import AnsibleError
from ansible.executor.task_executor import TaskExecutor, remove_omit
-from ansible.plugins.loader import action_loader, lookup_loader
+from ansible.plugins.loader import action_loader, lookup_loader, module_loader
from ansible.parsing.yaml.objects import AnsibleUnicode
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes
from ansible.module_utils.six import text_type
@@ -57,7 +57,6 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=mock_shared_loader,
final_q=mock_queue,
- variable_manager=MagicMock(),
)
def test_task_executor_run(self):
@@ -85,7 +84,6 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=mock_shared_loader,
final_q=mock_queue,
- variable_manager=MagicMock(),
)
te._get_loop_items = MagicMock(return_value=None)
@@ -104,7 +102,7 @@ class TestTaskExecutor(unittest.TestCase):
self.assertIn("failed", res)
def test_task_executor_run_clean_res(self):
- te = TaskExecutor(None, MagicMock(), None, None, None, None, None, None, None)
+ te = TaskExecutor(None, MagicMock(), None, None, None, None, None, None)
te._get_loop_items = MagicMock(return_value=[1])
te._run_loop = MagicMock(
return_value=[
@@ -152,7 +150,6 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=mock_shared_loader,
final_q=mock_queue,
- variable_manager=MagicMock(),
)
items = te._get_loop_items()
@@ -189,7 +186,6 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=mock_shared_loader,
final_q=mock_queue,
- variable_manager=MagicMock(),
)
def _execute(variables):
@@ -210,7 +206,6 @@ class TestTaskExecutor(unittest.TestCase):
loader=DictDataLoader({}),
shared_loader_obj=MagicMock(),
final_q=MagicMock(),
- variable_manager=MagicMock(),
)
context = MagicMock(resolved=False)
@@ -219,20 +214,20 @@ class TestTaskExecutor(unittest.TestCase):
action_loader.has_plugin.return_value = True
action_loader.get.return_value = mock.sentinel.handler
+ mock_connection = MagicMock()
mock_templar = MagicMock()
action = 'namespace.prefix_suffix'
te._task.action = action
- te._connection = MagicMock()
- with patch('ansible.executor.task_executor.start_connection'):
- handler = te._get_action_handler(mock_templar)
+ handler = te._get_action_handler(mock_connection, mock_templar)
self.assertIs(mock.sentinel.handler, handler)
- action_loader.has_plugin.assert_called_once_with(action, collection_list=te._task.collections)
+ action_loader.has_plugin.assert_called_once_with(
+ action, collection_list=te._task.collections)
- action_loader.get.assert_called_with(
- te._task.action, task=te._task, connection=te._connection,
+ action_loader.get.assert_called_once_with(
+ te._task.action, task=te._task, connection=mock_connection,
play_context=te._play_context, loader=te._loader,
templar=mock_templar, shared_loader_obj=te._shared_loader_obj,
collection_list=te._task.collections)
@@ -247,7 +242,6 @@ class TestTaskExecutor(unittest.TestCase):
loader=DictDataLoader({}),
shared_loader_obj=MagicMock(),
final_q=MagicMock(),
- variable_manager=MagicMock(),
)
context = MagicMock(resolved=False)
@@ -257,21 +251,20 @@ class TestTaskExecutor(unittest.TestCase):
action_loader.get.return_value = mock.sentinel.handler
action_loader.__contains__.return_value = True
+ mock_connection = MagicMock()
mock_templar = MagicMock()
action = 'namespace.netconf_suffix'
module_prefix = action.split('_', 1)[0]
te._task.action = action
- te._connection = MagicMock()
- with patch('ansible.executor.task_executor.start_connection'):
- handler = te._get_action_handler(mock_templar)
+ handler = te._get_action_handler(mock_connection, mock_templar)
self.assertIs(mock.sentinel.handler, handler)
action_loader.has_plugin.assert_has_calls([mock.call(action, collection_list=te._task.collections), # called twice
mock.call(module_prefix, collection_list=te._task.collections)])
- action_loader.get.assert_called_with(
- module_prefix, task=te._task, connection=te._connection,
+ action_loader.get.assert_called_once_with(
+ module_prefix, task=te._task, connection=mock_connection,
play_context=te._play_context, loader=te._loader,
templar=mock_templar, shared_loader_obj=te._shared_loader_obj,
collection_list=te._task.collections)
@@ -286,7 +279,6 @@ class TestTaskExecutor(unittest.TestCase):
loader=DictDataLoader({}),
shared_loader_obj=MagicMock(),
final_q=MagicMock(),
- variable_manager=MagicMock(),
)
action_loader = te._shared_loader_obj.action_loader
@@ -297,22 +289,20 @@ class TestTaskExecutor(unittest.TestCase):
context = MagicMock(resolved=False)
module_loader.find_plugin_with_context.return_value = context
+ mock_connection = MagicMock()
mock_templar = MagicMock()
action = 'namespace.prefix_suffix'
module_prefix = action.split('_', 1)[0]
te._task.action = action
- te._connection = MagicMock()
-
- with patch('ansible.executor.task_executor.start_connection'):
- handler = te._get_action_handler(mock_templar)
+ handler = te._get_action_handler(mock_connection, mock_templar)
self.assertIs(mock.sentinel.handler, handler)
action_loader.has_plugin.assert_has_calls([mock.call(action, collection_list=te._task.collections),
mock.call(module_prefix, collection_list=te._task.collections)])
- action_loader.get.assert_called_with(
- 'ansible.legacy.normal', task=te._task, connection=te._connection,
+ action_loader.get.assert_called_once_with(
+ 'ansible.legacy.normal', task=te._task, connection=mock_connection,
play_context=te._play_context, loader=te._loader,
templar=mock_templar, shared_loader_obj=te._shared_loader_obj,
collection_list=None)
@@ -328,7 +318,6 @@ class TestTaskExecutor(unittest.TestCase):
mock_task.become = False
mock_task.retries = 0
mock_task.delay = -1
- mock_task.delegate_to = None
mock_task.register = 'foo'
mock_task.until = None
mock_task.changed_when = None
@@ -340,7 +329,6 @@ class TestTaskExecutor(unittest.TestCase):
# other reason is that if I specify 0 here, the test fails. ;)
mock_task.async_val = 1
mock_task.poll = 0
- mock_task.evaluate_conditional_with_result.return_value = (True, None)
mock_play_context = MagicMock()
mock_play_context.post_validate.return_value = None
@@ -355,9 +343,6 @@ class TestTaskExecutor(unittest.TestCase):
mock_action = MagicMock()
mock_queue = MagicMock()
- mock_vm = MagicMock()
- mock_vm.get_delegated_vars_and_hostname.return_value = {}, None
-
shared_loader = MagicMock()
new_stdin = None
job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
@@ -371,14 +356,11 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=shared_loader,
final_q=mock_queue,
- variable_manager=mock_vm,
)
te._get_connection = MagicMock(return_value=mock_connection)
context = MagicMock()
-
- with patch('ansible.executor.task_executor.start_connection'):
- te._get_action_handler_with_context = MagicMock(return_value=get_with_context_result(mock_action, context))
+ te._get_action_handler_with_context = MagicMock(return_value=get_with_context_result(mock_action, context))
mock_action.run.return_value = dict(ansible_facts=dict())
res = te._execute()
@@ -410,6 +392,8 @@ class TestTaskExecutor(unittest.TestCase):
mock_play_context = MagicMock()
+ mock_connection = MagicMock()
+
mock_action = MagicMock()
mock_queue = MagicMock()
@@ -428,7 +412,6 @@ class TestTaskExecutor(unittest.TestCase):
loader=fake_loader,
shared_loader_obj=shared_loader,
final_q=mock_queue,
- variable_manager=MagicMock(),
)
te._connection = MagicMock()
diff --git a/test/units/galaxy/test_api.py b/test/units/galaxy/test_api.py
index b019f1aa..064aff29 100644
--- a/test/units/galaxy/test_api.py
+++ b/test/units/galaxy/test_api.py
@@ -24,7 +24,7 @@ from ansible.errors import AnsibleError
from ansible.galaxy import api as galaxy_api
from ansible.galaxy.api import CollectionVersionMetadata, GalaxyAPI, GalaxyError
from ansible.galaxy.token import BasicAuthToken, GalaxyToken, KeycloakToken
-from ansible.module_utils.common.text.converters import to_native, to_text
+from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.six.moves.urllib import error as urllib_error
from ansible.utils import context_objects as co
from ansible.utils.display import Display
@@ -463,9 +463,10 @@ def test_publish_failure(api_version, collection_url, response, expected, collec
def test_wait_import_task(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ if token_ins:
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.return_value = StringIO(u'{"state":"success","finished_at":"time"}')
@@ -495,9 +496,10 @@ def test_wait_import_task(server_url, api_version, token_type, token_ins, import
def test_wait_import_task_multiple_requests(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ if token_ins:
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
@@ -541,9 +543,10 @@ def test_wait_import_task_multiple_requests(server_url, api_version, token_type,
def test_wait_import_task_with_failure(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ if token_ins:
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
@@ -617,9 +620,10 @@ def test_wait_import_task_with_failure(server_url, api_version, token_type, toke
def test_wait_import_task_with_failure_no_error(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ if token_ins:
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
@@ -689,9 +693,10 @@ def test_wait_import_task_with_failure_no_error(server_url, api_version, token_t
def test_wait_import_task_timeout(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
- mock_token_get = MagicMock()
- mock_token_get.return_value = 'my token'
- monkeypatch.setattr(token_ins, 'get', mock_token_get)
+ if token_ins:
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
def return_response(*args, **kwargs):
return StringIO(u'{"state":"waiting"}')
diff --git a/test/units/galaxy/test_collection.py b/test/units/galaxy/test_collection.py
index 991184ae..106251c5 100644
--- a/test/units/galaxy/test_collection.py
+++ b/test/units/galaxy/test_collection.py
@@ -20,11 +20,10 @@ from unittest.mock import MagicMock, mock_open, patch
import ansible.constants as C
from ansible import context
-from ansible.cli import galaxy
-from ansible.cli.galaxy import GalaxyCLI
+from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF
from ansible.errors import AnsibleError
from ansible.galaxy import api, collection, token
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.six.moves import builtins
from ansible.utils import context_objects as co
from ansible.utils.display import Display
@@ -172,6 +171,28 @@ def manifest_info(manifest_template):
@pytest.fixture()
+def files_manifest_info():
+ return {
+ "files": [
+ {
+ "name": ".",
+ "ftype": "dir",
+ "chksum_type": None,
+ "chksum_sha256": None,
+ "format": 1
+ },
+ {
+ "name": "README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "individual_file_checksum",
+ "format": 1
+ }
+ ],
+ "format": 1}
+
+
+@pytest.fixture()
def manifest(manifest_info):
b_data = to_bytes(json.dumps(manifest_info))
@@ -224,19 +245,23 @@ def test_cli_options(required_signature_count, valid, monkeypatch):
{
'url': 'https://galaxy.ansible.com',
'validate_certs': 'False',
+ 'v3': 'False',
},
# Expected server attributes
{
'validate_certs': False,
+ '_available_api_versions': {},
},
),
(
{
'url': 'https://galaxy.ansible.com',
'validate_certs': 'True',
+ 'v3': 'True',
},
{
'validate_certs': True,
+ '_available_api_versions': {'v3': '/v3'},
},
),
],
@@ -254,6 +279,7 @@ def test_bool_type_server_config_options(config, server, monkeypatch):
"server_list=server1\n",
"[galaxy_server.server1]",
"url=%s" % config['url'],
+ "v3=%s" % config['v3'],
"validate_certs=%s\n" % config['validate_certs'],
]
@@ -273,6 +299,7 @@ def test_bool_type_server_config_options(config, server, monkeypatch):
assert galaxy_cli.api_servers[0].name == 'server1'
assert galaxy_cli.api_servers[0].validate_certs == server['validate_certs']
+ assert galaxy_cli.api_servers[0]._available_api_versions == server['_available_api_versions']
@pytest.mark.parametrize('global_ignore_certs', [True, False])
@@ -384,55 +411,6 @@ def test_validate_certs_server_config(ignore_certs_cfg, ignore_certs_cli, expect
assert galaxy_cli.api_servers[2].validate_certs is expected_server3_validate_certs
-@pytest.mark.parametrize(
- ["timeout_cli", "timeout_cfg", "timeout_fallback", "expected_timeout"],
- [
- (None, None, None, 60),
- (None, None, 10, 10),
- (None, 20, 10, 20),
- (30, 20, 10, 30),
- ]
-)
-def test_timeout_server_config(timeout_cli, timeout_cfg, timeout_fallback, expected_timeout, monkeypatch):
- cli_args = [
- 'ansible-galaxy',
- 'collection',
- 'install',
- 'namespace.collection:1.0.0',
- ]
- if timeout_cli is not None:
- cli_args.extend(["--timeout", f"{timeout_cli}"])
-
- cfg_lines = ["[galaxy]", "server_list=server1"]
- if timeout_fallback is not None:
- cfg_lines.append(f"server_timeout={timeout_fallback}")
-
- # fix default in server config since C.GALAXY_SERVER_TIMEOUT was already evaluated
- server_additional = galaxy.SERVER_ADDITIONAL.copy()
- server_additional['timeout']['default'] = timeout_fallback
- monkeypatch.setattr(galaxy, 'SERVER_ADDITIONAL', server_additional)
-
- cfg_lines.extend(["[galaxy_server.server1]", "url=https://galaxy.ansible.com/api/"])
- if timeout_cfg is not None:
- cfg_lines.append(f"timeout={timeout_cfg}")
-
- monkeypatch.setattr(C, 'GALAXY_SERVER_LIST', ['server1'])
-
- with tempfile.NamedTemporaryFile(suffix='.cfg') as tmp_file:
- tmp_file.write(to_bytes('\n'.join(cfg_lines), errors='surrogate_or_strict'))
- tmp_file.flush()
-
- monkeypatch.setattr(C.config, '_config_file', tmp_file.name)
- C.config._parse_config_file()
-
- galaxy_cli = GalaxyCLI(args=cli_args)
- mock_execute_install = MagicMock()
- monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
- galaxy_cli.run()
-
- assert galaxy_cli.api_servers[0].timeout == expected_timeout
-
-
def test_build_collection_no_galaxy_yaml():
fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)
@@ -501,19 +479,19 @@ def test_build_with_existing_files_and_manifest(collection_input):
with tarfile.open(output_artifact, mode='r') as actual:
members = actual.getmembers()
- manifest_file = [m for m in members if m.path == "MANIFEST.json"][0]
+ manifest_file = next(m for m in members if m.path == "MANIFEST.json")
manifest_file_obj = actual.extractfile(manifest_file.name)
manifest_file_text = manifest_file_obj.read()
manifest_file_obj.close()
assert manifest_file_text != b'{"collection_info": {"version": "6.6.6"}, "version": 1}'
- json_file = [m for m in members if m.path == "MANIFEST.json"][0]
+ json_file = next(m for m in members if m.path == "MANIFEST.json")
json_file_obj = actual.extractfile(json_file.name)
json_file_text = json_file_obj.read()
json_file_obj.close()
assert json_file_text != b'{"files": [], "format": 1}'
- sub_manifest_file = [m for m in members if m.path == "plugins/MANIFEST.json"][0]
+ sub_manifest_file = next(m for m in members if m.path == "plugins/MANIFEST.json")
sub_manifest_file_obj = actual.extractfile(sub_manifest_file.name)
sub_manifest_file_text = sub_manifest_file_obj.read()
sub_manifest_file_obj.close()
@@ -640,7 +618,7 @@ def test_build_ignore_files_and_folders(collection_input, monkeypatch):
tests_file.write('random')
tests_file.flush()
- actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
+ actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel)
assert actual['format'] == 1
for manifest_entry in actual['files']:
@@ -676,7 +654,7 @@ def test_build_ignore_older_release_in_root(collection_input, monkeypatch):
file_obj.write('random')
file_obj.flush()
- actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
+ actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel)
assert actual['format'] == 1
plugin_release_found = False
@@ -704,7 +682,7 @@ def test_build_ignore_patterns(collection_input, monkeypatch):
actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection',
['*.md', 'plugins/action', 'playbooks/*.j2'],
- Sentinel, None)
+ Sentinel)
assert actual['format'] == 1
expected_missing = [
@@ -755,7 +733,7 @@ def test_build_ignore_symlink_target_outside_collection(collection_input, monkey
link_path = os.path.join(input_dir, 'plugins', 'connection')
os.symlink(outside_dir, link_path)
- actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
+ actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel)
for manifest_entry in actual['files']:
assert manifest_entry['name'] != 'plugins/connection'
@@ -779,7 +757,7 @@ def test_build_copy_symlink_target_inside_collection(collection_input):
os.symlink(roles_target, roles_link)
- actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
+ actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel)
linked_entries = [e for e in actual['files'] if e['name'].startswith('playbooks/roles/linked')]
assert len(linked_entries) == 1
@@ -812,11 +790,11 @@ def test_build_with_symlink_inside_collection(collection_input):
with tarfile.open(output_artifact, mode='r') as actual:
members = actual.getmembers()
- linked_folder = [m for m in members if m.path == 'playbooks/roles/linked'][0]
+ linked_folder = next(m for m in members if m.path == 'playbooks/roles/linked')
assert linked_folder.type == tarfile.SYMTYPE
assert linked_folder.linkname == '../../roles/linked'
- linked_file = [m for m in members if m.path == 'docs/README.md'][0]
+ linked_file = next(m for m in members if m.path == 'docs/README.md')
assert linked_file.type == tarfile.SYMTYPE
assert linked_file.linkname == '../README.md'
@@ -824,7 +802,7 @@ def test_build_with_symlink_inside_collection(collection_input):
actual_file = secure_hash_s(linked_file_obj.read())
linked_file_obj.close()
- assert actual_file == '08f24200b9fbe18903e7a50930c9d0df0b8d7da3' # shasum test/units/cli/test_data/collection_skeleton/README.md
+ assert actual_file == '63444bfc766154e1bc7557ef6280de20d03fcd81'
def test_publish_no_wait(galaxy_server, collection_artifact, monkeypatch):
@@ -876,6 +854,57 @@ def test_publish_with_wait(galaxy_server, collection_artifact, monkeypatch):
% galaxy_server.api_server
+def test_find_existing_collections(tmp_path_factory, monkeypatch):
+ test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
+ concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
+ collection1 = os.path.join(test_dir, 'namespace1', 'collection1')
+ collection2 = os.path.join(test_dir, 'namespace2', 'collection2')
+ fake_collection1 = os.path.join(test_dir, 'namespace3', 'collection3')
+ fake_collection2 = os.path.join(test_dir, 'namespace4')
+ os.makedirs(collection1)
+ os.makedirs(collection2)
+ os.makedirs(os.path.split(fake_collection1)[0])
+
+ open(fake_collection1, 'wb+').close()
+ open(fake_collection2, 'wb+').close()
+
+ collection1_manifest = json.dumps({
+ 'collection_info': {
+ 'namespace': 'namespace1',
+ 'name': 'collection1',
+ 'version': '1.2.3',
+ 'authors': ['Jordan Borean'],
+ 'readme': 'README.md',
+ 'dependencies': {},
+ },
+ 'format': 1,
+ })
+ with open(os.path.join(collection1, 'MANIFEST.json'), 'wb') as manifest_obj:
+ manifest_obj.write(to_bytes(collection1_manifest))
+
+ mock_warning = MagicMock()
+ monkeypatch.setattr(Display, 'warning', mock_warning)
+
+ actual = list(collection.find_existing_collections(test_dir, artifacts_manager=concrete_artifact_cm))
+
+ assert len(actual) == 2
+ for actual_collection in actual:
+ if '%s.%s' % (actual_collection.namespace, actual_collection.name) == 'namespace1.collection1':
+ assert actual_collection.namespace == 'namespace1'
+ assert actual_collection.name == 'collection1'
+ assert actual_collection.ver == '1.2.3'
+ assert to_text(actual_collection.src) == collection1
+ else:
+ assert actual_collection.namespace == 'namespace2'
+ assert actual_collection.name == 'collection2'
+ assert actual_collection.ver == '*'
+ assert to_text(actual_collection.src) == collection2
+
+ assert mock_warning.call_count == 1
+ assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, nor has it galaxy.yml: " \
+ "cannot detect version." % to_text(collection2)
+
+
def test_download_file(tmp_path_factory, monkeypatch):
temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
@@ -1082,7 +1111,7 @@ def test_verify_file_hash_deleted_file(manifest_info):
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=False)) as mock_isfile:
collection._verify_file_hash(b'path/', 'file', digest, error_queue)
- mock_isfile.assert_called_once()
+ assert mock_isfile.called_once
assert len(error_queue) == 1
assert error_queue[0].installed is None
@@ -1105,7 +1134,7 @@ def test_verify_file_hash_matching_hash(manifest_info):
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
collection._verify_file_hash(b'path/', 'file', digest, error_queue)
- mock_isfile.assert_called_once()
+ assert mock_isfile.called_once
assert error_queue == []
@@ -1127,7 +1156,7 @@ def test_verify_file_hash_mismatching_hash(manifest_info):
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
collection._verify_file_hash(b'path/', 'file', different_digest, error_queue)
- mock_isfile.assert_called_once()
+ assert mock_isfile.called_once
assert len(error_queue) == 1
assert error_queue[0].installed == digest
diff --git a/test/units/galaxy/test_collection_install.py b/test/units/galaxy/test_collection_install.py
index a61ae406..2118f0ec 100644
--- a/test/units/galaxy/test_collection_install.py
+++ b/test/units/galaxy/test_collection_install.py
@@ -18,6 +18,7 @@ import yaml
from io import BytesIO, StringIO
from unittest.mock import MagicMock, patch
+from unittest import mock
import ansible.module_utils.six.moves.urllib.error as urllib_error
@@ -26,7 +27,7 @@ from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError
from ansible.galaxy import collection, api, dependency_resolution
from ansible.galaxy.dependency_resolution.dataclasses import Candidate, Requirement
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.common.process import get_bin_path
from ansible.utils import context_objects as co
from ansible.utils.display import Display
@@ -52,6 +53,78 @@ def call_galaxy_cli(args):
co.GlobalCLIArgs._Singleton__instance = orig
+def artifact_json(namespace, name, version, dependencies, server):
+ json_str = json.dumps({
+ 'artifact': {
+ 'filename': '%s-%s-%s.tar.gz' % (namespace, name, version),
+ 'sha256': '2d76f3b8c4bab1072848107fb3914c345f71a12a1722f25c08f5d3f51f4ab5fd',
+ 'size': 1234,
+ },
+ 'download_url': '%s/download/%s-%s-%s.tar.gz' % (server, namespace, name, version),
+ 'metadata': {
+ 'namespace': namespace,
+ 'name': name,
+ 'dependencies': dependencies,
+ },
+ 'version': version
+ })
+ return to_text(json_str)
+
+
+def artifact_versions_json(namespace, name, versions, galaxy_api, available_api_versions=None):
+ results = []
+ available_api_versions = available_api_versions or {}
+ api_version = 'v2'
+ if 'v3' in available_api_versions:
+ api_version = 'v3'
+ for version in versions:
+ results.append({
+ 'href': '%s/api/%s/%s/%s/versions/%s/' % (galaxy_api.api_server, api_version, namespace, name, version),
+ 'version': version,
+ })
+
+ if api_version == 'v2':
+ json_str = json.dumps({
+ 'count': len(versions),
+ 'next': None,
+ 'previous': None,
+ 'results': results
+ })
+
+ if api_version == 'v3':
+ response = {'meta': {'count': len(versions)},
+ 'data': results,
+ 'links': {'first': None,
+ 'last': None,
+ 'next': None,
+ 'previous': None},
+ }
+ json_str = json.dumps(response)
+ return to_text(json_str)
+
+
+def error_json(galaxy_api, errors_to_return=None, available_api_versions=None):
+ errors_to_return = errors_to_return or []
+ available_api_versions = available_api_versions or {}
+
+ response = {}
+
+ api_version = 'v2'
+ if 'v3' in available_api_versions:
+ api_version = 'v3'
+
+ if api_version == 'v2':
+ assert len(errors_to_return) <= 1
+ if errors_to_return:
+ response = errors_to_return[0]
+
+ if api_version == 'v3':
+ response['errors'] = errors_to_return
+
+ json_str = json.dumps(response)
+ return to_text(json_str)
+
+
@pytest.fixture(autouse='function')
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
@@ -298,27 +371,6 @@ def test_build_requirement_from_tar(collection_artifact):
assert actual.ver == u'0.1.0'
-def test_build_requirement_from_tar_url(tmp_path_factory):
- test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
- concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
- test_url = 'https://example.com/org/repo/sample.tar.gz'
- expected = fr"^Failed to download collection tar from '{to_text(test_url)}'"
-
- with pytest.raises(AnsibleError, match=expected):
- Requirement.from_requirement_dict({'name': test_url, 'type': 'url'}, concrete_artifact_cm)
-
-
-def test_build_requirement_from_tar_url_wrong_type(tmp_path_factory):
- test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
- concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
- test_url = 'https://example.com/org/repo/sample.tar.gz'
- expected = fr"^Unable to find collection artifact file at '{to_text(test_url)}'\.$"
-
- with pytest.raises(AnsibleError, match=expected):
- # Specified wrong collection type for http URL
- Requirement.from_requirement_dict({'name': test_url, 'type': 'file'}, concrete_artifact_cm)
-
-
def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
test_file = os.path.join(test_dir, b'fake.tar.gz')
@@ -843,8 +895,7 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch):
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
- collection.install_collections(
- requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
+ collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
assert os.path.isdir(collection_path)
@@ -868,6 +919,57 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch):
assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
+def test_install_collections_existing_without_force(collection_artifact, monkeypatch):
+ collection_path, collection_tar = collection_artifact
+ temp_path = os.path.split(collection_tar)[0]
+
+ mock_display = MagicMock()
+ monkeypatch.setattr(Display, 'display', mock_display)
+
+ concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
+
+ assert os.path.isdir(collection_path)
+
+ requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
+ collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
+
+ assert os.path.isdir(collection_path)
+
+ actual_files = os.listdir(collection_path)
+ actual_files.sort()
+ assert actual_files == [b'README.md', b'docs', b'galaxy.yml', b'playbooks', b'plugins', b'roles', b'runme.sh']
+
+ # Filter out the progress cursor display calls.
+ display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
+ assert len(display_msgs) == 1
+
+ assert display_msgs[0] == 'Nothing to do. All requested collections are already installed. If you want to reinstall them, consider using `--force`.'
+
+ for msg in display_msgs:
+ assert 'WARNING' not in msg
+
+
+def test_install_missing_metadata_warning(collection_artifact, monkeypatch):
+ collection_path, collection_tar = collection_artifact
+ temp_path = os.path.split(collection_tar)[0]
+
+ mock_display = MagicMock()
+ monkeypatch.setattr(Display, 'display', mock_display)
+
+ for file in [b'MANIFEST.json', b'galaxy.yml']:
+ b_path = os.path.join(collection_path, file)
+ if os.path.isfile(b_path):
+ os.unlink(b_path)
+
+ concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
+ requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
+ collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
+
+ display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
+
+ assert 'WARNING' in display_msgs[0]
+
+
# Makes sure we don't get stuck in some recursive loop
@pytest.mark.parametrize('collection_artifact', [
{'ansible_namespace.collection': '>=0.0.1'},
@@ -882,8 +984,7 @@ def test_install_collection_with_circular_dependency(collection_artifact, monkey
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
- collection.install_collections(
- requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
+ collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
assert os.path.isdir(collection_path)
@@ -920,8 +1021,7 @@ def test_install_collection_with_no_dependency(collection_artifact, monkeypatch)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
- collection.install_collections(
- requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
+ collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False)
assert os.path.isdir(collection_path)
diff --git a/test/units/galaxy/test_role_install.py b/test/units/galaxy/test_role_install.py
index 819ed186..687fcac1 100644
--- a/test/units/galaxy/test_role_install.py
+++ b/test/units/galaxy/test_role_install.py
@@ -7,7 +7,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
import os
import functools
import pytest
@@ -17,7 +16,7 @@ from io import StringIO
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.galaxy import api, role, Galaxy
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.utils import context_objects as co
@@ -25,7 +24,7 @@ def call_galaxy_cli(args):
orig = co.GlobalCLIArgs._Singleton__instance
co.GlobalCLIArgs._Singleton__instance = None
try:
- return GalaxyCLI(args=['ansible-galaxy', 'role'] + args).run()
+ GalaxyCLI(args=['ansible-galaxy', 'role'] + args).run()
finally:
co.GlobalCLIArgs._Singleton__instance = orig
@@ -121,22 +120,6 @@ def test_role_download_github_no_download_url_for_version(init_mock_temp_file, m
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
-@pytest.mark.parametrize(
- 'state,rc',
- [('SUCCESS', 0), ('FAILED', 1),]
-)
-def test_role_import(state, rc, mocker, galaxy_server, monkeypatch):
- responses = [
- {"available_versions": {"v1": "v1/"}},
- {"results": [{'id': 12345, 'github_user': 'user', 'github_repo': 'role', 'github_reference': None, 'summary_fields': {'role': {'name': 'role'}}}]},
- {"results": [{'state': 'WAITING', 'id': 12345, 'summary_fields': {'task_messages': []}}]},
- {"results": [{'state': state, 'id': 12345, 'summary_fields': {'task_messages': []}}]},
- ]
- mock_api = mocker.MagicMock(side_effect=[StringIO(json.dumps(rsp)) for rsp in responses])
- monkeypatch.setattr(api, 'open_url', mock_api)
- assert call_galaxy_cli(['import', 'user', 'role']) == rc
-
-
def test_role_download_url(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
diff --git a/test/units/galaxy/test_token.py b/test/units/galaxy/test_token.py
index 9fc12d46..24af3863 100644
--- a/test/units/galaxy/test_token.py
+++ b/test/units/galaxy/test_token.py
@@ -13,7 +13,7 @@ from unittest.mock import MagicMock
import ansible.constants as C
from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF
from ansible.galaxy.token import GalaxyToken, NoTokenSentinel
-from ansible.module_utils.common.text.converters import to_bytes, to_text
+from ansible.module_utils._text import to_bytes, to_text
@pytest.fixture()
diff --git a/test/units/inventory/test_host.py b/test/units/inventory/test_host.py
index 712ed302..c8f47714 100644
--- a/test/units/inventory/test_host.py
+++ b/test/units/inventory/test_host.py
@@ -69,10 +69,10 @@ class TestHost(unittest.TestCase):
def test_equals_none(self):
other = None
- assert not (self.hostA == other)
- assert not (other == self.hostA)
- assert self.hostA != other
- assert other != self.hostA
+ self.hostA == other
+ other == self.hostA
+ self.hostA != other
+ other != self.hostA
self.assertNotEqual(self.hostA, other)
def test_serialize(self):
diff --git a/test/units/mock/loader.py b/test/units/mock/loader.py
index 9dc32cae..f6ceb379 100644
--- a/test/units/mock/loader.py
+++ b/test/units/mock/loader.py
@@ -21,15 +21,16 @@ __metaclass__ = type
import os
+from ansible.errors import AnsibleParserError
from ansible.parsing.dataloader import DataLoader
-from ansible.module_utils.common.text.converters import to_bytes, to_text
+from ansible.module_utils._text import to_bytes, to_text
class DictDataLoader(DataLoader):
def __init__(self, file_mapping=None):
file_mapping = {} if file_mapping is None else file_mapping
- assert isinstance(file_mapping, dict)
+ assert type(file_mapping) == dict
super(DictDataLoader, self).__init__()
@@ -47,7 +48,11 @@ class DictDataLoader(DataLoader):
# TODO: the real _get_file_contents returns a bytestring, so we actually convert the
# unicode/text it's created with to utf-8
def _get_file_contents(self, file_name):
- return to_bytes(self._file_mapping[file_name]), False
+ path = to_text(file_name)
+ if path in self._file_mapping:
+ return to_bytes(self._file_mapping[file_name]), False
+ else:
+ raise AnsibleParserError("file not found: %s" % file_name)
def path_exists(self, path):
path = to_text(path)
@@ -86,6 +91,25 @@ class DictDataLoader(DataLoader):
self._add_known_directory(dirname)
dirname = os.path.dirname(dirname)
+ def push(self, path, content):
+ rebuild_dirs = False
+ if path not in self._file_mapping:
+ rebuild_dirs = True
+
+ self._file_mapping[path] = content
+
+ if rebuild_dirs:
+ self._build_known_directories()
+
+ def pop(self, path):
+ if path in self._file_mapping:
+ del self._file_mapping[path]
+ self._build_known_directories()
+
+ def clear(self):
+ self._file_mapping = dict()
+ self._known_directories = []
+
def get_basedir(self):
return os.getcwd()
diff --git a/test/units/mock/procenv.py b/test/units/mock/procenv.py
index 1570c87e..271a207e 100644
--- a/test/units/mock/procenv.py
+++ b/test/units/mock/procenv.py
@@ -27,7 +27,7 @@ from contextlib import contextmanager
from io import BytesIO, StringIO
from units.compat import unittest
from ansible.module_utils.six import PY3
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
@contextmanager
@@ -54,9 +54,30 @@ def swap_stdin_and_argv(stdin_data='', argv_data=tuple()):
sys.argv = real_argv
+@contextmanager
+def swap_stdout():
+ """
+ context manager that temporarily replaces stdout for tests that need to verify output
+ """
+ old_stdout = sys.stdout
+
+ if PY3:
+ fake_stream = StringIO()
+ else:
+ fake_stream = BytesIO()
+
+ try:
+ sys.stdout = fake_stream
+
+ yield fake_stream
+ finally:
+ sys.stdout = old_stdout
+
+
class ModuleTestCase(unittest.TestCase):
- def setUp(self):
- module_args = {'_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False}
+ def setUp(self, module_args=None):
+ if module_args is None:
+ module_args = {'_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False}
args = json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args))
diff --git a/test/units/mock/vault_helper.py b/test/units/mock/vault_helper.py
index 5b2fdd2a..dcce9c78 100644
--- a/test/units/mock/vault_helper.py
+++ b/test/units/mock/vault_helper.py
@@ -15,7 +15,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
from ansible.parsing.vault import VaultSecret
diff --git a/test/units/mock/yaml_helper.py b/test/units/mock/yaml_helper.py
index 9f8b063b..1ef17215 100644
--- a/test/units/mock/yaml_helper.py
+++ b/test/units/mock/yaml_helper.py
@@ -4,6 +4,8 @@ __metaclass__ = type
import io
import yaml
+from ansible.module_utils.six import PY3
+from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.dumper import AnsibleDumper
@@ -13,14 +15,21 @@ class YamlTestUtils(object):
"""Vault related tests will want to override this.
Vault cases should setup a AnsibleLoader that has the vault password."""
+ return AnsibleLoader(stream)
def _dump_stream(self, obj, stream, dumper=None):
"""Dump to a py2-unicode or py3-string stream."""
- return yaml.dump(obj, stream, Dumper=dumper)
+ if PY3:
+ return yaml.dump(obj, stream, Dumper=dumper)
+ else:
+ return yaml.dump(obj, stream, Dumper=dumper, encoding=None)
def _dump_string(self, obj, dumper=None):
"""Dump to a py2-unicode or py3-string"""
- return yaml.dump(obj, Dumper=dumper)
+ if PY3:
+ return yaml.dump(obj, Dumper=dumper)
+ else:
+ return yaml.dump(obj, Dumper=dumper, encoding=None)
def _dump_load_cycle(self, obj):
# Each pass though a dump or load revs the 'generation'
@@ -53,3 +62,63 @@ class YamlTestUtils(object):
# should be transitive, but...
self.assertEqual(obj_2, obj_3)
self.assertEqual(string_from_object_dump, string_from_object_dump_3)
+
+ def _old_dump_load_cycle(self, obj):
+ '''Dump the passed in object to yaml, load it back up, dump again, compare.'''
+ stream = io.StringIO()
+
+ yaml_string = self._dump_string(obj, dumper=AnsibleDumper)
+ self._dump_stream(obj, stream, dumper=AnsibleDumper)
+
+ yaml_string_from_stream = stream.getvalue()
+
+ # reset stream
+ stream.seek(0)
+
+ loader = self._loader(stream)
+ # loader = AnsibleLoader(stream, vault_password=self.vault_password)
+ obj_from_stream = loader.get_data()
+
+ stream_from_string = io.StringIO(yaml_string)
+ loader2 = self._loader(stream_from_string)
+ # loader2 = AnsibleLoader(stream_from_string, vault_password=self.vault_password)
+ obj_from_string = loader2.get_data()
+
+ stream_obj_from_stream = io.StringIO()
+ stream_obj_from_string = io.StringIO()
+
+ if PY3:
+ yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper)
+ yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper)
+ else:
+ yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper, encoding=None)
+ yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper, encoding=None)
+
+ yaml_string_stream_obj_from_stream = stream_obj_from_stream.getvalue()
+ yaml_string_stream_obj_from_string = stream_obj_from_string.getvalue()
+
+ stream_obj_from_stream.seek(0)
+ stream_obj_from_string.seek(0)
+
+ if PY3:
+ yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper)
+ yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper)
+ else:
+ yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper, encoding=None)
+ yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper, encoding=None)
+
+ assert yaml_string == yaml_string_obj_from_stream
+ assert yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string
+ assert (yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string == yaml_string_stream_obj_from_stream ==
+ yaml_string_stream_obj_from_string)
+ assert obj == obj_from_stream
+ assert obj == obj_from_string
+ assert obj == yaml_string_obj_from_stream
+ assert obj == yaml_string_obj_from_string
+ assert obj == obj_from_stream == obj_from_string == yaml_string_obj_from_stream == yaml_string_obj_from_string
+ return {'obj': obj,
+ 'yaml_string': yaml_string,
+ 'yaml_string_from_stream': yaml_string_from_stream,
+ 'obj_from_stream': obj_from_stream,
+ 'obj_from_string': obj_from_string,
+ 'yaml_string_obj_from_string': yaml_string_obj_from_string}
diff --git a/test/units/module_utils/basic/test__symbolic_mode_to_octal.py b/test/units/module_utils/basic/test__symbolic_mode_to_octal.py
index b3a73e5a..7793b348 100644
--- a/test/units/module_utils/basic/test__symbolic_mode_to_octal.py
+++ b/test/units/module_utils/basic/test__symbolic_mode_to_octal.py
@@ -63,14 +63,6 @@ DATA = ( # Going from no permissions to setting all for user, group, and/or oth
# Multiple permissions
(0o040000, u'u=rw-x+X,g=r-x+X,o=r-x+X', 0o0755),
(0o100000, u'u=rw-x+X,g=r-x+X,o=r-x+X', 0o0644),
- (0o040000, u'ug=rx,o=', 0o0550),
- (0o100000, u'ug=rx,o=', 0o0550),
- (0o040000, u'u=rx,g=r', 0o0540),
- (0o100000, u'u=rx,g=r', 0o0540),
- (0o040777, u'ug=rx,o=', 0o0550),
- (0o100777, u'ug=rx,o=', 0o0550),
- (0o040777, u'u=rx,g=r', 0o0547),
- (0o100777, u'u=rx,g=r', 0o0547),
)
UMASK_DATA = (
diff --git a/test/units/module_utils/basic/test_argument_spec.py b/test/units/module_utils/basic/test_argument_spec.py
index 5dbaf50c..211d65a2 100644
--- a/test/units/module_utils/basic/test_argument_spec.py
+++ b/test/units/module_utils/basic/test_argument_spec.py
@@ -453,7 +453,7 @@ class TestComplexOptions:
'bar1': None, 'bar2': None, 'bar3': None, 'bar4': None}]
),
# Check for elements in sub-options
- ({"foobar": [{"foo": "good", "bam": "required_one_of", "bar1": [1, "good", "yes"], "bar2": ['1', 1], "bar3": ['1.3', 1.3, 1]}]},
+ ({"foobar": [{"foo": "good", "bam": "required_one_of", "bar1": [1, "good", "yes"], "bar2": ['1', 1], "bar3":['1.3', 1.3, 1]}]},
[{'foo': 'good', 'bam1': None, 'bam2': 'test', 'bam3': None, 'bam4': None, 'bar': None, 'baz': None, 'bam': 'required_one_of',
'bar1': ["1", "good", "yes"], 'bar2': [1, 1], 'bar3': [1.3, 1.3, 1.0], 'bar4': None}]
),
diff --git a/test/units/module_utils/basic/test_command_nonexisting.py b/test/units/module_utils/basic/test_command_nonexisting.py
index 0dd3bd98..6ed7f91b 100644
--- a/test/units/module_utils/basic/test_command_nonexisting.py
+++ b/test/units/module_utils/basic/test_command_nonexisting.py
@@ -1,11 +1,14 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import sys
+import pytest
import json
import sys
import pytest
import subprocess
-from ansible.module_utils.common.text.converters import to_bytes
+import ansible.module_utils.basic
+from ansible.module_utils._text import to_bytes
from ansible.module_utils import basic
diff --git a/test/units/module_utils/basic/test_filesystem.py b/test/units/module_utils/basic/test_filesystem.py
index 50e674c4..f09cecf4 100644
--- a/test/units/module_utils/basic/test_filesystem.py
+++ b/test/units/module_utils/basic/test_filesystem.py
@@ -143,8 +143,6 @@ class TestOtherFilesystem(ModuleTestCase):
argument_spec=dict(),
)
- am.selinux_enabled = lambda: False
-
file_args = {
'path': '/path/to/file',
'mode': None,
diff --git a/test/units/module_utils/basic/test_get_available_hash_algorithms.py b/test/units/module_utils/basic/test_get_available_hash_algorithms.py
deleted file mode 100644
index d60f34cc..00000000
--- a/test/units/module_utils/basic/test_get_available_hash_algorithms.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""Unit tests to provide coverage not easily obtained from integration tests."""
-
-from __future__ import (absolute_import, division, print_function)
-
-__metaclass__ = type
-
-import hashlib
-import sys
-
-import pytest
-
-from ansible.module_utils.basic import _get_available_hash_algorithms
-
-
-@pytest.mark.skipif(sys.version_info < (2, 7, 9), reason="requires Python 2.7.9 or later")
-def test_unavailable_algorithm(mocker):
- """Simulate an available algorithm that isn't."""
- expected_algorithms = {'sha256', 'sha512'} # guaranteed to be available
-
- mocker.patch('hashlib.algorithms_available', expected_algorithms | {'not_actually_available'})
-
- available_algorithms = _get_available_hash_algorithms()
-
- assert sorted(expected_algorithms) == sorted(available_algorithms)
-
-
-@pytest.mark.skipif(sys.version_info < (2, 7, 9), reason="requires Python 2.7.9 or later")
-def test_fips_mode(mocker):
- """Simulate running in FIPS mode on Python 2.7.9 or later."""
- expected_algorithms = {'sha256', 'sha512'} # guaranteed to be available
-
- mocker.patch('hashlib.algorithms_available', expected_algorithms | {'md5'})
- mocker.patch('hashlib.md5').side_effect = ValueError() # using md5 in FIPS mode raises a ValueError
-
- available_algorithms = _get_available_hash_algorithms()
-
- assert sorted(expected_algorithms) == sorted(available_algorithms)
-
-
-@pytest.mark.skipif(sys.version_info < (2, 7, 9) or sys.version_info[:2] != (2, 7), reason="requires Python 2.7 (2.7.9 or later)")
-def test_legacy_python(mocker):
- """Simulate behavior on Python 2.7.x earlier than Python 2.7.9."""
- expected_algorithms = {'sha256', 'sha512'} # guaranteed to be available
-
- # This attribute is exclusive to Python 2.7.
- # Since `hashlib.algorithms_available` is used on Python 2.7.9 and later, only Python 2.7.0 through 2.7.8 utilize this attribute.
- mocker.patch('hashlib.algorithms', expected_algorithms)
-
- saved_algorithms = hashlib.algorithms_available
-
- # Make sure that this attribute is unavailable, to simulate running on Python 2.7.0 through 2.7.8.
- # It will be restored immediately after performing the test.
- del hashlib.algorithms_available
-
- try:
- available_algorithms = _get_available_hash_algorithms()
- finally:
- hashlib.algorithms_available = saved_algorithms
-
- assert sorted(expected_algorithms) == sorted(available_algorithms)
diff --git a/test/units/module_utils/basic/test_run_command.py b/test/units/module_utils/basic/test_run_command.py
index 259ac6c4..04211e2d 100644
--- a/test/units/module_utils/basic/test_run_command.py
+++ b/test/units/module_utils/basic/test_run_command.py
@@ -12,7 +12,7 @@ from io import BytesIO
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.six import PY2
from ansible.module_utils.compat import selectors
@@ -109,7 +109,7 @@ def mock_subprocess(mocker):
super(MockSelector, self).close()
self._file_objs = []
- selectors.PollSelector = MockSelector
+ selectors.DefaultSelector = MockSelector
subprocess = mocker.patch('ansible.module_utils.basic.subprocess')
subprocess._output = {mocker.sentinel.stdout: SpecialBytesIO(b'', fh=mocker.sentinel.stdout),
@@ -194,7 +194,7 @@ class TestRunCommandPrompt:
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
def test_prompt_no_match(self, mocker, rc_am):
rc_am._os._cmd_out[mocker.sentinel.stdout] = BytesIO(b'hello')
- (rc, stdout, stderr) = rc_am.run_command('foo', prompt_regex='[pP]assword:')
+ (rc, _, _) = rc_am.run_command('foo', prompt_regex='[pP]assword:')
assert rc == 0
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
@@ -204,7 +204,7 @@ class TestRunCommandPrompt:
fh=mocker.sentinel.stdout),
mocker.sentinel.stderr:
SpecialBytesIO(b'', fh=mocker.sentinel.stderr)}
- (rc, stdout, stderr) = rc_am.run_command('foo', prompt_regex=r'[pP]assword:', data=None)
+ (rc, _, _) = rc_am.run_command('foo', prompt_regex=r'[pP]assword:', data=None)
assert rc == 257
@@ -212,7 +212,7 @@ class TestRunCommandRc:
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
def test_check_rc_false(self, rc_am):
rc_am._subprocess.Popen.return_value.returncode = 1
- (rc, stdout, stderr) = rc_am.run_command('/bin/false', check_rc=False)
+ (rc, _, _) = rc_am.run_command('/bin/false', check_rc=False)
assert rc == 1
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
diff --git a/test/units/module_utils/basic/test_safe_eval.py b/test/units/module_utils/basic/test_safe_eval.py
index fdaab18a..e8538ca9 100644
--- a/test/units/module_utils/basic/test_safe_eval.py
+++ b/test/units/module_utils/basic/test_safe_eval.py
@@ -67,4 +67,4 @@ def test_invalid_strings_with_exceptions(am, code, expected, exception):
if exception is None:
assert res[1] == exception
else:
- assert isinstance(res[1], exception)
+ assert type(res[1]) == exception
diff --git a/test/units/module_utils/basic/test_sanitize_keys.py b/test/units/module_utils/basic/test_sanitize_keys.py
index 3edb216b..180f8662 100644
--- a/test/units/module_utils/basic/test_sanitize_keys.py
+++ b/test/units/module_utils/basic/test_sanitize_keys.py
@@ -6,6 +6,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import pytest
from ansible.module_utils.basic import sanitize_keys
diff --git a/test/units/module_utils/basic/test_selinux.py b/test/units/module_utils/basic/test_selinux.py
index bdb6b9de..d8557685 100644
--- a/test/units/module_utils/basic/test_selinux.py
+++ b/test/units/module_utils/basic/test_selinux.py
@@ -43,21 +43,16 @@ class TestSELinuxMU:
with patch.object(basic, 'HAVE_SELINUX', False):
assert no_args_module().selinux_enabled() is False
- # test selinux present/not-enabled
- disabled_mod = no_args_module()
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.is_selinux_enabled.return_value = 0
- assert disabled_mod.selinux_enabled() is False
-
+ # test selinux present/not-enabled
+ disabled_mod = no_args_module()
+ with patch('ansible.module_utils.compat.selinux.is_selinux_enabled', return_value=0):
+ assert disabled_mod.selinux_enabled() is False
# ensure value is cached (same answer after unpatching)
assert disabled_mod.selinux_enabled() is False
-
# and present / enabled
- with patch.object(basic, 'HAVE_SELINUX', True):
- enabled_mod = no_args_module()
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.is_selinux_enabled.return_value = 1
- assert enabled_mod.selinux_enabled() is True
+ enabled_mod = no_args_module()
+ with patch('ansible.module_utils.compat.selinux.is_selinux_enabled', return_value=1):
+ assert enabled_mod.selinux_enabled() is True
# ensure value is cached (same answer after unpatching)
assert enabled_mod.selinux_enabled() is True
@@ -65,16 +60,12 @@ class TestSELinuxMU:
# selinux unavailable, should return false
with patch.object(basic, 'HAVE_SELINUX', False):
assert no_args_module().selinux_mls_enabled() is False
- # selinux disabled, should return false
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.is_selinux_mls_enabled.return_value = 0
- assert no_args_module(selinux_enabled=False).selinux_mls_enabled() is False
-
- with patch.object(basic, 'HAVE_SELINUX', True):
- # selinux enabled, should pass through the value of is_selinux_mls_enabled
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.is_selinux_mls_enabled.return_value = 1
- assert no_args_module(selinux_enabled=True).selinux_mls_enabled() is True
+ # selinux disabled, should return false
+ with patch('ansible.module_utils.compat.selinux.is_selinux_mls_enabled', return_value=0):
+ assert no_args_module(selinux_enabled=False).selinux_mls_enabled() is False
+ # selinux enabled, should pass through the value of is_selinux_mls_enabled
+ with patch('ansible.module_utils.compat.selinux.is_selinux_mls_enabled', return_value=1):
+ assert no_args_module(selinux_enabled=True).selinux_mls_enabled() is True
def test_selinux_initial_context(self):
# selinux missing/disabled/enabled sans MLS is 3-element None
@@ -89,19 +80,16 @@ class TestSELinuxMU:
assert no_args_module().selinux_default_context(path='/foo/bar') == [None, None, None]
am = no_args_module(selinux_enabled=True, selinux_mls_enabled=True)
- with patch.object(basic, 'selinux', create=True) as selinux:
- # matchpathcon success
- selinux.matchpathcon.return_value = [0, 'unconfined_u:object_r:default_t:s0']
+ # matchpathcon success
+ with patch('ansible.module_utils.compat.selinux.matchpathcon', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
assert am.selinux_default_context(path='/foo/bar') == ['unconfined_u', 'object_r', 'default_t', 's0']
- with patch.object(basic, 'selinux', create=True) as selinux:
- # matchpathcon fail (return initial context value)
- selinux.matchpathcon.return_value = [-1, '']
+ # matchpathcon fail (return initial context value)
+ with patch('ansible.module_utils.compat.selinux.matchpathcon', return_value=[-1, '']):
assert am.selinux_default_context(path='/foo/bar') == [None, None, None, None]
- with patch.object(basic, 'selinux', create=True) as selinux:
- # matchpathcon OSError
- selinux.matchpathcon.side_effect = OSError
+ # matchpathcon OSError
+ with patch('ansible.module_utils.compat.selinux.matchpathcon', side_effect=OSError):
assert am.selinux_default_context(path='/foo/bar') == [None, None, None, None]
def test_selinux_context(self):
@@ -111,23 +99,19 @@ class TestSELinuxMU:
am = no_args_module(selinux_enabled=True, selinux_mls_enabled=True)
# lgetfilecon_raw passthru
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.lgetfilecon_raw.return_value = [0, 'unconfined_u:object_r:default_t:s0']
+ with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
assert am.selinux_context(path='/foo/bar') == ['unconfined_u', 'object_r', 'default_t', 's0']
# lgetfilecon_raw returned a failure
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.lgetfilecon_raw.return_value = [-1, '']
+ with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', return_value=[-1, '']):
assert am.selinux_context(path='/foo/bar') == [None, None, None, None]
# lgetfilecon_raw OSError (should bomb the module)
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.lgetfilecon_raw.side_effect = OSError(errno.ENOENT, 'NotFound')
+ with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', side_effect=OSError(errno.ENOENT, 'NotFound')):
with pytest.raises(SystemExit):
am.selinux_context(path='/foo/bar')
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.lgetfilecon_raw.side_effect = OSError()
+ with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', side_effect=OSError()):
with pytest.raises(SystemExit):
am.selinux_context(path='/foo/bar')
@@ -182,29 +166,25 @@ class TestSELinuxMU:
am.selinux_context = lambda path: ['bar_u', 'bar_r', None, None]
am.is_special_selinux_path = lambda path: (False, None)
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.lsetfilecon.return_value = 0
+ with patch('ansible.module_utils.compat.selinux.lsetfilecon', return_value=0) as m:
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is True
- selinux.lsetfilecon.assert_called_with('/path/to/file', 'foo_u:foo_r:foo_t:s0')
- selinux.lsetfilecon.reset_mock()
+ m.assert_called_with('/path/to/file', 'foo_u:foo_r:foo_t:s0')
+ m.reset_mock()
am.check_mode = True
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is True
- assert not selinux.lsetfilecon.called
+ assert not m.called
am.check_mode = False
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.lsetfilecon.return_value = 1
+ with patch('ansible.module_utils.compat.selinux.lsetfilecon', return_value=1):
with pytest.raises(SystemExit):
am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.lsetfilecon.side_effect = OSError
+ with patch('ansible.module_utils.compat.selinux.lsetfilecon', side_effect=OSError):
with pytest.raises(SystemExit):
am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
am.is_special_selinux_path = lambda path: (True, ['sp_u', 'sp_r', 'sp_t', 's0'])
- with patch.object(basic, 'selinux', create=True) as selinux:
- selinux.lsetfilecon.return_value = 0
+ with patch('ansible.module_utils.compat.selinux.lsetfilecon', return_value=0) as m:
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is True
- selinux.lsetfilecon.assert_called_with('/path/to/file', 'sp_u:sp_r:sp_t:s0')
+ m.assert_called_with('/path/to/file', 'sp_u:sp_r:sp_t:s0')
diff --git a/test/units/module_utils/basic/test_set_cwd.py b/test/units/module_utils/basic/test_set_cwd.py
index c094c622..159236b7 100644
--- a/test/units/module_utils/basic/test_set_cwd.py
+++ b/test/units/module_utils/basic/test_set_cwd.py
@@ -8,10 +8,13 @@ __metaclass__ = type
import json
import os
+import shutil
import tempfile
-from units.compat.mock import patch
-from ansible.module_utils.common.text.converters import to_bytes
+import pytest
+
+from units.compat.mock import patch, MagicMock
+from ansible.module_utils._text import to_bytes
from ansible.module_utils import basic
diff --git a/test/units/module_utils/basic/test_tmpdir.py b/test/units/module_utils/basic/test_tmpdir.py
index ec12508b..818cb9b1 100644
--- a/test/units/module_utils/basic/test_tmpdir.py
+++ b/test/units/module_utils/basic/test_tmpdir.py
@@ -14,7 +14,7 @@ import tempfile
import pytest
from units.compat.mock import patch, MagicMock
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
from ansible.module_utils import basic
diff --git a/test/units/module_utils/common/arg_spec/test_aliases.py b/test/units/module_utils/common/arg_spec/test_aliases.py
index 7522c769..7d30fb0f 100644
--- a/test/units/module_utils/common/arg_spec/test_aliases.py
+++ b/test/units/module_utils/common/arg_spec/test_aliases.py
@@ -9,6 +9,7 @@ import pytest
from ansible.module_utils.errors import AnsibleValidationError, AnsibleValidationErrorMultiple
from ansible.module_utils.common.arg_spec import ArgumentSpecValidator, ValidationResult
+from ansible.module_utils.common.warnings import get_deprecation_messages, get_warning_messages
# id, argument spec, parameters, expected parameters, deprecation, warning
ALIAS_TEST_CASES = [
diff --git a/test/units/module_utils/common/parameters/test_handle_aliases.py b/test/units/module_utils/common/parameters/test_handle_aliases.py
index 6a8c2b2c..e20a8882 100644
--- a/test/units/module_utils/common/parameters/test_handle_aliases.py
+++ b/test/units/module_utils/common/parameters/test_handle_aliases.py
@@ -9,7 +9,7 @@ __metaclass__ = type
import pytest
from ansible.module_utils.common.parameters import _handle_aliases
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
def test_handle_aliases_no_aliases():
diff --git a/test/units/module_utils/common/parameters/test_list_deprecations.py b/test/units/module_utils/common/parameters/test_list_deprecations.py
index d667a2f0..6f0bb71a 100644
--- a/test/units/module_utils/common/parameters/test_list_deprecations.py
+++ b/test/units/module_utils/common/parameters/test_list_deprecations.py
@@ -5,10 +5,21 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
+import pytest
from ansible.module_utils.common.parameters import _list_deprecations
+@pytest.fixture
+def params():
+ return {
+ 'name': 'bob',
+ 'dest': '/etc/hosts',
+ 'state': 'present',
+ 'value': 5,
+ }
+
+
def test_list_deprecations():
argument_spec = {
'old': {'type': 'str', 'removed_in_version': '2.5'},
diff --git a/test/units/module_utils/common/test_collections.py b/test/units/module_utils/common/test_collections.py
index 8424502e..95b2a402 100644
--- a/test/units/module_utils/common/test_collections.py
+++ b/test/units/module_utils/common/test_collections.py
@@ -8,7 +8,8 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.six.moves.collections_abc import Sequence
+from ansible.module_utils.six import Iterator
+from ansible.module_utils.common._collections_compat import Sequence
from ansible.module_utils.common.collections import ImmutableDict, is_iterable, is_sequence
@@ -24,6 +25,16 @@ class SeqStub:
Sequence.register(SeqStub)
+class IteratorStub(Iterator):
+ def __next__(self):
+ raise StopIteration
+
+
+class IterableStub:
+ def __iter__(self):
+ return IteratorStub()
+
+
class FakeAnsibleVaultEncryptedUnicode(Sequence):
__ENCRYPTED__ = True
@@ -31,10 +42,10 @@ class FakeAnsibleVaultEncryptedUnicode(Sequence):
self.data = data
def __getitem__(self, index):
- raise NotImplementedError() # pragma: nocover
+ return self.data[index]
def __len__(self):
- raise NotImplementedError() # pragma: nocover
+ return len(self.data)
TEST_STRINGS = u'he', u'Україна', u'Česká republika'
@@ -82,14 +93,14 @@ def test_sequence_string_types_without_strings(string_input):
@pytest.mark.parametrize(
'seq',
- ([], (), {}, set(), frozenset()),
+ ([], (), {}, set(), frozenset(), IterableStub()),
)
def test_iterable_positive(seq):
assert is_iterable(seq)
@pytest.mark.parametrize(
- 'seq', (object(), 5, 9.)
+ 'seq', (IteratorStub(), object(), 5, 9.)
)
def test_iterable_negative(seq):
assert not is_iterable(seq)
diff --git a/test/units/module_utils/common/text/converters/test_json_encode_fallback.py b/test/units/module_utils/common/text/converters/test_json_encode_fallback.py
index 808bf410..022f38f4 100644
--- a/test/units/module_utils/common/text/converters/test_json_encode_fallback.py
+++ b/test/units/module_utils/common/text/converters/test_json_encode_fallback.py
@@ -20,6 +20,12 @@ class timezone(tzinfo):
def utcoffset(self, dt):
return self._offset
+ def dst(self, dt):
+ return timedelta(0)
+
+ def tzname(self, dt):
+ return None
+
@pytest.mark.parametrize(
'test_input,expected',
diff --git a/test/units/module_utils/common/validation/test_check_missing_parameters.py b/test/units/module_utils/common/validation/test_check_missing_parameters.py
index 364f9439..6cbcb8bf 100644
--- a/test/units/module_utils/common/validation/test_check_missing_parameters.py
+++ b/test/units/module_utils/common/validation/test_check_missing_parameters.py
@@ -8,10 +8,16 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
+from ansible.module_utils.common.validation import check_required_one_of
from ansible.module_utils.common.validation import check_missing_parameters
+@pytest.fixture
+def arguments_terms():
+ return {"path": ""}
+
+
def test_check_missing_parameters():
assert check_missing_parameters([], {}) == []
diff --git a/test/units/module_utils/common/validation/test_check_mutually_exclusive.py b/test/units/module_utils/common/validation/test_check_mutually_exclusive.py
index acc67be8..7bf90760 100644
--- a/test/units/module_utils/common/validation/test_check_mutually_exclusive.py
+++ b/test/units/module_utils/common/validation/test_check_mutually_exclusive.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_mutually_exclusive
diff --git a/test/units/module_utils/common/validation/test_check_required_arguments.py b/test/units/module_utils/common/validation/test_check_required_arguments.py
index eb3d52e2..1dd54584 100644
--- a/test/units/module_utils/common/validation/test_check_required_arguments.py
+++ b/test/units/module_utils/common/validation/test_check_required_arguments.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_required_arguments
diff --git a/test/units/module_utils/common/validation/test_check_required_by.py b/test/units/module_utils/common/validation/test_check_required_by.py
index fcba0c14..62cccff3 100644
--- a/test/units/module_utils/common/validation/test_check_required_by.py
+++ b/test/units/module_utils/common/validation/test_check_required_by.py
@@ -8,7 +8,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_required_by
diff --git a/test/units/module_utils/common/validation/test_check_required_if.py b/test/units/module_utils/common/validation/test_check_required_if.py
index 4590b05c..4189164a 100644
--- a/test/units/module_utils/common/validation/test_check_required_if.py
+++ b/test/units/module_utils/common/validation/test_check_required_if.py
@@ -8,7 +8,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_required_if
diff --git a/test/units/module_utils/common/validation/test_check_required_one_of.py b/test/units/module_utils/common/validation/test_check_required_one_of.py
index efdba537..b0818891 100644
--- a/test/units/module_utils/common/validation/test_check_required_one_of.py
+++ b/test/units/module_utils/common/validation/test_check_required_one_of.py
@@ -8,7 +8,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_required_one_of
diff --git a/test/units/module_utils/common/validation/test_check_required_together.py b/test/units/module_utils/common/validation/test_check_required_together.py
index cf4626ab..8a2daab1 100644
--- a/test/units/module_utils/common/validation/test_check_required_together.py
+++ b/test/units/module_utils/common/validation/test_check_required_together.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_required_together
diff --git a/test/units/module_utils/common/validation/test_check_type_bits.py b/test/units/module_utils/common/validation/test_check_type_bits.py
index aa91da94..7f6b11d3 100644
--- a/test/units/module_utils/common/validation/test_check_type_bits.py
+++ b/test/units/module_utils/common/validation/test_check_type_bits.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_type_bits
diff --git a/test/units/module_utils/common/validation/test_check_type_bool.py b/test/units/module_utils/common/validation/test_check_type_bool.py
index 00b785f6..bd867dc9 100644
--- a/test/units/module_utils/common/validation/test_check_type_bool.py
+++ b/test/units/module_utils/common/validation/test_check_type_bool.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_type_bool
diff --git a/test/units/module_utils/common/validation/test_check_type_bytes.py b/test/units/module_utils/common/validation/test_check_type_bytes.py
index c29e42f8..6ff62dc2 100644
--- a/test/units/module_utils/common/validation/test_check_type_bytes.py
+++ b/test/units/module_utils/common/validation/test_check_type_bytes.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_type_bytes
diff --git a/test/units/module_utils/common/validation/test_check_type_float.py b/test/units/module_utils/common/validation/test_check_type_float.py
index a0218875..57837fae 100644
--- a/test/units/module_utils/common/validation/test_check_type_float.py
+++ b/test/units/module_utils/common/validation/test_check_type_float.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_type_float
diff --git a/test/units/module_utils/common/validation/test_check_type_int.py b/test/units/module_utils/common/validation/test_check_type_int.py
index 6f4dc6a2..22cedf61 100644
--- a/test/units/module_utils/common/validation/test_check_type_int.py
+++ b/test/units/module_utils/common/validation/test_check_type_int.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_type_int
diff --git a/test/units/module_utils/common/validation/test_check_type_jsonarg.py b/test/units/module_utils/common/validation/test_check_type_jsonarg.py
index d43bb035..e78e54bb 100644
--- a/test/units/module_utils/common/validation/test_check_type_jsonarg.py
+++ b/test/units/module_utils/common/validation/test_check_type_jsonarg.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_type_jsonarg
diff --git a/test/units/module_utils/common/validation/test_check_type_str.py b/test/units/module_utils/common/validation/test_check_type_str.py
index 71af2a0b..f10dad28 100644
--- a/test/units/module_utils/common/validation/test_check_type_str.py
+++ b/test/units/module_utils/common/validation/test_check_type_str.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_type_str
diff --git a/test/units/module_utils/compat/test_datetime.py b/test/units/module_utils/compat/test_datetime.py
deleted file mode 100644
index 66a0ad0b..00000000
--- a/test/units/module_utils/compat/test_datetime.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import datetime
-
-from ansible.module_utils.compat.datetime import utcnow, utcfromtimestamp, UTC
-from ansible.module_utils.six import PY3
-
-
-def test_utc():
- assert UTC.tzname(None) == 'UTC'
- assert UTC.utcoffset(None) == datetime.timedelta(0)
-
- if PY3:
- assert UTC.dst(None) is None
- else:
- assert UTC.dst(None) == datetime.timedelta(0)
-
-
-def test_utcnow():
- assert utcnow().tzinfo is UTC
-
-
-def test_utcfometimestamp_zero():
- dt = utcfromtimestamp(0)
-
- assert dt.tzinfo is UTC
- assert dt.year == 1970
- assert dt.month == 1
- assert dt.day == 1
- assert dt.hour == 0
- assert dt.minute == 0
- assert dt.second == 0
- assert dt.microsecond == 0
diff --git a/test/units/module_utils/conftest.py b/test/units/module_utils/conftest.py
index 8e82bf2a..8bc13c4d 100644
--- a/test/units/module_utils/conftest.py
+++ b/test/units/module_utils/conftest.py
@@ -12,8 +12,8 @@ import pytest
import ansible.module_utils.basic
from ansible.module_utils.six import PY3, string_types
-from ansible.module_utils.common.text.converters import to_bytes
-from ansible.module_utils.six.moves.collections_abc import MutableMapping
+from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common._collections_compat import MutableMapping
@pytest.fixture
diff --git a/test/units/module_utils/facts/base.py b/test/units/module_utils/facts/base.py
index 3cada8f1..33d3087b 100644
--- a/test/units/module_utils/facts/base.py
+++ b/test/units/module_utils/facts/base.py
@@ -48,9 +48,6 @@ class BaseFactsTest(unittest.TestCase):
@patch('platform.system', return_value='Linux')
@patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value='systemd')
def test_collect(self, mock_gfc, mock_ps):
- self._test_collect()
-
- def _test_collect(self):
module = self._mock_module()
fact_collector = self.collector_class()
facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
@@ -65,3 +62,4 @@ class BaseFactsTest(unittest.TestCase):
facts_dict = fact_collector.collect_with_namespace(module=module,
collected_facts=self.collected_facts)
self.assertIsInstance(facts_dict, dict)
+ return facts_dict
diff --git a/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo b/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo
deleted file mode 100644
index 32e183fa..00000000
--- a/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo
+++ /dev/null
@@ -1,14 +0,0 @@
-vendor_id : IBM/S390
-# processors : 2
-bogomips per cpu: 3033.00
-max thread id : 0
-features : esan3 zarch stfle msa ldisp eimm dfp edat etf3eh highgprs te vx sie
-facilities : 0 1 2 3 4 6 7 8 9 10 12 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 30 31 32 33 34 35 36 37 40 41 42 43 44 45 46 47 48 49 50 51 52 53 55 57 73 74 75 76 77 80 81 82 128 129 131
-cache0 : level=1 type=Data scope=Private size=128K line_size=256 associativity=8
-cache1 : level=1 type=Instruction scope=Private size=96K line_size=256 associativity=6
-cache2 : level=2 type=Data scope=Private size=2048K line_size=256 associativity=8
-cache3 : level=2 type=Instruction scope=Private size=2048K line_size=256 associativity=8
-cache4 : level=3 type=Unified scope=Shared size=65536K line_size=256 associativity=16
-cache5 : level=4 type=Unified scope=Shared size=491520K line_size=256 associativity=30
-processor 0: version = FF, identification = FFFFFF, machine = 2964
-processor 1: version = FF, identification = FFFFFF, machine = 2964
diff --git a/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo b/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo
deleted file mode 100644
index 79fe5a93..00000000
--- a/test/units/module_utils/facts/fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo
+++ /dev/null
@@ -1,1037 +0,0 @@
-vendor_id : IBM/S390
-# processors : 64
-bogomips per cpu: 21881.00
-max thread id : 1
-features : esan3 zarch stfle msa ldisp eimm dfp edat etf3eh highgprs te vx vxd vxe gs sie
-facilities : 0 1 2 3 4 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 30 31 32 33 34 35 36 37 38 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 57 58 59 60 64 65 66 67 68 69 70 71 72 73 75 76 77 78 80 81 82 128 129 130 131 132 133 134 135 138 139 141 142 144 145 146 156
-cache0 : level=1 type=Data scope=Private size=128K line_size=256 associativity=8
-cache1 : level=1 type=Instruction scope=Private size=128K line_size=256 associativity=8
-cache2 : level=2 type=Data scope=Private size=4096K line_size=256 associativity=8
-cache3 : level=2 type=Instruction scope=Private size=2048K line_size=256 associativity=8
-cache4 : level=3 type=Unified scope=Shared size=131072K line_size=256 associativity=32
-cache5 : level=4 type=Unified scope=Shared size=688128K line_size=256 associativity=42
-processor 0: version = 00, identification = FFFFFF, machine = 3906
-processor 1: version = 00, identification = FFFFFF, machine = 3906
-processor 2: version = 00, identification = FFFFFF, machine = 3906
-processor 3: version = 00, identification = FFFFFF, machine = 3906
-processor 4: version = 00, identification = FFFFFF, machine = 3906
-processor 5: version = 00, identification = FFFFFF, machine = 3906
-processor 6: version = 00, identification = FFFFFF, machine = 3906
-processor 7: version = 00, identification = FFFFFF, machine = 3906
-processor 8: version = 00, identification = FFFFFF, machine = 3906
-processor 9: version = 00, identification = FFFFFF, machine = 3906
-processor 10: version = 00, identification = FFFFFF, machine = 3906
-processor 11: version = 00, identification = FFFFFF, machine = 3906
-processor 12: version = 00, identification = FFFFFF, machine = 3906
-processor 13: version = 00, identification = FFFFFF, machine = 3906
-processor 14: version = 00, identification = FFFFFF, machine = 3906
-processor 15: version = 00, identification = FFFFFF, machine = 3906
-processor 16: version = 00, identification = FFFFFF, machine = 3906
-processor 17: version = 00, identification = FFFFFF, machine = 3906
-processor 18: version = 00, identification = FFFFFF, machine = 3906
-processor 19: version = 00, identification = FFFFFF, machine = 3906
-processor 20: version = 00, identification = FFFFFF, machine = 3906
-processor 21: version = 00, identification = FFFFFF, machine = 3906
-processor 22: version = 00, identification = FFFFFF, machine = 3906
-processor 23: version = 00, identification = FFFFFF, machine = 3906
-processor 24: version = 00, identification = FFFFFF, machine = 3906
-processor 25: version = 00, identification = FFFFFF, machine = 3906
-processor 26: version = 00, identification = FFFFFF, machine = 3906
-processor 27: version = 00, identification = FFFFFF, machine = 3906
-processor 28: version = 00, identification = FFFFFF, machine = 3906
-processor 29: version = 00, identification = FFFFFF, machine = 3906
-processor 30: version = 00, identification = FFFFFF, machine = 3906
-processor 31: version = 00, identification = FFFFFF, machine = 3906
-processor 32: version = 00, identification = FFFFFF, machine = 3906
-processor 33: version = 00, identification = FFFFFF, machine = 3906
-processor 34: version = 00, identification = FFFFFF, machine = 3906
-processor 35: version = 00, identification = FFFFFF, machine = 3906
-processor 36: version = 00, identification = FFFFFF, machine = 3906
-processor 37: version = 00, identification = FFFFFF, machine = 3906
-processor 38: version = 00, identification = FFFFFF, machine = 3906
-processor 39: version = 00, identification = FFFFFF, machine = 3906
-processor 40: version = 00, identification = FFFFFF, machine = 3906
-processor 41: version = 00, identification = FFFFFF, machine = 3906
-processor 42: version = 00, identification = FFFFFF, machine = 3906
-processor 43: version = 00, identification = FFFFFF, machine = 3906
-processor 44: version = 00, identification = FFFFFF, machine = 3906
-processor 45: version = 00, identification = FFFFFF, machine = 3906
-processor 46: version = 00, identification = FFFFFF, machine = 3906
-processor 47: version = 00, identification = FFFFFF, machine = 3906
-processor 48: version = 00, identification = FFFFFF, machine = 3906
-processor 49: version = 00, identification = FFFFFF, machine = 3906
-processor 50: version = 00, identification = FFFFFF, machine = 3906
-processor 51: version = 00, identification = FFFFFF, machine = 3906
-processor 52: version = 00, identification = FFFFFF, machine = 3906
-processor 53: version = 00, identification = FFFFFF, machine = 3906
-processor 54: version = 00, identification = FFFFFF, machine = 3906
-processor 55: version = 00, identification = FFFFFF, machine = 3906
-processor 56: version = 00, identification = FFFFFF, machine = 3906
-processor 57: version = 00, identification = FFFFFF, machine = 3906
-processor 58: version = 00, identification = FFFFFF, machine = 3906
-processor 59: version = 00, identification = FFFFFF, machine = 3906
-processor 60: version = 00, identification = FFFFFF, machine = 3906
-processor 61: version = 00, identification = FFFFFF, machine = 3906
-processor 62: version = 00, identification = FFFFFF, machine = 3906
-processor 63: version = 00, identification = FFFFFF, machine = 3906
-
-cpu number : 0
-physical id : 1
-core id : 0
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 0
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 1
-physical id : 1
-core id : 0
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 1
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 2
-physical id : 1
-core id : 1
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 2
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 3
-physical id : 1
-core id : 1
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 3
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 4
-physical id : 1
-core id : 2
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 4
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 5
-physical id : 1
-core id : 2
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 5
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 6
-physical id : 1
-core id : 3
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 6
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 7
-physical id : 1
-core id : 3
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 7
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 8
-physical id : 1
-core id : 4
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 8
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 9
-physical id : 1
-core id : 4
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 9
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 10
-physical id : 1
-core id : 5
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 10
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 11
-physical id : 1
-core id : 5
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 11
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 12
-physical id : 1
-core id : 6
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 12
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 13
-physical id : 1
-core id : 6
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 13
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 14
-physical id : 2
-core id : 7
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 14
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 15
-physical id : 2
-core id : 7
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 15
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 16
-physical id : 2
-core id : 8
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 16
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 17
-physical id : 2
-core id : 8
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 17
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 18
-physical id : 2
-core id : 9
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 18
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 19
-physical id : 2
-core id : 9
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 19
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 20
-physical id : 2
-core id : 10
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 20
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 21
-physical id : 2
-core id : 10
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 21
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 22
-physical id : 2
-core id : 11
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 22
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 23
-physical id : 2
-core id : 11
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 23
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 24
-physical id : 2
-core id : 12
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 24
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 25
-physical id : 2
-core id : 12
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 25
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 26
-physical id : 2
-core id : 13
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 26
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 27
-physical id : 2
-core id : 13
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 27
-siblings : 14
-cpu cores : 7
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 28
-physical id : 3
-core id : 14
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 28
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 29
-physical id : 3
-core id : 14
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 29
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 30
-physical id : 3
-core id : 15
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 30
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 31
-physical id : 3
-core id : 15
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 31
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 32
-physical id : 3
-core id : 16
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 32
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 33
-physical id : 3
-core id : 16
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 33
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 34
-physical id : 3
-core id : 17
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 34
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 35
-physical id : 3
-core id : 17
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 35
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 36
-physical id : 3
-core id : 18
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 36
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 37
-physical id : 3
-core id : 18
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 37
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 38
-physical id : 3
-core id : 19
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 38
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 39
-physical id : 3
-core id : 19
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 39
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 40
-physical id : 3
-core id : 20
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 40
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 41
-physical id : 3
-core id : 20
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 41
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 42
-physical id : 3
-core id : 21
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 42
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 43
-physical id : 3
-core id : 21
-book id : 1
-drawer id : 4
-dedicated : 0
-address : 43
-siblings : 16
-cpu cores : 8
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 44
-physical id : 1
-core id : 22
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 44
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 45
-physical id : 1
-core id : 22
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 45
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 46
-physical id : 1
-core id : 23
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 46
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 47
-physical id : 1
-core id : 23
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 47
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 48
-physical id : 1
-core id : 24
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 48
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 49
-physical id : 1
-core id : 24
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 49
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 50
-physical id : 1
-core id : 25
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 50
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 51
-physical id : 1
-core id : 25
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 51
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 52
-physical id : 1
-core id : 26
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 52
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 53
-physical id : 1
-core id : 26
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 53
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 54
-physical id : 1
-core id : 27
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 54
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 55
-physical id : 1
-core id : 27
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 55
-siblings : 12
-cpu cores : 6
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 56
-physical id : 2
-core id : 28
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 56
-siblings : 8
-cpu cores : 4
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 57
-physical id : 2
-core id : 28
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 57
-siblings : 8
-cpu cores : 4
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 58
-physical id : 2
-core id : 29
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 58
-siblings : 8
-cpu cores : 4
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 59
-physical id : 2
-core id : 29
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 59
-siblings : 8
-cpu cores : 4
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 60
-physical id : 2
-core id : 30
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 60
-siblings : 8
-cpu cores : 4
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 61
-physical id : 2
-core id : 30
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 61
-siblings : 8
-cpu cores : 4
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 62
-physical id : 2
-core id : 31
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 62
-siblings : 8
-cpu cores : 4
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
-cpu number : 63
-physical id : 2
-core id : 31
-book id : 2
-drawer id : 4
-dedicated : 0
-address : 63
-siblings : 8
-cpu cores : 4
-version : 00
-identification : FFFFFF
-machine : 3906
-cpu MHz dynamic : 5208
-cpu MHz static : 5208
-
diff --git a/test/units/module_utils/facts/hardware/linux_data.py b/test/units/module_utils/facts/hardware/linux_data.py
index f92f14eb..3879188d 100644
--- a/test/units/module_utils/facts/hardware/linux_data.py
+++ b/test/units/module_utils/facts/hardware/linux_data.py
@@ -18,12 +18,6 @@ __metaclass__ = type
import os
-
-def read_lines(path):
- with open(path) as file:
- return file.readlines()
-
-
LSBLK_OUTPUT = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
@@ -374,7 +368,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'armv61',
'nproc_out': 1,
'sched_getaffinity': set([0]),
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv6-rev7-1cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv6-rev7-1cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': ['0', 'ARMv6-compatible processor rev 7 (v6l)'],
'processor_cores': 1,
@@ -387,7 +381,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'armv71',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev4-4cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev4-4cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'ARMv7 Processor rev 4 (v7l)',
@@ -405,7 +399,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'aarch64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/aarch64-4cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/aarch64-4cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'AArch64 Processor rev 4 (aarch64)',
@@ -423,7 +417,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'x86_64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-4cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-4cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
@@ -441,7 +435,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'x86_64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-8cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-8cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
@@ -463,7 +457,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'arm64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/arm64-4cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/arm64-4cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': ['0', '1', '2', '3'],
'processor_cores': 1,
@@ -476,7 +470,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'armv71',
'nproc_out': 8,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7]),
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev3-8cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev3-8cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'ARMv7 Processor rev 3 (v7l)',
@@ -498,7 +492,7 @@ CPU_INFO_TEST_SCENARIOS = [
'architecture': 'x86_64',
'nproc_out': 2,
'sched_getaffinity': set([0, 1]),
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-2cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-2cpu-cpuinfo')).readlines(),
'expected_result': {
'processor': [
'0', 'GenuineIntel', 'Intel(R) Xeon(R) CPU E5-2680 v2 @ 2.80GHz',
@@ -511,7 +505,7 @@ CPU_INFO_TEST_SCENARIOS = [
'processor_vcpus': 2},
},
{
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64-power7-rhel7-8cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64-power7-rhel7-8cpu-cpuinfo')).readlines(),
'architecture': 'ppc64',
'nproc_out': 8,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7]),
@@ -534,7 +528,7 @@ CPU_INFO_TEST_SCENARIOS = [
},
},
{
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64le-power8-24cpu-cpuinfo')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64le-power8-24cpu-cpuinfo')).readlines(),
'architecture': 'ppc64le',
'nproc_out': 24,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]),
@@ -573,41 +567,7 @@ CPU_INFO_TEST_SCENARIOS = [
},
},
{
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo')),
- 'architecture': 's390x',
- 'nproc_out': 2,
- 'sched_getaffinity': set([0, 1]),
- 'expected_result': {
- 'processor': [
- 'IBM/S390',
- ],
- 'processor_cores': 2,
- 'processor_count': 1,
- 'processor_nproc': 2,
- 'processor_threads_per_core': 1,
- 'processor_vcpus': 2
- },
- },
- {
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo')),
- 'architecture': 's390x',
- 'nproc_out': 64,
- 'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
- 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63]),
- 'expected_result': {
- 'processor': [
- 'IBM/S390',
- ],
- 'processor_cores': 32,
- 'processor_count': 1,
- 'processor_nproc': 64,
- 'processor_threads_per_core': 2,
- 'processor_vcpus': 64
- },
- },
- {
- 'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/sparc-t5-debian-ldom-24vcpu')),
+ 'cpuinfo': open(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/sparc-t5-debian-ldom-24vcpu')).readlines(),
'architecture': 'sparc64',
'nproc_out': 24,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]),
diff --git a/test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py b/test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py
index 41674344..aea8694e 100644
--- a/test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py
+++ b/test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py
@@ -45,7 +45,7 @@ def test_get_cpu_info_missing_arch(mocker):
module = mocker.Mock()
inst = linux.LinuxHardware(module)
- # ARM, Power, and zSystems will report incorrect processor count if architecture is not available
+ # ARM and Power will report incorrect processor count if architecture is not available
mocker.patch('os.path.exists', return_value=False)
mocker.patch('os.access', return_value=True)
for test in CPU_INFO_TEST_SCENARIOS:
@@ -56,7 +56,7 @@ def test_get_cpu_info_missing_arch(mocker):
test_result = inst.get_cpu_facts()
- if test['architecture'].startswith(('armv', 'aarch', 'ppc', 's390')):
+ if test['architecture'].startswith(('armv', 'aarch', 'ppc')):
assert test['expected_result'] != test_result
else:
assert test['expected_result'] == test_result
diff --git a/test/units/module_utils/facts/network/test_locally_reachable_ips.py b/test/units/module_utils/facts/network/test_locally_reachable_ips.py
deleted file mode 100644
index 7eac790f..00000000
--- a/test/units/module_utils/facts/network/test_locally_reachable_ips.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# This file is part of Ansible
-# -*- coding: utf-8 -*-
-#
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-#
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from units.compat.mock import Mock
-from units.compat import unittest
-from ansible.module_utils.facts.network import linux
-
-# ip -4 route show table local
-IP4_ROUTE_SHOW_LOCAL = """
-broadcast 127.0.0.0 dev lo proto kernel scope link src 127.0.0.1
-local 127.0.0.0/8 dev lo proto kernel scope host src 127.0.0.1
-local 127.0.0.1 dev lo proto kernel scope host src 127.0.0.1
-broadcast 127.255.255.255 dev lo proto kernel scope link src 127.0.0.1
-local 192.168.1.0/24 dev lo scope host
-"""
-
-# ip -6 route show table local
-IP6_ROUTE_SHOW_LOCAL = """
-local ::1 dev lo proto kernel metric 0 pref medium
-local 2a02:123:3:1::e dev enp94s0f0np0 proto kernel metric 0 pref medium
-local 2a02:123:15::/48 dev lo metric 1024 pref medium
-local 2a02:123:16::/48 dev lo metric 1024 pref medium
-local fe80::2eea:7fff:feca:fe68 dev enp94s0f0np0 proto kernel metric 0 pref medium
-multicast ff00::/8 dev enp94s0f0np0 proto kernel metric 256 pref medium
-"""
-
-# Hash returned by get_locally_reachable_ips()
-IP_ROUTE_SHOW_LOCAL_EXPECTED = {
- 'ipv4': [
- '127.0.0.0/8',
- '127.0.0.1',
- '192.168.1.0/24'
- ],
- 'ipv6': [
- '::1',
- '2a02:123:3:1::e',
- '2a02:123:15::/48',
- '2a02:123:16::/48',
- 'fe80::2eea:7fff:feca:fe68'
- ]
-}
-
-
-class TestLocalRoutesLinux(unittest.TestCase):
- gather_subset = ['all']
-
- def get_bin_path(self, command):
- if command == 'ip':
- return 'fake/ip'
- return None
-
- def run_command(self, command):
- if command == ['fake/ip', '-4', 'route', 'show', 'table', 'local']:
- return 0, IP4_ROUTE_SHOW_LOCAL, ''
- if command == ['fake/ip', '-6', 'route', 'show', 'table', 'local']:
- return 0, IP6_ROUTE_SHOW_LOCAL, ''
- return 1, '', ''
-
- def test(self):
- module = self._mock_module()
- module.get_bin_path.side_effect = self.get_bin_path
- module.run_command.side_effect = self.run_command
-
- net = linux.LinuxNetwork(module)
- res = net.get_locally_reachable_ips('fake/ip')
- self.assertDictEqual(res, IP_ROUTE_SHOW_LOCAL_EXPECTED)
-
- def _mock_module(self):
- mock_module = Mock()
- mock_module.params = {'gather_subset': self.gather_subset,
- 'gather_timeout': 5,
- 'filter': '*'}
- mock_module.get_bin_path = Mock(return_value=None)
- return mock_module
diff --git a/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py b/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py
index 6667ada7..c0957566 100644
--- a/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py
+++ b/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py
@@ -21,8 +21,7 @@ def test_input():
def test_parse_distribution_file_clear_linux(mock_module, test_input):
- with open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files/ClearLinux')) as file:
- test_input['data'] = file.read()
+ test_input['data'] = open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files/ClearLinux')).read()
result = (
True,
@@ -44,8 +43,7 @@ def test_parse_distribution_file_clear_linux_no_match(mock_module, distro_file,
Test against data from Linux Mint and CoreOS to ensure we do not get a reported
match from parse_distribution_file_ClearLinux()
"""
- with open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)) as file:
- test_input['data'] = file.read()
+ test_input['data'] = open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)).read()
result = (False, {})
diff --git a/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py b/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py
index efb937e0..53fd4ea1 100644
--- a/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py
+++ b/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py
@@ -19,12 +19,9 @@ from ansible.module_utils.facts.system.distribution import DistributionFiles
)
)
def test_parse_distribution_file_slackware(mock_module, distro_file, expected_version):
- with open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)) as file:
- data = file.read()
-
test_input = {
'name': 'Slackware',
- 'data': data,
+ 'data': open(os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)).read(),
'path': '/etc/os-release',
'collected_facts': None,
}
diff --git a/test/units/module_utils/facts/system/test_pkg_mgr.py b/test/units/module_utils/facts/system/test_pkg_mgr.py
deleted file mode 100644
index 8dc1a3b7..00000000
--- a/test/units/module_utils/facts/system/test_pkg_mgr.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright: (c) 2023, Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible.module_utils.facts.system.pkg_mgr import PkgMgrFactCollector
-
-
-_FEDORA_FACTS = {
- "ansible_distribution": "Fedora",
- "ansible_distribution_major_version": 38, # any version where yum isn't default
- "ansible_os_family": "RedHat"
-}
-
-_KYLIN_FACTS = {
- "ansible_distribution": "Kylin Linux Advanced Server",
- "ansible_distribution_major_version": "V10",
- "ansible_os_family": "RedHat"
-}
-
-# NOTE pkg_mgr == "dnf" means the dnf module for the dnf 4 or below
-
-
-def test_default_dnf_version_detection_kylin_dnf4(mocker):
- mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf-3"))
- mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3"}.get(p, p))
- assert PkgMgrFactCollector().collect(collected_facts=_KYLIN_FACTS).get("pkg_mgr") == "dnf"
-
-
-def test_default_dnf_version_detection_fedora_dnf4(mocker):
- mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf-3"))
- mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3"}.get(p, p))
- assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf"
-
-
-def test_default_dnf_version_detection_fedora_dnf5(mocker):
- mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf5"))
- mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf5"}.get(p, p))
- assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf5"
-
-
-def test_default_dnf_version_detection_fedora_dnf4_both_installed(mocker):
- mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf-3", "/usr/bin/dnf5"))
- mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3"}.get(p, p))
- assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf"
-
-
-def test_default_dnf_version_detection_fedora_dnf4_microdnf5_installed(mocker):
- mocker.patch(
- "os.path.exists",
- lambda p: p in ("/usr/bin/dnf", "/usr/bin/microdnf", "/usr/bin/dnf-3", "/usr/bin/dnf5")
- )
- mocker.patch(
- "os.path.realpath",
- lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3", "/usr/bin/microdnf": "/usr/bin/dnf5"}.get(p, p)
- )
- assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf"
-
-
-def test_default_dnf_version_detection_fedora_dnf4_microdnf(mocker):
- mocker.patch("os.path.exists", lambda p: p == "/usr/bin/microdnf")
- assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf"
-
-
-def test_default_dnf_version_detection_fedora_dnf5_microdnf(mocker):
- mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/microdnf", "/usr/bin/dnf5"))
- mocker.patch("os.path.realpath", lambda p: {"/usr/bin/microdnf": "/usr/bin/dnf5"}.get(p, p))
- assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "dnf5"
-
-
-def test_default_dnf_version_detection_fedora_no_default(mocker):
- mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf-3", "/usr/bin/dnf5"))
- assert PkgMgrFactCollector().collect(collected_facts=_FEDORA_FACTS).get("pkg_mgr") == "unknown"
diff --git a/test/units/module_utils/facts/test_collectors.py b/test/units/module_utils/facts/test_collectors.py
index 984b5859..c4806025 100644
--- a/test/units/module_utils/facts/test_collectors.py
+++ b/test/units/module_utils/facts/test_collectors.py
@@ -93,7 +93,7 @@ class TestApparmorFacts(BaseFactsTest):
collector_class = ApparmorFactCollector
def test_collect(self):
- facts_dict = super(TestApparmorFacts, self)._test_collect()
+ facts_dict = super(TestApparmorFacts, self).test_collect()
self.assertIn('status', facts_dict['apparmor'])
@@ -191,7 +191,7 @@ class TestEnvFacts(BaseFactsTest):
collector_class = EnvFactCollector
def test_collect(self):
- facts_dict = super(TestEnvFacts, self)._test_collect()
+ facts_dict = super(TestEnvFacts, self).test_collect()
self.assertIn('HOME', facts_dict['env'])
@@ -355,6 +355,7 @@ class TestSelinuxFacts(BaseFactsTest):
facts_dict = fact_collector.collect(module=module)
self.assertIsInstance(facts_dict, dict)
self.assertEqual(facts_dict['selinux']['status'], 'Missing selinux Python library')
+ return facts_dict
class TestServiceMgrFacts(BaseFactsTest):
diff --git a/test/units/module_utils/facts/test_date_time.py b/test/units/module_utils/facts/test_date_time.py
index 6cc05f97..6abc36a7 100644
--- a/test/units/module_utils/facts/test_date_time.py
+++ b/test/units/module_utils/facts/test_date_time.py
@@ -10,27 +10,28 @@ import datetime
import string
import time
-from ansible.module_utils.compat.datetime import UTC
from ansible.module_utils.facts.system import date_time
EPOCH_TS = 1594449296.123456
DT = datetime.datetime(2020, 7, 11, 12, 34, 56, 124356)
-UTC_DT = datetime.datetime(2020, 7, 11, 2, 34, 56, 124356)
+DT_UTC = datetime.datetime(2020, 7, 11, 2, 34, 56, 124356)
@pytest.fixture
def fake_now(monkeypatch):
"""
- Patch `datetime.datetime.fromtimestamp()`,
+ Patch `datetime.datetime.fromtimestamp()`, `datetime.datetime.utcfromtimestamp()`,
and `time.time()` to return deterministic values.
"""
class FakeNow:
@classmethod
- def fromtimestamp(cls, timestamp, tz=None):
- if tz == UTC:
- return UTC_DT.replace(tzinfo=tz)
- return DT.replace(tzinfo=tz)
+ def fromtimestamp(cls, timestamp):
+ return DT
+
+ @classmethod
+ def utcfromtimestamp(cls, timestamp):
+ return DT_UTC
def _time():
return EPOCH_TS
diff --git a/test/units/module_utils/facts/test_sysctl.py b/test/units/module_utils/facts/test_sysctl.py
index 0f1632bf..c369b610 100644
--- a/test/units/module_utils/facts/test_sysctl.py
+++ b/test/units/module_utils/facts/test_sysctl.py
@@ -20,9 +20,13 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import os
+
+import pytest
+
# for testing
from units.compat import unittest
-from units.compat.mock import MagicMock
+from units.compat.mock import patch, MagicMock, mock_open, Mock
from ansible.module_utils.facts.sysctl import get_sysctl
diff --git a/test/units/module_utils/facts/test_timeout.py b/test/units/module_utils/facts/test_timeout.py
index 6ba7c397..2adbc4a6 100644
--- a/test/units/module_utils/facts/test_timeout.py
+++ b/test/units/module_utils/facts/test_timeout.py
@@ -139,7 +139,7 @@ def function_other_timeout():
@timeout.timeout(1)
def function_raises():
- return 1 / 0
+ 1 / 0
@timeout.timeout(1)
diff --git a/test/units/module_utils/test_text.py b/test/units/module_utils/test_text.py
deleted file mode 100644
index 72ef2ab2..00000000
--- a/test/units/module_utils/test_text.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import codecs
-
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
-from ansible.module_utils.six import PY3, text_type, binary_type
-
-
-def test_exports():
- """Ensure legacy attributes are exported."""
-
- from ansible.module_utils import _text
-
- assert _text.codecs == codecs
- assert _text.PY3 == PY3
- assert _text.text_type == text_type
- assert _text.binary_type == binary_type
- assert _text.to_bytes == to_bytes
- assert _text.to_native == to_native
- assert _text.to_text == to_text
diff --git a/test/units/module_utils/urls/test_Request.py b/test/units/module_utils/urls/test_Request.py
index a8bc3a0b..d2c4ea38 100644
--- a/test/units/module_utils/urls/test_Request.py
+++ b/test/units/module_utils/urls/test_Request.py
@@ -33,7 +33,6 @@ def install_opener_mock(mocker):
def test_Request_fallback(urlopen_mock, install_opener_mock, mocker):
here = os.path.dirname(__file__)
pem = os.path.join(here, 'fixtures/client.pem')
- client_key = os.path.join(here, 'fixtures/client.key')
cookies = cookiejar.CookieJar()
request = Request(
@@ -47,8 +46,8 @@ def test_Request_fallback(urlopen_mock, install_opener_mock, mocker):
http_agent='ansible-tests',
force_basic_auth=True,
follow_redirects='all',
- client_cert=pem,
- client_key=client_key,
+ client_cert='/tmp/client.pem',
+ client_key='/tmp/client.key',
cookies=cookies,
unix_socket='/foo/bar/baz.sock',
ca_path=pem,
@@ -69,8 +68,8 @@ def test_Request_fallback(urlopen_mock, install_opener_mock, mocker):
call(None, 'ansible-tests'), # http_agent
call(None, True), # force_basic_auth
call(None, 'all'), # follow_redirects
- call(None, pem), # client_cert
- call(None, client_key), # client_key
+ call(None, '/tmp/client.pem'), # client_cert
+ call(None, '/tmp/client.key'), # client_key
call(None, cookies), # cookies
call(None, '/foo/bar/baz.sock'), # unix_socket
call(None, pem), # ca_path
@@ -359,7 +358,10 @@ def test_Request_open_client_cert(urlopen_mock, install_opener_mock):
assert ssl_handler.client_cert == client_cert
assert ssl_handler.client_key == client_key
- ssl_handler._build_https_connection('ansible.com')
+ https_connection = ssl_handler._build_https_connection('ansible.com')
+
+ assert https_connection.key_file == client_key
+ assert https_connection.cert_file == client_cert
def test_Request_open_cookies(urlopen_mock, install_opener_mock):
diff --git a/test/units/module_utils/urls/test_fetch_file.py b/test/units/module_utils/urls/test_fetch_file.py
index ecb6b9f1..ed112270 100644
--- a/test/units/module_utils/urls/test_fetch_file.py
+++ b/test/units/module_utils/urls/test_fetch_file.py
@@ -10,6 +10,7 @@ import os
from ansible.module_utils.urls import fetch_file
import pytest
+from units.compat.mock import MagicMock
class FakeTemporaryFile:
diff --git a/test/units/module_utils/urls/test_prepare_multipart.py b/test/units/module_utils/urls/test_prepare_multipart.py
index ee320477..226d9edd 100644
--- a/test/units/module_utils/urls/test_prepare_multipart.py
+++ b/test/units/module_utils/urls/test_prepare_multipart.py
@@ -7,6 +7,8 @@ __metaclass__ = type
import os
+from io import StringIO
+
from email.message import Message
import pytest
diff --git a/test/units/module_utils/urls/test_urls.py b/test/units/module_utils/urls/test_urls.py
index f0e5e9ea..69c1b824 100644
--- a/test/units/module_utils/urls/test_urls.py
+++ b/test/units/module_utils/urls/test_urls.py
@@ -6,7 +6,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils import urls
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
import pytest
diff --git a/test/units/modules/conftest.py b/test/units/modules/conftest.py
index c60c586d..a7d1e047 100644
--- a/test/units/modules/conftest.py
+++ b/test/units/modules/conftest.py
@@ -8,15 +8,24 @@ import json
import pytest
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_bytes
+from ansible.module_utils.common._collections_compat import MutableMapping
@pytest.fixture
def patch_ansible_module(request, mocker):
- request.param = {'ANSIBLE_MODULE_ARGS': request.param}
- request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
- request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
-
- args = json.dumps(request.param)
+ if isinstance(request.param, string_types):
+ args = request.param
+ elif isinstance(request.param, MutableMapping):
+ if 'ANSIBLE_MODULE_ARGS' not in request.param:
+ request.param = {'ANSIBLE_MODULE_ARGS': request.param}
+ if '_ansible_remote_tmp' not in request.param['ANSIBLE_MODULE_ARGS']:
+ request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
+ if '_ansible_keep_remote_files' not in request.param['ANSIBLE_MODULE_ARGS']:
+ request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
+ args = json.dumps(request.param)
+ else:
+ raise Exception('Malformed data to the patch_ansible_module pytest fixture')
mocker.patch('ansible.module_utils.basic._ANSIBLE_ARGS', to_bytes(args))
diff --git a/test/units/modules/test_apt.py b/test/units/modules/test_apt.py
index a5aa4a90..20e056ff 100644
--- a/test/units/modules/test_apt.py
+++ b/test/units/modules/test_apt.py
@@ -2,13 +2,20 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import collections
+import sys
from units.compat.mock import Mock
from units.compat import unittest
-from ansible.modules.apt import (
- expand_pkgspec_from_fnmatches,
-)
+try:
+ from ansible.modules.apt import (
+ expand_pkgspec_from_fnmatches,
+ )
+except Exception:
+ # Need some more module_utils work (porting urls.py) before we can test
+ # modules. So don't error out in this case.
+ if sys.version_info[0] >= 3:
+ pass
class AptExpandPkgspecTestCase(unittest.TestCase):
@@ -22,25 +29,25 @@ class AptExpandPkgspecTestCase(unittest.TestCase):
]
def test_trivial(self):
- pkg = ["apt"]
+ foo = ["apt"]
self.assertEqual(
- expand_pkgspec_from_fnmatches(None, pkg, self.fake_cache), pkg)
+ expand_pkgspec_from_fnmatches(None, foo, self.fake_cache), foo)
def test_version_wildcard(self):
- pkg = ["apt=1.0*"]
+ foo = ["apt=1.0*"]
self.assertEqual(
- expand_pkgspec_from_fnmatches(None, pkg, self.fake_cache), pkg)
+ expand_pkgspec_from_fnmatches(None, foo, self.fake_cache), foo)
def test_pkgname_wildcard_version_wildcard(self):
- pkg = ["apt*=1.0*"]
+ foo = ["apt*=1.0*"]
m_mock = Mock()
self.assertEqual(
- expand_pkgspec_from_fnmatches(m_mock, pkg, self.fake_cache),
+ expand_pkgspec_from_fnmatches(m_mock, foo, self.fake_cache),
['apt', 'apt-utils'])
def test_pkgname_expands(self):
- pkg = ["apt*"]
+ foo = ["apt*"]
m_mock = Mock()
self.assertEqual(
- expand_pkgspec_from_fnmatches(m_mock, pkg, self.fake_cache),
+ expand_pkgspec_from_fnmatches(m_mock, foo, self.fake_cache),
["apt", "apt-utils"])
diff --git a/test/units/modules/test_async_wrapper.py b/test/units/modules/test_async_wrapper.py
index dbaf6834..37b1fda3 100644
--- a/test/units/modules/test_async_wrapper.py
+++ b/test/units/modules/test_async_wrapper.py
@@ -7,21 +7,26 @@ __metaclass__ = type
import os
import json
import shutil
-import sys
import tempfile
+import pytest
+
+from units.compat.mock import patch, MagicMock
from ansible.modules import async_wrapper
+from pprint import pprint
+
class TestAsyncWrapper:
def test_run_module(self, monkeypatch):
def mock_get_interpreter(module_path):
- return [sys.executable]
+ return ['/usr/bin/python']
module_result = {'rc': 0}
module_lines = [
+ '#!/usr/bin/python',
'import sys',
'sys.stderr.write("stderr stuff")',
"print('%s')" % json.dumps(module_result)
diff --git a/test/units/modules/test_copy.py b/test/units/modules/test_copy.py
index beeef6d7..20c309b6 100644
--- a/test/units/modules/test_copy.py
+++ b/test/units/modules/test_copy.py
@@ -128,19 +128,16 @@ def test_split_pre_existing_dir_working_dir_exists(directory, expected, mocker):
#
# Info helpful for making new test cases:
#
-# base_mode = {
-# 'dir no perms': 0o040000,
-# 'file no perms': 0o100000,
-# 'dir all perms': 0o040000 | 0o777,
-# 'file all perms': 0o100000 | 0o777}
+# base_mode = {'dir no perms': 0o040000,
+# 'file no perms': 0o100000,
+# 'dir all perms': 0o400000 | 0o777,
+# 'file all perms': 0o100000, | 0o777}
#
-# perm_bits = {
-# 'x': 0b001,
+# perm_bits = {'x': 0b001,
# 'w': 0b010,
# 'r': 0b100}
#
-# role_shift = {
-# 'u': 6,
+# role_shift = {'u': 6,
# 'g': 3,
# 'o': 0}
@@ -175,10 +172,6 @@ DATA = ( # Going from no permissions to setting all for user, group, and/or oth
# chmod a-X statfile <== removes execute from statfile
(0o100777, u'a-X', 0o0666),
- # Verify X uses computed not original mode
- (0o100777, u'a=,u=rX', 0o0400),
- (0o040777, u'a=,u=rX', 0o0500),
-
# Multiple permissions
(0o040000, u'u=rw-x+X,g=r-x+X,o=r-x+X', 0o0755),
(0o100000, u'u=rw-x+X,g=r-x+X,o=r-x+X', 0o0644),
@@ -192,10 +185,6 @@ UMASK_DATA = (
INVALID_DATA = (
(0o040000, u'a=foo', "bad symbolic permission for mode: a=foo"),
(0o040000, u'f=rwx', "bad symbolic permission for mode: f=rwx"),
- (0o100777, u'of=r', "bad symbolic permission for mode: of=r"),
-
- (0o100777, u'ao=r', "bad symbolic permission for mode: ao=r"),
- (0o100777, u'oa=r', "bad symbolic permission for mode: oa=r"),
)
diff --git a/test/units/modules/test_hostname.py b/test/units/modules/test_hostname.py
index 1aa4a57a..9050fd04 100644
--- a/test/units/modules/test_hostname.py
+++ b/test/units/modules/test_hostname.py
@@ -6,6 +6,7 @@ import shutil
import tempfile
from units.compat.mock import patch, MagicMock, mock_open
+from ansible.module_utils import basic
from ansible.module_utils.common._utils import get_all_subclasses
from ansible.modules import hostname
from units.modules.utils import ModuleTestCase, set_module_args
@@ -43,9 +44,12 @@ class TestHostname(ModuleTestCase):
classname = "%sStrategy" % prefix
cls = getattr(hostname, classname, None)
- assert cls is not None
-
- self.assertTrue(issubclass(cls, hostname.BaseStrategy))
+ if cls is None:
+ self.assertFalse(
+ cls is None, "%s is None, should be a subclass" % classname
+ )
+ else:
+ self.assertTrue(issubclass(cls, hostname.BaseStrategy))
class TestRedhatStrategy(ModuleTestCase):
diff --git a/test/units/modules/test_iptables.py b/test/units/modules/test_iptables.py
index 2459cf77..265e770a 100644
--- a/test/units/modules/test_iptables.py
+++ b/test/units/modules/test_iptables.py
@@ -181,7 +181,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 1)
+ self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -208,6 +208,7 @@ class TestIptables(ModuleTestCase):
commands_results = [
(1, '', ''), # check_rule_present
+ (0, '', ''), # check_chain_present
(0, '', ''),
]
@@ -217,7 +218,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 2)
+ self.assertEqual(run_command.call_count, 3)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -231,7 +232,7 @@ class TestIptables(ModuleTestCase):
'-j',
'ACCEPT'
])
- self.assertEqual(run_command.call_args_list[1][0][0], [
+ self.assertEqual(run_command.call_args_list[2][0][0], [
'/sbin/iptables',
'-t',
'filter',
@@ -271,7 +272,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 1)
+ self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -320,7 +321,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 2)
+ self.assertEqual(run_command.call_count, 3)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -342,7 +343,7 @@ class TestIptables(ModuleTestCase):
'--to-ports',
'8600'
])
- self.assertEqual(run_command.call_args_list[1][0][0], [
+ self.assertEqual(run_command.call_args_list[2][0][0], [
'/sbin/iptables',
'-t',
'nat',
@@ -1018,8 +1019,10 @@ class TestIptables(ModuleTestCase):
})
commands_results = [
+ (1, '', ''), # check_rule_present
(1, '', ''), # check_chain_present
(0, '', ''), # create_chain
+ (0, '', ''), # append_rule
]
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
@@ -1028,20 +1031,32 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 2)
+ self.assertEqual(run_command.call_count, 4)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t', 'filter',
- '-L', 'FOOBAR',
+ '-C', 'FOOBAR',
])
self.assertEqual(run_command.call_args_list[1][0][0], [
'/sbin/iptables',
'-t', 'filter',
+ '-L', 'FOOBAR',
+ ])
+
+ self.assertEqual(run_command.call_args_list[2][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
'-N', 'FOOBAR',
])
+ self.assertEqual(run_command.call_args_list[3][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-A', 'FOOBAR',
+ ])
+
commands_results = [
(0, '', ''), # check_rule_present
]
@@ -1063,6 +1078,7 @@ class TestIptables(ModuleTestCase):
commands_results = [
(1, '', ''), # check_rule_present
+ (1, '', ''), # check_chain_present
]
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
@@ -1071,11 +1087,17 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 1)
+ self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t', 'filter',
+ '-C', 'FOOBAR',
+ ])
+
+ self.assertEqual(run_command.call_args_list[1][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
'-L', 'FOOBAR',
])
diff --git a/test/units/modules/test_known_hosts.py b/test/units/modules/test_known_hosts.py
index 667f3e50..123dd75f 100644
--- a/test/units/modules/test_known_hosts.py
+++ b/test/units/modules/test_known_hosts.py
@@ -6,7 +6,7 @@ import tempfile
from ansible.module_utils import basic
from units.compat import unittest
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
from ansible.module_utils.basic import AnsibleModule
from ansible.modules.known_hosts import compute_diff, sanity_check
diff --git a/test/units/modules/test_unarchive.py b/test/units/modules/test_unarchive.py
index 935231ba..3e7a58c9 100644
--- a/test/units/modules/test_unarchive.py
+++ b/test/units/modules/test_unarchive.py
@@ -8,6 +8,20 @@ import pytest
from ansible.modules.unarchive import ZipArchive, TgzArchive
+class AnsibleModuleExit(Exception):
+ def __init__(self, *args, **kwargs):
+ self.args = args
+ self.kwargs = kwargs
+
+
+class ExitJson(AnsibleModuleExit):
+ pass
+
+
+class FailJson(AnsibleModuleExit):
+ pass
+
+
@pytest.fixture
def fake_ansible_module():
return FakeAnsibleModule()
@@ -18,6 +32,12 @@ class FakeAnsibleModule:
self.params = {}
self.tmpdir = None
+ def exit_json(self, *args, **kwargs):
+ raise ExitJson(*args, **kwargs)
+
+ def fail_json(self, *args, **kwargs):
+ raise FailJson(*args, **kwargs)
+
class TestCaseZipArchive:
@pytest.mark.parametrize(
diff --git a/test/units/modules/utils.py b/test/units/modules/utils.py
index b56229e8..6d169e36 100644
--- a/test/units/modules/utils.py
+++ b/test/units/modules/utils.py
@@ -6,12 +6,14 @@ import json
from units.compat import unittest
from units.compat.mock import patch
from ansible.module_utils import basic
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
def set_module_args(args):
- args['_ansible_remote_tmp'] = '/tmp'
- args['_ansible_keep_remote_files'] = False
+ if '_ansible_remote_tmp' not in args:
+ args['_ansible_remote_tmp'] = '/tmp'
+ if '_ansible_keep_remote_files' not in args:
+ args['_ansible_keep_remote_files'] = False
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args)
@@ -26,6 +28,8 @@ class AnsibleFailJson(Exception):
def exit_json(*args, **kwargs):
+ if 'changed' not in kwargs:
+ kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
diff --git a/test/units/parsing/test_ajson.py b/test/units/parsing/test_ajson.py
index bb7bf1a7..1b9a76b4 100644
--- a/test/units/parsing/test_ajson.py
+++ b/test/units/parsing/test_ajson.py
@@ -109,11 +109,7 @@ class TestAnsibleJSONEncoder:
def __len__(self):
return len(self.__dict__)
- mapping = M(request.param)
-
- assert isinstance(len(mapping), int) # ensure coverage of __len__
-
- return mapping
+ return M(request.param)
@pytest.fixture
def ansible_json_encoder(self):
diff --git a/test/units/parsing/test_dataloader.py b/test/units/parsing/test_dataloader.py
index a7f8b1d2..9ec49a8d 100644
--- a/test/units/parsing/test_dataloader.py
+++ b/test/units/parsing/test_dataloader.py
@@ -25,7 +25,8 @@ from units.compat import unittest
from unittest.mock import patch, mock_open
from ansible.errors import AnsibleParserError, yaml_strings, AnsibleFileNotFound
from ansible.parsing.vault import AnsibleVaultError
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six import PY3
from units.mock.vault_helper import TextVaultSecret
from ansible.parsing.dataloader import DataLoader
@@ -91,11 +92,11 @@ class TestDataLoader(unittest.TestCase):
- { role: 'testrole' }
testrole/tasks/main.yml:
- - include_tasks: "include1.yml"
+ - include: "include1.yml"
static: no
testrole/tasks/include1.yml:
- - include_tasks: include2.yml
+ - include: include2.yml
static: no
testrole/tasks/include2.yml:
@@ -228,7 +229,11 @@ class TestDataLoaderWithVault(unittest.TestCase):
3135306561356164310a343937653834643433343734653137383339323330626437313562306630
3035
"""
+ if PY3:
+ builtins_name = 'builtins'
+ else:
+ builtins_name = '__builtin__'
- with patch('builtins.open', mock_open(read_data=vaulted_data.encode('utf-8'))):
+ with patch(builtins_name + '.open', mock_open(read_data=vaulted_data.encode('utf-8'))):
output = self._loader.load_from_file('dummy_vault.txt')
self.assertEqual(output, dict(foo='bar'))
diff --git a/test/units/parsing/test_mod_args.py b/test/units/parsing/test_mod_args.py
index aeb74ad5..5d3f5d25 100644
--- a/test/units/parsing/test_mod_args.py
+++ b/test/units/parsing/test_mod_args.py
@@ -6,10 +6,10 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
+import re
from ansible.errors import AnsibleParserError
from ansible.parsing.mod_args import ModuleArgsParser
-from ansible.plugins.loader import init_plugin_loader
from ansible.utils.sentinel import Sentinel
@@ -119,19 +119,19 @@ class TestModArgsDwim:
assert err.value.args[0] == msg
def test_multiple_actions_ping_shell(self):
- init_plugin_loader()
args_dict = {'ping': 'data=hi', 'shell': 'echo hi'}
m = ModuleArgsParser(args_dict)
with pytest.raises(AnsibleParserError) as err:
m.parse()
- assert err.value.args[0] == f'conflicting action statements: {", ".join(args_dict)}'
+ assert err.value.args[0].startswith("conflicting action statements: ")
+ actions = set(re.search(r'(\w+), (\w+)', err.value.args[0]).groups())
+ assert actions == set(['ping', 'shell'])
def test_bogus_action(self):
- init_plugin_loader()
args_dict = {'bogusaction': {}}
m = ModuleArgsParser(args_dict)
with pytest.raises(AnsibleParserError) as err:
m.parse()
- assert err.value.args[0].startswith(f"couldn't resolve module/action '{next(iter(args_dict))}'")
+ assert err.value.args[0].startswith("couldn't resolve module/action 'bogusaction'")
diff --git a/test/units/parsing/test_splitter.py b/test/units/parsing/test_splitter.py
index 893f0473..a37de0f9 100644
--- a/test/units/parsing/test_splitter.py
+++ b/test/units/parsing/test_splitter.py
@@ -21,17 +21,10 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.parsing.splitter import split_args, parse_kv
-from ansible.errors import AnsibleParserError
import pytest
SPLIT_DATA = (
- (None,
- [],
- {}),
- (u'',
- [],
- {}),
(u'a',
[u'a'],
{u'_raw_params': u'a'}),
@@ -53,18 +46,6 @@ SPLIT_DATA = (
(u'a="echo \\"hello world\\"" b=bar',
[u'a="echo \\"hello world\\""', u'b=bar'],
{u'a': u'echo "hello world"', u'b': u'bar'}),
- (u'a="nest\'ed"',
- [u'a="nest\'ed"'],
- {u'a': u'nest\'ed'}),
- (u' ',
- [u' '],
- {u'_raw_params': u' '}),
- (u'\\ ',
- [u' '],
- {u'_raw_params': u' '}),
- (u'a\\=escaped',
- [u'a\\=escaped'],
- {u'_raw_params': u'a=escaped'}),
(u'a="multi\nline"',
[u'a="multi\nline"'],
{u'a': u'multi\nline'}),
@@ -80,27 +61,12 @@ SPLIT_DATA = (
(u'a="multiline\nmessage1\\\n" b="multiline\nmessage2\\\n"',
[u'a="multiline\nmessage1\\\n"', u'b="multiline\nmessage2\\\n"'],
{u'a': 'multiline\nmessage1\\\n', u'b': u'multiline\nmessage2\\\n'}),
- (u'line \\\ncontinuation',
- [u'line', u'continuation'],
- {u'_raw_params': u'line continuation'}),
- (u'not jinja}}',
- [u'not', u'jinja}}'],
- {u'_raw_params': u'not jinja}}'}),
- (u'a={{multiline\njinja}}',
- [u'a={{multiline\njinja}}'],
- {u'a': u'{{multiline\njinja}}'}),
(u'a={{jinja}}',
[u'a={{jinja}}'],
{u'a': u'{{jinja}}'}),
(u'a={{ jinja }}',
[u'a={{ jinja }}'],
{u'a': u'{{ jinja }}'}),
- (u'a={% jinja %}',
- [u'a={% jinja %}'],
- {u'a': u'{% jinja %}'}),
- (u'a={# jinja #}',
- [u'a={# jinja #}'],
- {u'a': u'{# jinja #}'}),
(u'a="{{jinja}}"',
[u'a="{{jinja}}"'],
{u'a': u'{{jinja}}'}),
@@ -128,50 +94,17 @@ SPLIT_DATA = (
(u'One\n Two\n Three\n',
[u'One\n ', u'Two\n ', u'Three\n'],
{u'_raw_params': u'One\n Two\n Three\n'}),
- (u'\nOne\n Two\n Three\n',
- [u'\n', u'One\n ', u'Two\n ', u'Three\n'],
- {u'_raw_params': u'\nOne\n Two\n Three\n'}),
)
-PARSE_KV_CHECK_RAW = (
- (u'raw=yes', {u'_raw_params': u'raw=yes'}),
- (u'creates=something', {u'creates': u'something'}),
-)
-
-PARSER_ERROR = (
- '"',
- "'",
- '{{',
- '{%',
- '{#',
-)
+SPLIT_ARGS = ((test[0], test[1]) for test in SPLIT_DATA)
+PARSE_KV = ((test[0], test[2]) for test in SPLIT_DATA)
-SPLIT_ARGS = tuple((test[0], test[1]) for test in SPLIT_DATA)
-PARSE_KV = tuple((test[0], test[2]) for test in SPLIT_DATA)
-
-@pytest.mark.parametrize("args, expected", SPLIT_ARGS, ids=[str(arg[0]) for arg in SPLIT_ARGS])
+@pytest.mark.parametrize("args, expected", SPLIT_ARGS)
def test_split_args(args, expected):
assert split_args(args) == expected
-@pytest.mark.parametrize("args, expected", PARSE_KV, ids=[str(arg[0]) for arg in PARSE_KV])
+@pytest.mark.parametrize("args, expected", PARSE_KV)
def test_parse_kv(args, expected):
assert parse_kv(args) == expected
-
-
-@pytest.mark.parametrize("args, expected", PARSE_KV_CHECK_RAW, ids=[str(arg[0]) for arg in PARSE_KV_CHECK_RAW])
-def test_parse_kv_check_raw(args, expected):
- assert parse_kv(args, check_raw=True) == expected
-
-
-@pytest.mark.parametrize("args", PARSER_ERROR)
-def test_split_args_error(args):
- with pytest.raises(AnsibleParserError):
- split_args(args)
-
-
-@pytest.mark.parametrize("args", PARSER_ERROR)
-def test_parse_kv_error(args):
- with pytest.raises(AnsibleParserError):
- parse_kv(args)
diff --git a/test/units/parsing/vault/test_vault.py b/test/units/parsing/vault/test_vault.py
index f94171a2..7afd3560 100644
--- a/test/units/parsing/vault/test_vault.py
+++ b/test/units/parsing/vault/test_vault.py
@@ -21,6 +21,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import binascii
import io
import os
import tempfile
@@ -33,7 +34,7 @@ from unittest.mock import patch, MagicMock
from ansible import errors
from ansible.module_utils import six
-from ansible.module_utils.common.text.converters import to_bytes, to_text
+from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing import vault
from units.mock.loader import DictDataLoader
@@ -605,6 +606,9 @@ class TestVaultLib(unittest.TestCase):
('test_id', text_secret)]
self.v = vault.VaultLib(self.vault_secrets)
+ def _vault_secrets(self, vault_id, secret):
+ return [(vault_id, secret)]
+
def _vault_secrets_from_password(self, vault_id, password):
return [(vault_id, TextVaultSecret(password))]
@@ -775,6 +779,43 @@ class TestVaultLib(unittest.TestCase):
b_plaintext = self.v.decrypt(b_vaulttext)
self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")
+ # FIXME This test isn't working quite yet.
+ @pytest.mark.skip(reason='This test is not ready yet')
+ def test_encrypt_decrypt_aes256_bad_hmac(self):
+
+ self.v.cipher_name = 'AES256'
+ # plaintext = "Setec Astronomy"
+ enc_data = '''$ANSIBLE_VAULT;1.1;AES256
+33363965326261303234626463623963633531343539616138316433353830356566396130353436
+3562643163366231316662386565383735653432386435610a306664636137376132643732393835
+63383038383730306639353234326630666539346233376330303938323639306661313032396437
+6233623062366136310a633866373936313238333730653739323461656662303864663666653563
+3138'''
+ b_data = to_bytes(enc_data, errors='strict', encoding='utf-8')
+ b_data = self.v._split_header(b_data)
+ foo = binascii.unhexlify(b_data)
+ lines = foo.splitlines()
+ # line 0 is salt, line 1 is hmac, line 2+ is ciphertext
+ b_salt = lines[0]
+ b_hmac = lines[1]
+ b_ciphertext_data = b'\n'.join(lines[2:])
+
+ b_ciphertext = binascii.unhexlify(b_ciphertext_data)
+ # b_orig_ciphertext = b_ciphertext[:]
+
+ # now muck with the text
+ # b_munged_ciphertext = b_ciphertext[:10] + b'\x00' + b_ciphertext[11:]
+ # b_munged_ciphertext = b_ciphertext
+ # assert b_orig_ciphertext != b_munged_ciphertext
+
+ b_ciphertext_data = binascii.hexlify(b_ciphertext)
+ b_payload = b'\n'.join([b_salt, b_hmac, b_ciphertext_data])
+ # reformat
+ b_invalid_ciphertext = self.v._format_output(b_payload)
+
+ # assert we throw an error
+ self.v.decrypt(b_invalid_ciphertext)
+
def test_decrypt_and_get_vault_id(self):
b_expected_plaintext = to_bytes('foo bar\n')
vaulttext = '''$ANSIBLE_VAULT;1.2;AES256;ansible_devel
diff --git a/test/units/parsing/vault/test_vault_editor.py b/test/units/parsing/vault/test_vault_editor.py
index 28561c6a..77509f08 100644
--- a/test/units/parsing/vault/test_vault_editor.py
+++ b/test/units/parsing/vault/test_vault_editor.py
@@ -33,7 +33,8 @@ from ansible import errors
from ansible.parsing import vault
from ansible.parsing.vault import VaultLib, VaultEditor, match_encrypt_secret
-from ansible.module_utils.common.text.converters import to_bytes, to_text
+from ansible.module_utils.six import PY3
+from ansible.module_utils._text import to_bytes, to_text
from units.mock.vault_helper import TextVaultSecret
@@ -87,10 +88,12 @@ class TestVaultEditor(unittest.TestCase):
suffix = '_ansible_unit_test_%s_' % (self.__class__.__name__)
return tempfile.mkdtemp(suffix=suffix)
- def _create_file(self, test_dir, name, content, symlink=False):
+ def _create_file(self, test_dir, name, content=None, symlink=False):
file_path = os.path.join(test_dir, name)
- with open(file_path, 'wb') as opened_file:
+ opened_file = open(file_path, 'wb')
+ if content:
opened_file.write(content)
+ opened_file.close()
return file_path
def _vault_editor(self, vault_secrets=None):
@@ -115,8 +118,11 @@ class TestVaultEditor(unittest.TestCase):
def test_stdin_binary(self):
stdin_data = '\0'
- fake_stream = StringIO(stdin_data)
- fake_stream.buffer = BytesIO(to_bytes(stdin_data))
+ if PY3:
+ fake_stream = StringIO(stdin_data)
+ fake_stream.buffer = BytesIO(to_bytes(stdin_data))
+ else:
+ fake_stream = BytesIO(to_bytes(stdin_data))
with patch('sys.stdin', fake_stream):
ve = self._vault_editor()
@@ -161,15 +167,17 @@ class TestVaultEditor(unittest.TestCase):
self.assertNotEqual(src_file_contents, b_ciphertext,
'b_ciphertext should be encrypted and not equal to src_contents')
- def _faux_editor(self, editor_args, new_src_contents):
+ def _faux_editor(self, editor_args, new_src_contents=None):
if editor_args[0] == 'shred':
return
tmp_path = editor_args[-1]
# simulate the tmp file being editted
- with open(tmp_path, 'wb') as tmp_file:
+ tmp_file = open(tmp_path, 'wb')
+ if new_src_contents:
tmp_file.write(new_src_contents)
+ tmp_file.close()
def _faux_command(self, tmp_path):
pass
@@ -190,13 +198,13 @@ class TestVaultEditor(unittest.TestCase):
ve._edit_file_helper(src_file_path, self.vault_secret, existing_data=src_file_contents)
- with open(src_file_path, 'rb') as new_target_file:
- new_target_file_contents = new_target_file.read()
- self.assertEqual(src_file_contents, new_target_file_contents)
+ new_target_file = open(src_file_path, 'rb')
+ new_target_file_contents = new_target_file.read()
+ self.assertEqual(src_file_contents, new_target_file_contents)
def _assert_file_is_encrypted(self, vault_editor, src_file_path, src_contents):
- with open(src_file_path, 'rb') as new_src_file:
- new_src_file_contents = new_src_file.read()
+ new_src_file = open(src_file_path, 'rb')
+ new_src_file_contents = new_src_file.read()
# TODO: assert that it is encrypted
self.assertTrue(vault.is_encrypted(new_src_file_contents))
@@ -331,8 +339,8 @@ class TestVaultEditor(unittest.TestCase):
ve.encrypt_file(src_file_path, self.vault_secret)
ve.edit_file(src_file_path)
- with open(src_file_path, 'rb') as new_src_file:
- new_src_file_contents = new_src_file.read()
+ new_src_file = open(src_file_path, 'rb')
+ new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.1;AES256' in new_src_file_contents)
@@ -359,8 +367,8 @@ class TestVaultEditor(unittest.TestCase):
vault_id='vault_secrets')
ve.edit_file(src_file_path)
- with open(src_file_path, 'rb') as new_src_file:
- new_src_file_contents = new_src_file.read()
+ new_src_file = open(src_file_path, 'rb')
+ new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.2;AES256;vault_secrets' in new_src_file_contents)
@@ -391,8 +399,8 @@ class TestVaultEditor(unittest.TestCase):
ve.edit_file(src_file_link_path)
- with open(src_file_path, 'rb') as new_src_file:
- new_src_file_contents = new_src_file.read()
+ new_src_file = open(src_file_path, 'rb')
+ new_src_file_contents = new_src_file.read()
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
@@ -410,6 +418,13 @@ class TestVaultEditor(unittest.TestCase):
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
+ new_src_contents = to_bytes("The info is different now.")
+
+ def faux_editor(editor_args):
+ self._faux_editor(editor_args, new_src_contents)
+
+ mock_sp_call.side_effect = faux_editor
+
ve = self._vault_editor()
self.assertRaisesRegex(errors.AnsibleError,
'input is not vault encrypted data',
@@ -463,14 +478,20 @@ class TestVaultEditor(unittest.TestCase):
ve = self._vault_editor(self._secrets("ansible"))
# make sure the password functions for the cipher
- ve.decrypt_file(v11_file.name)
+ error_hit = False
+ try:
+ ve.decrypt_file(v11_file.name)
+ except errors.AnsibleError:
+ error_hit = True
# verify decrypted content
- with open(v11_file.name, "rb") as f:
- fdata = to_text(f.read())
+ f = open(v11_file.name, "rb")
+ fdata = to_text(f.read())
+ f.close()
os.unlink(v11_file.name)
+ assert error_hit is False, "error decrypting 1.1 file"
assert fdata.strip() == "foo", "incorrect decryption of 1.1 file: %s" % fdata.strip()
def test_real_path_dash(self):
@@ -480,9 +501,21 @@ class TestVaultEditor(unittest.TestCase):
res = ve._real_path(filename)
self.assertEqual(res, '-')
- def test_real_path_not_dash(self):
+ def test_real_path_dev_null(self):
filename = '/dev/null'
ve = self._vault_editor()
res = ve._real_path(filename)
- self.assertNotEqual(res, '-')
+ self.assertEqual(res, '/dev/null')
+
+ def test_real_path_symlink(self):
+ self._test_dir = os.path.realpath(self._create_test_dir())
+ file_path = self._create_file(self._test_dir, 'test_file', content=b'this is a test file')
+ file_link_path = os.path.join(self._test_dir, 'a_link_to_test_file')
+
+ os.symlink(file_path, file_link_path)
+
+ ve = self._vault_editor()
+
+ res = ve._real_path(file_link_path)
+ self.assertEqual(res, file_path)
diff --git a/test/units/parsing/yaml/test_dumper.py b/test/units/parsing/yaml/test_dumper.py
index 8af1eeed..cbf5b456 100644
--- a/test/units/parsing/yaml/test_dumper.py
+++ b/test/units/parsing/yaml/test_dumper.py
@@ -19,6 +19,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import io
+import yaml
from jinja2.exceptions import UndefinedError
@@ -26,6 +27,7 @@ from units.compat import unittest
from ansible.parsing import vault
from ansible.parsing.yaml import dumper, objects
from ansible.parsing.yaml.loader import AnsibleLoader
+from ansible.module_utils.six import PY2
from ansible.template import AnsibleUndefined
from units.mock.yaml_helper import YamlTestUtils
@@ -74,6 +76,20 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
data_from_yaml = loader.get_single_data()
result = b_text
+ if PY2:
+ # https://pyyaml.org/wiki/PyYAMLDocumentation#string-conversion-python-2-only
+ # pyyaml on Python 2 can return either unicode or bytes when given byte strings.
+ # We normalize that to always return unicode on Python2 as that's right most of the
+ # time. However, this means byte strings can round trip through yaml on Python3 but
+ # not on Python2. To make this code work the same on Python2 and Python3 (we want
+ # the Python3 behaviour) we need to change the methods in Ansible to:
+ # (1) Let byte strings pass through yaml without being converted on Python2
+ # (2) Convert byte strings to text strings before being given to pyyaml (Without this,
+ # strings would end up as byte strings most of the time which would mostly be wrong)
+ # In practice, we mostly read bytes in from files and then pass that to pyyaml, for which
+ # the present behavior is correct.
+ # This is a workaround for the current behavior.
+ result = u'tr\xe9ma'
self.assertEqual(result, data_from_yaml)
@@ -89,7 +105,10 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
self.assertEqual(u_text, data_from_yaml)
def test_vars_with_sources(self):
- self._dump_string(VarsWithSources(), dumper=self.dumper)
+ try:
+ self._dump_string(VarsWithSources(), dumper=self.dumper)
+ except yaml.representer.RepresenterError:
+ self.fail("Dump VarsWithSources raised RepresenterError unexpectedly!")
def test_undefined(self):
undefined_object = AnsibleUndefined()
diff --git a/test/units/parsing/yaml/test_objects.py b/test/units/parsing/yaml/test_objects.py
index f899915d..f64b708f 100644
--- a/test/units/parsing/yaml/test_objects.py
+++ b/test/units/parsing/yaml/test_objects.py
@@ -24,7 +24,7 @@ from units.compat import unittest
from ansible.errors import AnsibleError
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.parsing import vault
from ansible.parsing.yaml.loader import AnsibleLoader
@@ -105,6 +105,11 @@ class TestAnsibleVaultEncryptedUnicode(unittest.TestCase, YamlTestUtils):
id_secret = vault.match_encrypt_secret(self.good_vault_secrets)
return objects.AnsibleVaultEncryptedUnicode.from_plaintext(seq, vault=self.vault, secret=id_secret[1])
+ def _from_ciphertext(self, ciphertext):
+ avu = objects.AnsibleVaultEncryptedUnicode(ciphertext)
+ avu.vault = self.vault
+ return avu
+
def test_empty_init(self):
self.assertRaises(TypeError, objects.AnsibleVaultEncryptedUnicode)
diff --git a/test/units/playbook/role/test_include_role.py b/test/units/playbook/role/test_include_role.py
index aa97da15..5e7625ba 100644
--- a/test/units/playbook/role/test_include_role.py
+++ b/test/units/playbook/role/test_include_role.py
@@ -108,6 +108,8 @@ class TestIncludeRole(unittest.TestCase):
# skip meta: role_complete
continue
role = task._role
+ if not role:
+ continue
yield (role.get_name(),
self.var_manager.get_vars(play=play, task=task))
@@ -199,7 +201,7 @@ class TestIncludeRole(unittest.TestCase):
self.assertEqual(task_vars.get('l3_variable'), 'l3-main')
self.assertEqual(task_vars.get('test_variable'), 'l3-main')
else:
- self.fail() # pragma: nocover
+ self.fail()
self.assertFalse(expected_roles)
@patch('ansible.playbook.role.definition.unfrackpath',
@@ -245,5 +247,5 @@ class TestIncludeRole(unittest.TestCase):
self.assertEqual(task_vars.get('l3_variable'), 'l3-alt')
self.assertEqual(task_vars.get('test_variable'), 'l3-alt')
else:
- self.fail() # pragma: nocover
+ self.fail()
self.assertFalse(expected_roles)
diff --git a/test/units/playbook/role/test_role.py b/test/units/playbook/role/test_role.py
index 9d6b0edc..5d47631f 100644
--- a/test/units/playbook/role/test_role.py
+++ b/test/units/playbook/role/test_role.py
@@ -21,12 +21,10 @@ __metaclass__ = type
from collections.abc import Container
-import pytest
-
from units.compat import unittest
from unittest.mock import patch, MagicMock
-from ansible.errors import AnsibleParserError
+from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook.block import Block
from units.mock.loader import DictDataLoader
@@ -44,9 +42,12 @@ class TestHashParams(unittest.TestCase):
self._assert_set(res)
self._assert_hashable(res)
- @staticmethod
- def _assert_hashable(res):
- hash(res)
+ def _assert_hashable(self, res):
+ a_dict = {}
+ try:
+ a_dict[res] = res
+ except TypeError as e:
+ self.fail('%s is not hashable: %s' % (res, e))
def _assert_set(self, res):
self.assertIsInstance(res, frozenset)
@@ -86,28 +87,36 @@ class TestHashParams(unittest.TestCase):
def test_generator(self):
def my_generator():
- yield
+ for i in ['a', 1, None, {}]:
+ yield i
params = my_generator()
res = hash_params(params)
self._assert_hashable(res)
- assert list(params)
def test_container_but_not_iterable(self):
# This is a Container that is not iterable, which is unlikely but...
class MyContainer(Container):
- def __init__(self, _some_thing):
- pass
+ def __init__(self, some_thing):
+ self.data = []
+ self.data.append(some_thing)
def __contains__(self, item):
- """Implementation omitted, since it will never be called."""
+ return item in self.data
+
+ def __hash__(self):
+ return hash(self.data)
+
+ def __len__(self):
+ return len(self.data)
- params = MyContainer('foo bar')
+ def __call__(self):
+ return False
- with pytest.raises(TypeError) as ex:
- hash_params(params)
+ foo = MyContainer('foo bar')
+ params = foo
- assert ex.value.args == ("'MyContainer' object is not iterable",)
+ self.assertRaises(TypeError, hash_params, params)
def test_param_dict_dupe_values(self):
params1 = {'foo': False}
@@ -142,18 +151,18 @@ class TestHashParams(unittest.TestCase):
self.assertNotEqual(hash(res1), hash(res2))
self.assertNotEqual(res1, res2)
- params_dict = {}
- params_dict[res1] = 'params1'
- params_dict[res2] = 'params2'
+ foo = {}
+ foo[res1] = 'params1'
+ foo[res2] = 'params2'
- self.assertEqual(len(params_dict), 2)
+ self.assertEqual(len(foo), 2)
- del params_dict[res2]
- self.assertEqual(len(params_dict), 1)
+ del foo[res2]
+ self.assertEqual(len(foo), 1)
- for key in params_dict:
- self.assertTrue(key in params_dict)
- self.assertIn(key, params_dict)
+ for key in foo:
+ self.assertTrue(key in foo)
+ self.assertIn(key, foo)
class TestRole(unittest.TestCase):
@@ -168,7 +177,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load('foo_tasks', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -190,7 +199,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load('foo_tasks', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play, from_files=dict(tasks='custom_main'))
@@ -208,7 +217,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load('foo_handlers', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -229,7 +238,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -250,7 +259,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -271,7 +280,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -294,7 +303,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -314,7 +323,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -361,7 +370,7 @@ class TestRole(unittest.TestCase):
mock_play = MagicMock()
mock_play.collections = None
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load('foo_metadata', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
@@ -406,7 +415,7 @@ class TestRole(unittest.TestCase):
})
mock_play = MagicMock()
- mock_play.role_cache = {}
+ mock_play.ROLE_CACHE = {}
i = RoleInclude.load(dict(role='foo_complex'), play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
diff --git a/test/units/playbook/test_base.py b/test/units/playbook/test_base.py
index bedd96a8..d5810e73 100644
--- a/test/units/playbook/test_base.py
+++ b/test/units/playbook/test_base.py
@@ -21,12 +21,13 @@ __metaclass__ = type
from units.compat import unittest
-from ansible.errors import AnsibleParserError, AnsibleAssertionError
+from ansible.errors import AnsibleParserError
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute, NonInheritableFieldAttribute
from ansible.template import Templar
from ansible.playbook import base
-from ansible.utils.unsafe_proxy import AnsibleUnsafeText
+from ansible.utils.unsafe_proxy import AnsibleUnsafeBytes, AnsibleUnsafeText
+from ansible.utils.sentinel import Sentinel
from units.mock.loader import DictDataLoader
@@ -330,6 +331,12 @@ class ExampleSubClass(base.Base):
def __init__(self):
super(ExampleSubClass, self).__init__()
+ def get_dep_chain(self):
+ if self._parent:
+ return self._parent.get_dep_chain()
+ else:
+ return None
+
class BaseSubClass(base.Base):
name = FieldAttribute(isa='string', default='', always_post_validate=True)
@@ -581,11 +588,10 @@ class TestBaseSubClass(TestBase):
bsc.post_validate, templar)
def test_attr_unknown(self):
- self.assertRaises(
- AnsibleAssertionError,
- self._base_validate,
- {'test_attr_unknown_isa': True}
- )
+ a_list = ['some string']
+ ds = {'test_attr_unknown_isa': a_list}
+ bsc = self._base_validate(ds)
+ self.assertEqual(bsc.test_attr_unknown_isa, a_list)
def test_attr_method(self):
ds = {'test_attr_method': 'value from the ds'}
diff --git a/test/units/playbook/test_collectionsearch.py b/test/units/playbook/test_collectionsearch.py
index d16541b7..be40d85e 100644
--- a/test/units/playbook/test_collectionsearch.py
+++ b/test/units/playbook/test_collectionsearch.py
@@ -22,6 +22,7 @@ from ansible.errors import AnsibleParserError
from ansible.playbook.play import Play
from ansible.playbook.task import Task
from ansible.playbook.block import Block
+from ansible.playbook.collectionsearch import CollectionSearch
import pytest
diff --git a/test/units/playbook/test_helpers.py b/test/units/playbook/test_helpers.py
index 23385c00..a89730ca 100644
--- a/test/units/playbook/test_helpers.py
+++ b/test/units/playbook/test_helpers.py
@@ -52,6 +52,10 @@ class MixinForMocks(object):
self.mock_inventory = MagicMock(name='MockInventory')
self.mock_inventory._hosts_cache = dict()
+ def _get_host(host_name):
+ return None
+
+ self.mock_inventory.get_host.side_effect = _get_host
# TODO: can we use a real VariableManager?
self.mock_variable_manager = MagicMock(name='MockVariableManager')
self.mock_variable_manager.get_vars.return_value = dict()
@@ -65,11 +69,11 @@ class MixinForMocks(object):
self._test_data_path = os.path.dirname(__file__)
self.fake_include_loader = DictDataLoader({"/dev/null/includes/test_include.yml": """
- - include_tasks: other_test_include.yml
+ - include: other_test_include.yml
- shell: echo 'hello world'
""",
"/dev/null/includes/static_test_include.yml": """
- - include_tasks: other_test_include.yml
+ - include: other_test_include.yml
- shell: echo 'hello static world'
""",
"/dev/null/includes/other_test_include.yml": """
@@ -82,6 +86,10 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
def setUp(self):
self._setup()
+ def _assert_is_task_list(self, results):
+ for result in results:
+ self.assertIsInstance(result, Task)
+
def _assert_is_task_list_or_blocks(self, results):
self.assertIsInstance(results, list)
for result in results:
@@ -160,57 +168,57 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
ds, play=self.mock_play, use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
- def test_one_bogus_include_tasks(self):
- ds = [{'include_tasks': 'somefile.yml'}]
+ def test_one_bogus_include(self):
+ ds = [{'include': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
- self.assertEqual(len(res), 1)
- self.assertIsInstance(res[0], TaskInclude)
+ self.assertEqual(len(res), 0)
- def test_one_bogus_include_tasks_use_handlers(self):
- ds = [{'include_tasks': 'somefile.yml'}]
+ def test_one_bogus_include_use_handlers(self):
+ ds = [{'include': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play, use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
- self.assertEqual(len(res), 1)
- self.assertIsInstance(res[0], TaskInclude)
+ self.assertEqual(len(res), 0)
- def test_one_bogus_import_tasks(self):
+ def test_one_bogus_include_static(self):
ds = [{'import_tasks': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
self.assertEqual(len(res), 0)
- def test_one_include_tasks(self):
- ds = [{'include_tasks': '/dev/null/includes/other_test_include.yml'}]
+ def test_one_include(self):
+ ds = [{'include': '/dev/null/includes/other_test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self.assertEqual(len(res), 1)
self._assert_is_task_list_or_blocks(res)
- def test_one_parent_include_tasks(self):
- ds = [{'include_tasks': '/dev/null/includes/test_include.yml'}]
+ def test_one_parent_include(self):
+ ds = [{'include': '/dev/null/includes/test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
- self.assertIsInstance(res[0], TaskInclude)
- self.assertIsNone(res[0]._parent)
+ self.assertIsInstance(res[0], Block)
+ self.assertIsInstance(res[0]._parent, TaskInclude)
- def test_one_include_tasks_tags(self):
- ds = [{'include_tasks': '/dev/null/includes/other_test_include.yml',
+ # TODO/FIXME: do this non deprecated way
+ def test_one_include_tags(self):
+ ds = [{'include': '/dev/null/includes/other_test_include.yml',
'tags': ['test_one_include_tags_tag1', 'and_another_tagB']
}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
- self.assertIsInstance(res[0], TaskInclude)
+ self.assertIsInstance(res[0], Block)
self.assertIn('test_one_include_tags_tag1', res[0].tags)
self.assertIn('and_another_tagB', res[0].tags)
- def test_one_parent_include_tasks_tags(self):
- ds = [{'include_tasks': '/dev/null/includes/test_include.yml',
+ # TODO/FIXME: do this non deprecated way
+ def test_one_parent_include_tags(self):
+ ds = [{'include': '/dev/null/includes/test_include.yml',
# 'vars': {'tags': ['test_one_parent_include_tags_tag1', 'and_another_tag2']}
'tags': ['test_one_parent_include_tags_tag1', 'and_another_tag2']
}
@@ -218,20 +226,20 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
- self.assertIsInstance(res[0], TaskInclude)
+ self.assertIsInstance(res[0], Block)
self.assertIn('test_one_parent_include_tags_tag1', res[0].tags)
self.assertIn('and_another_tag2', res[0].tags)
- def test_one_include_tasks_use_handlers(self):
- ds = [{'include_tasks': '/dev/null/includes/other_test_include.yml'}]
+ def test_one_include_use_handlers(self):
+ ds = [{'include': '/dev/null/includes/other_test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
self.assertIsInstance(res[0], Handler)
- def test_one_parent_include_tasks_use_handlers(self):
- ds = [{'include_tasks': '/dev/null/includes/test_include.yml'}]
+ def test_one_parent_include_use_handlers(self):
+ ds = [{'include': '/dev/null/includes/test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
diff --git a/test/units/playbook/test_included_file.py b/test/units/playbook/test_included_file.py
index c7a66b06..7341dffa 100644
--- a/test/units/playbook/test_included_file.py
+++ b/test/units/playbook/test_included_file.py
@@ -105,7 +105,7 @@ def test_included_file_instantiation():
assert inc_file._task is None
-def test_process_include_tasks_results(mock_iterator, mock_variable_manager):
+def test_process_include_results(mock_iterator, mock_variable_manager):
hostname = "testhost1"
hostname2 = "testhost2"
@@ -113,7 +113,7 @@ def test_process_include_tasks_results(mock_iterator, mock_variable_manager):
parent_task = Task.load(parent_task_ds)
parent_task._play = None
- task_ds = {'include_tasks': 'include_test.yml'}
+ task_ds = {'include': 'include_test.yml'}
loaded_task = TaskInclude.load(task_ds, task_include=parent_task)
return_data = {'include': 'include_test.yml'}
@@ -133,7 +133,7 @@ def test_process_include_tasks_results(mock_iterator, mock_variable_manager):
assert res[0]._vars == {}
-def test_process_include_tasks_diff_files(mock_iterator, mock_variable_manager):
+def test_process_include_diff_files(mock_iterator, mock_variable_manager):
hostname = "testhost1"
hostname2 = "testhost2"
@@ -141,11 +141,11 @@ def test_process_include_tasks_diff_files(mock_iterator, mock_variable_manager):
parent_task = Task.load(parent_task_ds)
parent_task._play = None
- task_ds = {'include_tasks': 'include_test.yml'}
+ task_ds = {'include': 'include_test.yml'}
loaded_task = TaskInclude.load(task_ds, task_include=parent_task)
loaded_task._play = None
- child_task_ds = {'include_tasks': 'other_include_test.yml'}
+ child_task_ds = {'include': 'other_include_test.yml'}
loaded_child_task = TaskInclude.load(child_task_ds, task_include=loaded_task)
loaded_child_task._play = None
@@ -175,7 +175,7 @@ def test_process_include_tasks_diff_files(mock_iterator, mock_variable_manager):
assert res[1]._vars == {}
-def test_process_include_tasks_simulate_free(mock_iterator, mock_variable_manager):
+def test_process_include_simulate_free(mock_iterator, mock_variable_manager):
hostname = "testhost1"
hostname2 = "testhost2"
@@ -186,7 +186,7 @@ def test_process_include_tasks_simulate_free(mock_iterator, mock_variable_manage
parent_task1._play = None
parent_task2._play = None
- task_ds = {'include_tasks': 'include_test.yml'}
+ task_ds = {'include': 'include_test.yml'}
loaded_task1 = TaskInclude.load(task_ds, task_include=parent_task1)
loaded_task2 = TaskInclude.load(task_ds, task_include=parent_task2)
diff --git a/test/units/playbook/test_play_context.py b/test/units/playbook/test_play_context.py
index 7461b45f..7c24de51 100644
--- a/test/units/playbook/test_play_context.py
+++ b/test/units/playbook/test_play_context.py
@@ -12,8 +12,10 @@ import pytest
from ansible import constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
+from ansible.errors import AnsibleError
from ansible.playbook.play_context import PlayContext
from ansible.playbook.play import Play
+from ansible.plugins.loader import become_loader
from ansible.utils import context_objects as co
diff --git a/test/units/playbook/test_taggable.py b/test/units/playbook/test_taggable.py
index c6ce35d3..3881e17d 100644
--- a/test/units/playbook/test_taggable.py
+++ b/test/units/playbook/test_taggable.py
@@ -29,7 +29,6 @@ class TaggableTestObj(Taggable):
def __init__(self):
self._loader = DictDataLoader({})
self.tags = []
- self._parent = None
class TestTaggable(unittest.TestCase):
diff --git a/test/units/playbook/test_task.py b/test/units/playbook/test_task.py
index e28d2ecd..070d7aa7 100644
--- a/test/units/playbook/test_task.py
+++ b/test/units/playbook/test_task.py
@@ -22,7 +22,6 @@ __metaclass__ = type
from units.compat import unittest
from unittest.mock import patch
from ansible.playbook.task import Task
-from ansible.plugins.loader import init_plugin_loader
from ansible.parsing.yaml import objects
from ansible import errors
@@ -75,7 +74,6 @@ class TestTask(unittest.TestCase):
@patch.object(errors.AnsibleError, '_get_error_lines_from_file')
def test_load_task_kv_form_error_36848(self, mock_get_err_lines):
- init_plugin_loader()
ds = objects.AnsibleMapping(kv_bad_args_ds)
ds.ansible_pos = ('test_task_faux_playbook.yml', 1, 1)
mock_get_err_lines.return_value = (kv_bad_args_str, '')
diff --git a/test/units/plugins/action/test_action.py b/test/units/plugins/action/test_action.py
index 33d09c42..f2bbe194 100644
--- a/test/units/plugins/action/test_action.py
+++ b/test/units/plugins/action/test_action.py
@@ -22,7 +22,6 @@ __metaclass__ = type
import os
import re
-from importlib import import_module
from ansible import constants as C
from units.compat import unittest
@@ -31,10 +30,9 @@ from unittest.mock import patch, MagicMock, mock_open
from ansible.errors import AnsibleError, AnsibleAuthenticationFailure
from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import shlex_quote, builtins
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.action import ActionBase
-from ansible.plugins.loader import init_plugin_loader
from ansible.template import Templar
from ansible.vars.clean import clean_facts
@@ -111,11 +109,6 @@ class TestActionBase(unittest.TestCase):
self.assertEqual(results, {})
def test_action_base__configure_module(self):
- init_plugin_loader()
- # Pre-populate the ansible.builtin collection
- # so reading the ansible_builtin_runtime.yml happens
- # before the mock_open below
- import_module('ansible_collections.ansible.builtin')
fake_loader = DictDataLoader({
})
@@ -269,8 +262,11 @@ class TestActionBase(unittest.TestCase):
def get_shell_opt(opt):
- assert opt == 'admin_users'
- ret = ['root', 'toor', 'Administrator']
+ ret = None
+ if opt == 'admin_users':
+ ret = ['root', 'toor', 'Administrator']
+ elif opt == 'remote_tmp':
+ ret = '~/.ansible/tmp'
return ret
@@ -666,10 +662,17 @@ class TestActionBase(unittest.TestCase):
mock_task.no_log = False
# create a mock connection, so we don't actually try and connect to things
+ def build_module_command(env_string, shebang, cmd, arg_path=None):
+ to_run = [env_string, cmd]
+ if arg_path:
+ to_run.append(arg_path)
+ return " ".join(to_run)
+
def get_option(option):
return {'admin_users': ['root', 'toor']}.get(option)
mock_connection = MagicMock()
+ mock_connection.build_module_command.side_effect = build_module_command
mock_connection.socket_path = None
mock_connection._shell.get_remote_filename.return_value = 'copy.py'
mock_connection._shell.join_path.side_effect = os.path.join
@@ -796,7 +799,41 @@ class TestActionBase(unittest.TestCase):
class TestActionBaseCleanReturnedData(unittest.TestCase):
def test(self):
+
+ fake_loader = DictDataLoader({
+ })
+ mock_module_loader = MagicMock()
+ mock_shared_loader_obj = MagicMock()
+ mock_shared_loader_obj.module_loader = mock_module_loader
+ connection_loader_paths = ['/tmp/asdfadf', '/usr/lib64/whatever',
+ 'dfadfasf',
+ 'foo.py',
+ '.*',
+ # FIXME: a path with parans breaks the regex
+ # '(.*)',
+ '/path/to/ansible/lib/ansible/plugins/connection/custom_connection.py',
+ '/path/to/ansible/lib/ansible/plugins/connection/ssh.py']
+
+ def fake_all(path_only=None):
+ for path in connection_loader_paths:
+ yield path
+
+ mock_connection_loader = MagicMock()
+ mock_connection_loader.all = fake_all
+
+ mock_shared_loader_obj.connection_loader = mock_connection_loader
+ mock_connection = MagicMock()
+ # mock_connection._shell.env_prefix.side_effect = env_prefix
+
+ # action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None)
+ action_base = DerivedActionBase(task=None,
+ connection=mock_connection,
+ play_context=None,
+ loader=fake_loader,
+ templar=None,
+ shared_loader_obj=mock_shared_loader_obj)
data = {'ansible_playbook_python': '/usr/bin/python',
+ # 'ansible_rsync_path': '/usr/bin/rsync',
'ansible_python_interpreter': '/usr/bin/python',
'ansible_ssh_some_var': 'whatever',
'ansible_ssh_host_key_somehost': 'some key here',
diff --git a/test/units/utils/display/test_curses.py b/test/units/plugins/action/test_pause.py
index 05efc41b..8ad6db72 100644
--- a/test/units/utils/display/test_curses.py
+++ b/test/units/plugins/action/test_pause.py
@@ -11,14 +11,16 @@ import io
import pytest
import sys
-import ansible.utils.display # make available for monkeypatch
-assert ansible.utils.display # avoid reporting as unused
+from ansible.plugins.action import pause # noqa: F401
+from ansible.module_utils.six import PY2
builtin_import = 'builtins.__import__'
+if PY2:
+ builtin_import = '__builtin__.__import__'
def test_pause_curses_tigetstr_none(mocker, monkeypatch):
- monkeypatch.delitem(sys.modules, 'ansible.utils.display')
+ monkeypatch.delitem(sys.modules, 'ansible.plugins.action.pause')
dunder_import = __import__
@@ -33,11 +35,7 @@ def test_pause_curses_tigetstr_none(mocker, monkeypatch):
mocker.patch(builtin_import, _import)
- mod = importlib.import_module('ansible.utils.display')
-
- assert mod.HAS_CURSES is True
-
- mod.setupterm()
+ mod = importlib.import_module('ansible.plugins.action.pause')
assert mod.HAS_CURSES is True
assert mod.MOVE_TO_BOL == b'\r'
@@ -45,7 +43,7 @@ def test_pause_curses_tigetstr_none(mocker, monkeypatch):
def test_pause_missing_curses(mocker, monkeypatch):
- monkeypatch.delitem(sys.modules, 'ansible.utils.display')
+ monkeypatch.delitem(sys.modules, 'ansible.plugins.action.pause')
dunder_import = __import__
@@ -57,12 +55,10 @@ def test_pause_missing_curses(mocker, monkeypatch):
mocker.patch(builtin_import, _import)
- mod = importlib.import_module('ansible.utils.display')
-
- assert mod.HAS_CURSES is False
+ mod = importlib.import_module('ansible.plugins.action.pause')
with pytest.raises(AttributeError):
- assert mod.curses
+ mod.curses
assert mod.HAS_CURSES is False
assert mod.MOVE_TO_BOL == b'\r'
@@ -71,7 +67,7 @@ def test_pause_missing_curses(mocker, monkeypatch):
@pytest.mark.parametrize('exc', (curses.error, TypeError, io.UnsupportedOperation))
def test_pause_curses_setupterm_error(mocker, monkeypatch, exc):
- monkeypatch.delitem(sys.modules, 'ansible.utils.display')
+ monkeypatch.delitem(sys.modules, 'ansible.plugins.action.pause')
dunder_import = __import__
@@ -86,11 +82,7 @@ def test_pause_curses_setupterm_error(mocker, monkeypatch, exc):
mocker.patch(builtin_import, _import)
- mod = importlib.import_module('ansible.utils.display')
-
- assert mod.HAS_CURSES is True
-
- mod.setupterm()
+ mod = importlib.import_module('ansible.plugins.action.pause')
assert mod.HAS_CURSES is False
assert mod.MOVE_TO_BOL == b'\r'
diff --git a/test/units/plugins/action/test_raw.py b/test/units/plugins/action/test_raw.py
index c50004a7..33480516 100644
--- a/test/units/plugins/action/test_raw.py
+++ b/test/units/plugins/action/test_raw.py
@@ -20,6 +20,7 @@ __metaclass__ = type
import os
+from ansible.errors import AnsibleActionFail
from units.compat import unittest
from unittest.mock import MagicMock, Mock
from ansible.plugins.action.raw import ActionModule
@@ -67,7 +68,10 @@ class TestCopyResultExclude(unittest.TestCase):
task.args = {'_raw_params': 'Args1'}
self.play_context.check_mode = True
- self.mock_am = ActionModule(task, self.connection, self.play_context, loader=None, templar=None, shared_loader_obj=None)
+ try:
+ self.mock_am = ActionModule(task, self.connection, self.play_context, loader=None, templar=None, shared_loader_obj=None)
+ except AnsibleActionFail:
+ pass
def test_raw_test_environment_is_None(self):
diff --git a/test/units/plugins/cache/test_cache.py b/test/units/plugins/cache/test_cache.py
index b4ffe4e3..25b84c06 100644
--- a/test/units/plugins/cache/test_cache.py
+++ b/test/units/plugins/cache/test_cache.py
@@ -29,7 +29,7 @@ from units.compat import unittest
from ansible.errors import AnsibleError
from ansible.plugins.cache import CachePluginAdjudicator
from ansible.plugins.cache.memory import CacheModule as MemoryCache
-from ansible.plugins.loader import cache_loader, init_plugin_loader
+from ansible.plugins.loader import cache_loader
from ansible.vars.fact_cache import FactCache
import pytest
@@ -66,7 +66,7 @@ class TestCachePluginAdjudicator(unittest.TestCase):
def test___getitem__(self):
with pytest.raises(KeyError):
- self.cache['foo'] # pylint: disable=pointless-statement
+ self.cache['foo']
def test_pop_with_default(self):
assert self.cache.pop('foo', 'bar') == 'bar'
@@ -183,7 +183,6 @@ class TestFactCache(unittest.TestCase):
assert len(self.cache.keys()) == 0
def test_plugin_load_failure(self):
- init_plugin_loader()
# See https://github.com/ansible/ansible/issues/18751
# Note no fact_connection config set, so this will fail
with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'):
diff --git a/test/units/plugins/connection/test_connection.py b/test/units/plugins/connection/test_connection.py
index 56095c60..38d66910 100644
--- a/test/units/plugins/connection/test_connection.py
+++ b/test/units/plugins/connection/test_connection.py
@@ -27,28 +27,6 @@ from ansible.plugins.connection import ConnectionBase
from ansible.plugins.loader import become_loader
-class NoOpConnection(ConnectionBase):
-
- @property
- def transport(self):
- """This method is never called by unit tests."""
-
- def _connect(self):
- """This method is never called by unit tests."""
-
- def exec_command(self):
- """This method is never called by unit tests."""
-
- def put_file(self):
- """This method is never called by unit tests."""
-
- def fetch_file(self):
- """This method is never called by unit tests."""
-
- def close(self):
- """This method is never called by unit tests."""
-
-
class TestConnectionBaseClass(unittest.TestCase):
def setUp(self):
@@ -67,8 +45,36 @@ class TestConnectionBaseClass(unittest.TestCase):
with self.assertRaises(TypeError):
ConnectionModule1() # pylint: disable=abstract-class-instantiated
+ class ConnectionModule2(ConnectionBase):
+ def get(self, key):
+ super(ConnectionModule2, self).get(key)
+
+ with self.assertRaises(TypeError):
+ ConnectionModule2() # pylint: disable=abstract-class-instantiated
+
def test_subclass_success(self):
- self.assertIsInstance(NoOpConnection(self.play_context, self.in_stream), NoOpConnection)
+ class ConnectionModule3(ConnectionBase):
+
+ @property
+ def transport(self):
+ pass
+
+ def _connect(self):
+ pass
+
+ def exec_command(self):
+ pass
+
+ def put_file(self):
+ pass
+
+ def fetch_file(self):
+ pass
+
+ def close(self):
+ pass
+
+ self.assertIsInstance(ConnectionModule3(self.play_context, self.in_stream), ConnectionModule3)
def test_check_password_prompt(self):
local = (
@@ -123,7 +129,28 @@ debug3: receive packet: type 98
debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo
'''
- c = NoOpConnection(self.play_context, self.in_stream)
+ class ConnectionFoo(ConnectionBase):
+
+ @property
+ def transport(self):
+ pass
+
+ def _connect(self):
+ pass
+
+ def exec_command(self):
+ pass
+
+ def put_file(self):
+ pass
+
+ def fetch_file(self):
+ pass
+
+ def close(self):
+ pass
+
+ c = ConnectionFoo(self.play_context, self.in_stream)
c.set_become_plugin(become_loader.get('sudo'))
c.become.prompt = '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
diff --git a/test/units/plugins/connection/test_local.py b/test/units/plugins/connection/test_local.py
index 483a881b..e5525855 100644
--- a/test/units/plugins/connection/test_local.py
+++ b/test/units/plugins/connection/test_local.py
@@ -21,6 +21,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
+import pytest
from units.compat import unittest
from ansible.plugins.connection import local
diff --git a/test/units/plugins/connection/test_paramiko_ssh.py b/test/units/plugins/connection/test_paramiko.py
index 03072613..dcf31772 100644
--- a/test/units/plugins/connection/test_paramiko_ssh.py
+++ b/test/units/plugins/connection/test_paramiko.py
@@ -23,8 +23,7 @@ __metaclass__ = type
from io import StringIO
import pytest
-from ansible.plugins.connection import paramiko_ssh as paramiko_ssh_module
-from ansible.plugins.loader import connection_loader
+from ansible.plugins.connection import paramiko_ssh
from ansible.playbook.play_context import PlayContext
@@ -45,14 +44,13 @@ def in_stream():
def test_paramiko_connection_module(play_context, in_stream):
assert isinstance(
- connection_loader.get('paramiko_ssh', play_context, in_stream),
- paramiko_ssh_module.Connection)
+ paramiko_ssh.Connection(play_context, in_stream),
+ paramiko_ssh.Connection)
def test_paramiko_connect(play_context, in_stream, mocker):
- paramiko_ssh = connection_loader.get('paramiko_ssh', play_context, in_stream)
- mocker.patch.object(paramiko_ssh, '_connect_uncached')
- connection = paramiko_ssh._connect()
+ mocker.patch.object(paramiko_ssh.Connection, '_connect_uncached')
+ connection = paramiko_ssh.Connection(play_context, in_stream)._connect()
- assert isinstance(connection, paramiko_ssh_module.Connection)
+ assert isinstance(connection, paramiko_ssh.Connection)
assert connection._connected is True
diff --git a/test/units/plugins/connection/test_ssh.py b/test/units/plugins/connection/test_ssh.py
index 48ad3b73..662dff91 100644
--- a/test/units/plugins/connection/test_ssh.py
+++ b/test/units/plugins/connection/test_ssh.py
@@ -24,13 +24,14 @@ from io import StringIO
import pytest
+from ansible import constants as C
from ansible.errors import AnsibleAuthenticationFailure
from units.compat import unittest
from unittest.mock import patch, MagicMock, PropertyMock
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils.compat.selectors import SelectorKey, EVENT_READ
from ansible.module_utils.six.moves import shlex_quote
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
from ansible.plugins.loader import connection_loader, become_loader
@@ -141,8 +142,9 @@ class TestConnectionBaseClass(unittest.TestCase):
conn.become.check_missing_password = MagicMock(side_effect=_check_missing_password)
def get_option(option):
- assert option == 'become_pass'
- return 'password'
+ if option == 'become_pass':
+ return 'password'
+ return None
conn.become.get_option = get_option
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nfoo\nline 3\nthis should be the remainder', False)
@@ -349,7 +351,7 @@ class MockSelector(object):
self.register = MagicMock(side_effect=self._register)
self.unregister = MagicMock(side_effect=self._unregister)
self.close = MagicMock()
- self.get_map = MagicMock()
+ self.get_map = MagicMock(side_effect=self._get_map)
self.select = MagicMock()
def _register(self, *args, **kwargs):
@@ -358,6 +360,9 @@ class MockSelector(object):
def _unregister(self, *args, **kwargs):
self.files_watched -= 1
+ def _get_map(self, *args, **kwargs):
+ return self.files_watched
+
@pytest.fixture
def mock_run_env(request, mocker):
@@ -452,8 +457,7 @@ class TestSSHConnectionRun(object):
def _password_with_prompt_examine_output(self, sourice, state, b_chunk, sudoable):
if state == 'awaiting_prompt':
self.conn._flags['become_prompt'] = True
- else:
- assert state == 'awaiting_escalation'
+ elif state == 'awaiting_escalation':
self.conn._flags['become_success'] = True
return (b'', b'')
@@ -542,6 +546,7 @@ class TestSSHConnectionRetries(object):
def test_incorrect_password(self, monkeypatch):
self.conn.set_option('host_key_checking', False)
self.conn.set_option('reconnection_retries', 5)
+ monkeypatch.setattr('time.sleep', lambda x: None)
self.mock_popen_res.stdout.read.side_effect = [b'']
self.mock_popen_res.stderr.read.side_effect = [b'Permission denied, please try again.\r\n']
@@ -664,6 +669,7 @@ class TestSSHConnectionRetries(object):
self.conn.set_option('reconnection_retries', 3)
monkeypatch.setattr('time.sleep', lambda x: None)
+ monkeypatch.setattr('ansible.plugins.connection.ssh.os.path.exists', lambda x: True)
self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
diff --git a/test/units/plugins/connection/test_winrm.py b/test/units/plugins/connection/test_winrm.py
index c3060da5..cb52814b 100644
--- a/test/units/plugins/connection/test_winrm.py
+++ b/test/units/plugins/connection/test_winrm.py
@@ -13,8 +13,8 @@ import pytest
from io import StringIO
from unittest.mock import MagicMock
-from ansible.errors import AnsibleConnectionFailure, AnsibleError
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.errors import AnsibleConnectionFailure
+from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader
from ansible.plugins.connection import winrm
@@ -441,103 +441,3 @@ class TestWinRMKerbAuth(object):
assert str(err.value) == \
"Kerberos auth failure for principal username with pexpect: " \
"Error with kinit\n<redacted>"
-
- def test_exec_command_with_timeout(self, monkeypatch):
- requests_exc = pytest.importorskip("requests.exceptions")
-
- pc = PlayContext()
- new_stdin = StringIO()
- conn = connection_loader.get('winrm', pc, new_stdin)
-
- mock_proto = MagicMock()
- mock_proto.run_command.side_effect = requests_exc.Timeout("msg")
-
- conn._connected = True
- conn._winrm_host = 'hostname'
-
- monkeypatch.setattr(conn, "_winrm_connect", lambda: mock_proto)
-
- with pytest.raises(AnsibleConnectionFailure) as e:
- conn.exec_command('cmd', in_data=None, sudoable=True)
-
- assert str(e.value) == "winrm connection error: msg"
-
- def test_exec_command_get_output_timeout(self, monkeypatch):
- requests_exc = pytest.importorskip("requests.exceptions")
-
- pc = PlayContext()
- new_stdin = StringIO()
- conn = connection_loader.get('winrm', pc, new_stdin)
-
- mock_proto = MagicMock()
- mock_proto.run_command.return_value = "command_id"
- mock_proto.send_message.side_effect = requests_exc.Timeout("msg")
-
- conn._connected = True
- conn._winrm_host = 'hostname'
-
- monkeypatch.setattr(conn, "_winrm_connect", lambda: mock_proto)
-
- with pytest.raises(AnsibleConnectionFailure) as e:
- conn.exec_command('cmd', in_data=None, sudoable=True)
-
- assert str(e.value) == "winrm connection error: msg"
-
- def test_connect_failure_auth_401(self, monkeypatch):
- pc = PlayContext()
- new_stdin = StringIO()
- conn = connection_loader.get('winrm', pc, new_stdin)
- conn.set_options(var_options={"ansible_winrm_transport": "basic", "_extras": {}})
-
- mock_proto = MagicMock()
- mock_proto.open_shell.side_effect = ValueError("Custom exc Code 401")
-
- mock_proto_init = MagicMock()
- mock_proto_init.return_value = mock_proto
- monkeypatch.setattr(winrm, "Protocol", mock_proto_init)
-
- with pytest.raises(AnsibleConnectionFailure, match="the specified credentials were rejected by the server"):
- conn.exec_command('cmd', in_data=None, sudoable=True)
-
- def test_connect_failure_other_exception(self, monkeypatch):
- pc = PlayContext()
- new_stdin = StringIO()
- conn = connection_loader.get('winrm', pc, new_stdin)
- conn.set_options(var_options={"ansible_winrm_transport": "basic", "_extras": {}})
-
- mock_proto = MagicMock()
- mock_proto.open_shell.side_effect = ValueError("Custom exc")
-
- mock_proto_init = MagicMock()
- mock_proto_init.return_value = mock_proto
- monkeypatch.setattr(winrm, "Protocol", mock_proto_init)
-
- with pytest.raises(AnsibleConnectionFailure, match="basic: Custom exc"):
- conn.exec_command('cmd', in_data=None, sudoable=True)
-
- def test_connect_failure_operation_timed_out(self, monkeypatch):
- pc = PlayContext()
- new_stdin = StringIO()
- conn = connection_loader.get('winrm', pc, new_stdin)
- conn.set_options(var_options={"ansible_winrm_transport": "basic", "_extras": {}})
-
- mock_proto = MagicMock()
- mock_proto.open_shell.side_effect = ValueError("Custom exc Operation timed out")
-
- mock_proto_init = MagicMock()
- mock_proto_init.return_value = mock_proto
- monkeypatch.setattr(winrm, "Protocol", mock_proto_init)
-
- with pytest.raises(AnsibleError, match="the connection attempt timed out"):
- conn.exec_command('cmd', in_data=None, sudoable=True)
-
- def test_connect_no_transport(self):
- pc = PlayContext()
- new_stdin = StringIO()
- conn = connection_loader.get('winrm', pc, new_stdin)
- conn.set_options(var_options={"_extras": {}})
- conn._build_winrm_kwargs()
- conn._winrm_transport = []
-
- with pytest.raises(AnsibleError, match="No transport found for WinRM connection"):
- conn._winrm_connect()
diff --git a/test/units/plugins/filter/test_core.py b/test/units/plugins/filter/test_core.py
index ab09ec43..df4e4725 100644
--- a/test/units/plugins/filter/test_core.py
+++ b/test/units/plugins/filter/test_core.py
@@ -3,11 +3,13 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+from jinja2.runtime import Undefined
+from jinja2.exceptions import UndefinedError
__metaclass__ = type
import pytest
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.plugins.filter.core import to_uuid
from ansible.errors import AnsibleFilterError
diff --git a/test/units/plugins/filter/test_mathstuff.py b/test/units/plugins/filter/test_mathstuff.py
index 4ac5487f..f7938714 100644
--- a/test/units/plugins/filter/test_mathstuff.py
+++ b/test/units/plugins/filter/test_mathstuff.py
@@ -1,8 +1,9 @@
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import annotations
-
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
import pytest
from jinja2 import Environment
@@ -11,68 +12,54 @@ import ansible.plugins.filter.mathstuff as ms
from ansible.errors import AnsibleFilterError, AnsibleFilterTypeError
-UNIQUE_DATA = [
- ([], []),
- ([1, 3, 4, 2], [1, 3, 4, 2]),
- ([1, 3, 2, 4, 2, 3], [1, 3, 2, 4]),
- ([1, 2, 3, 4], [1, 2, 3, 4]),
- ([1, 1, 4, 2, 1, 4, 3, 2], [1, 4, 2, 3]),
-]
-
-TWO_SETS_DATA = [
- ([], [], ([], [], [])),
- ([1, 2], [1, 2], ([1, 2], [], [])),
- ([1, 2], [3, 4], ([], [1, 2], [1, 2, 3, 4])),
- ([1, 2, 3], [5, 3, 4], ([3], [1, 2], [1, 2, 5, 4])),
- ([1, 2, 3], [4, 3, 5], ([3], [1, 2], [1, 2, 4, 5])),
-]
-
-
-def dict_values(values: list[int]) -> list[dict[str, int]]:
- """Return a list of non-hashable values derived from the given list."""
- return [dict(x=value) for value in values]
-
-
-for _data, _expected in list(UNIQUE_DATA):
- UNIQUE_DATA.append((dict_values(_data), dict_values(_expected)))
-
-for _dataset1, _dataset2, _expected in list(TWO_SETS_DATA):
- TWO_SETS_DATA.append((dict_values(_dataset1), dict_values(_dataset2), tuple(dict_values(answer) for answer in _expected)))
+UNIQUE_DATA = (([1, 3, 4, 2], [1, 3, 4, 2]),
+ ([1, 3, 2, 4, 2, 3], [1, 3, 2, 4]),
+ (['a', 'b', 'c', 'd'], ['a', 'b', 'c', 'd']),
+ (['a', 'a', 'd', 'b', 'a', 'd', 'c', 'b'], ['a', 'd', 'b', 'c']),
+ )
+TWO_SETS_DATA = (([1, 2], [3, 4], ([], sorted([1, 2]), sorted([1, 2, 3, 4]), sorted([1, 2, 3, 4]))),
+ ([1, 2, 3], [5, 3, 4], ([3], sorted([1, 2]), sorted([1, 2, 5, 4]), sorted([1, 2, 3, 4, 5]))),
+ (['a', 'b', 'c'], ['d', 'c', 'e'], (['c'], sorted(['a', 'b']), sorted(['a', 'b', 'd', 'e']), sorted(['a', 'b', 'c', 'e', 'd']))),
+ )
env = Environment()
-def assert_lists_contain_same_elements(a, b) -> None:
- """Assert that the two values given are lists that contain the same elements, even when the elements cannot be sorted or hashed."""
- assert isinstance(a, list)
- assert isinstance(b, list)
+@pytest.mark.parametrize('data, expected', UNIQUE_DATA)
+class TestUnique:
+ def test_unhashable(self, data, expected):
+ assert ms.unique(env, list(data)) == expected
- missing_from_a = [item for item in b if item not in a]
- missing_from_b = [item for item in a if item not in b]
+ def test_hashable(self, data, expected):
+ assert ms.unique(env, tuple(data)) == expected
- assert not missing_from_a, f'elements from `b` {missing_from_a} missing from `a` {a}'
- assert not missing_from_b, f'elements from `a` {missing_from_b} missing from `b` {b}'
+@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA)
+class TestIntersect:
+ def test_unhashable(self, dataset1, dataset2, expected):
+ assert sorted(ms.intersect(env, list(dataset1), list(dataset2))) == expected[0]
-@pytest.mark.parametrize('data, expected', UNIQUE_DATA, ids=str)
-def test_unique(data, expected):
- assert_lists_contain_same_elements(ms.unique(env, data), expected)
+ def test_hashable(self, dataset1, dataset2, expected):
+ assert sorted(ms.intersect(env, tuple(dataset1), tuple(dataset2))) == expected[0]
-@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA, ids=str)
-def test_intersect(dataset1, dataset2, expected):
- assert_lists_contain_same_elements(ms.intersect(env, dataset1, dataset2), expected[0])
+@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA)
+class TestDifference:
+ def test_unhashable(self, dataset1, dataset2, expected):
+ assert sorted(ms.difference(env, list(dataset1), list(dataset2))) == expected[1]
+ def test_hashable(self, dataset1, dataset2, expected):
+ assert sorted(ms.difference(env, tuple(dataset1), tuple(dataset2))) == expected[1]
-@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA, ids=str)
-def test_difference(dataset1, dataset2, expected):
- assert_lists_contain_same_elements(ms.difference(env, dataset1, dataset2), expected[1])
+@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA)
+class TestSymmetricDifference:
+ def test_unhashable(self, dataset1, dataset2, expected):
+ assert sorted(ms.symmetric_difference(env, list(dataset1), list(dataset2))) == expected[2]
-@pytest.mark.parametrize('dataset1, dataset2, expected', TWO_SETS_DATA, ids=str)
-def test_symmetric_difference(dataset1, dataset2, expected):
- assert_lists_contain_same_elements(ms.symmetric_difference(env, dataset1, dataset2), expected[2])
+ def test_hashable(self, dataset1, dataset2, expected):
+ assert sorted(ms.symmetric_difference(env, tuple(dataset1), tuple(dataset2))) == expected[2]
class TestLogarithm:
diff --git a/test/units/plugins/inventory/test_constructed.py b/test/units/plugins/inventory/test_constructed.py
index 8ae78f1d..581e0253 100644
--- a/test/units/plugins/inventory/test_constructed.py
+++ b/test/units/plugins/inventory/test_constructed.py
@@ -194,11 +194,11 @@ def test_parent_group_templating_error(inventory_module):
             'parent_group': '{{ location.barn-yard }}'
         }
     ]
-    with pytest.raises(AnsibleParserError) as ex:
+    with pytest.raises(AnsibleParserError) as err_message:
         inventory_module._add_host_to_keyed_groups(
             keyed_groups, host.vars, host.name, strict=True
         )
-    assert 'Could not generate parent group' in str(ex.value)
+    assert 'Could not generate parent group' in str(err_message.value)
     # invalid parent group did not raise an exception with strict=False
     inventory_module._add_host_to_keyed_groups(
         keyed_groups, host.vars, host.name, strict=False
@@ -213,17 +213,17 @@ def test_keyed_group_exclusive_argument(inventory_module):
     host = inventory_module.inventory.get_host('cow')
     keyed_groups = [
         {
-            'key': 'nickname',
+            'key': 'tag',
             'separator': '_',
             'default_value': 'default_value_name',
             'trailing_separator': True
         }
     ]
-    with pytest.raises(AnsibleParserError) as ex:
+    with pytest.raises(AnsibleParserError) as err_message:
         inventory_module._add_host_to_keyed_groups(
             keyed_groups, host.vars, host.name, strict=True
         )
-    assert 'parameters are mutually exclusive' in str(ex.value)
+    assert 'parameters are mutually exclusive' in str(err_message.value)
 
 
 def test_keyed_group_empty_value(inventory_module):
diff --git a/test/units/plugins/inventory/test_inventory.py b/test/units/plugins/inventory/test_inventory.py
index fb5342af..df246073 100644
--- a/test/units/plugins/inventory/test_inventory.py
+++ b/test/units/plugins/inventory/test_inventory.py
@@ -27,7 +27,7 @@ from unittest import mock
from ansible import constants as C
from units.compat import unittest
from ansible.module_utils.six import string_types
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from units.mock.path import mock_unfrackpath_noop
from ansible.inventory.manager import InventoryManager, split_host_pattern
diff --git a/test/units/plugins/inventory/test_script.py b/test/units/plugins/inventory/test_script.py
index 89eb4f5b..9f75199f 100644
--- a/test/units/plugins/inventory/test_script.py
+++ b/test/units/plugins/inventory/test_script.py
@@ -28,7 +28,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.loader import PluginLoader
from units.compat import unittest
-from ansible.module_utils.common.text.converters import to_bytes, to_native
+from ansible.module_utils._text import to_bytes, to_native
class TestInventoryModule(unittest.TestCase):
@@ -103,11 +103,3 @@ class TestInventoryModule(unittest.TestCase):
self.inventory_module.parse(self.inventory, self.loader, '/foo/bar/foobar.py')
assert e.value.message == to_native("failed to parse executable inventory script results from "
"/foo/bar/foobar.py: needs to be a json dict\ndummyédata\n")
-
- def test_get_host_variables_subprocess_script_raises_error(self):
- self.popen_result.returncode = 1
- self.popen_result.stderr = to_bytes("dummyéerror")
-
- with pytest.raises(AnsibleError) as e:
- self.inventory_module.get_host_variables('/foo/bar/foobar.py', 'dummy host')
- assert e.value.message == "Inventory script (/foo/bar/foobar.py) had an execution error: dummyéerror"
diff --git a/test/units/plugins/lookup/test_password.py b/test/units/plugins/lookup/test_password.py
index 685f2ce7..318bc10b 100644
--- a/test/units/plugins/lookup/test_password.py
+++ b/test/units/plugins/lookup/test_password.py
@@ -23,7 +23,7 @@ __metaclass__ = type
try:
import passlib
from passlib.handlers import pbkdf2
-except ImportError: # pragma: nocover
+except ImportError:
passlib = None
pbkdf2 = None
@@ -36,7 +36,7 @@ from unittest.mock import mock_open, patch
from ansible.errors import AnsibleError
from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import builtins
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
from ansible.plugins.loader import PluginLoader, lookup_loader
from ansible.plugins.lookup import password
@@ -416,6 +416,8 @@ class BaseTestLookupModule(unittest.TestCase):
password.os.open = lambda path, flag: None
self.os_close = password.os.close
password.os.close = lambda fd: None
+ self.os_remove = password.os.remove
+ password.os.remove = lambda path: None
self.makedirs_safe = password.makedirs_safe
password.makedirs_safe = lambda path, mode: None
@@ -423,6 +425,7 @@ class BaseTestLookupModule(unittest.TestCase):
password.os.path.exists = self.os_path_exists
password.os.open = self.os_open
password.os.close = self.os_close
+ password.os.remove = self.os_remove
password.makedirs_safe = self.makedirs_safe
@@ -464,17 +467,23 @@ class TestLookupModuleWithoutPasslib(BaseTestLookupModule):
def test_lock_been_held(self, mock_sleep):
# pretend the lock file is here
password.os.path.exists = lambda x: True
- with pytest.raises(AnsibleError):
+ try:
with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
# should timeout here
- self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)
+ results = self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)
+ self.fail("Lookup didn't timeout when lock already been held")
+ except AnsibleError:
+ pass
def test_lock_not_been_held(self):
# pretend now there is password file but no lock
password.os.path.exists = lambda x: x == to_bytes('/path/to/somewhere')
- with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
- # should not timeout here
- results = self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)
+ try:
+ with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
+ # should not timeout here
+ results = self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)
+ except AnsibleError:
+ self.fail('Lookup timeouts when lock is free')
for result in results:
self.assertEqual(result, u'hunter42')
@@ -522,8 +531,10 @@ class TestLookupModuleWithPasslib(BaseTestLookupModule):
self.assertEqual(int(str_parts[2]), crypt_parts['rounds'])
self.assertIsInstance(result, text_type)
+ @patch.object(PluginLoader, '_get_paths')
@patch('ansible.plugins.lookup.password._write_password_file')
- def test_password_already_created_encrypt(self, mock_write_file):
+ def test_password_already_created_encrypt(self, mock_get_paths, mock_write_file):
+ mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']
password.os.path.exists = lambda x: x == to_bytes('/path/to/somewhere')
with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
@@ -531,9 +542,6 @@ class TestLookupModuleWithPasslib(BaseTestLookupModule):
for result in results:
self.assertEqual(result, u'$pbkdf2-sha256$20000$ODc2NTQzMjE$Uikde0cv0BKaRaAXMrUQB.zvG4GmnjClwjghwIRf2gU')
- # Assert the password file is not rewritten
- mock_write_file.assert_not_called()
-
@pytest.mark.skipif(passlib is None, reason='passlib must be installed to run these tests')
class TestLookupModuleWithPasslibWrappedAlgo(BaseTestLookupModule):
diff --git a/test/units/plugins/strategy/test_strategy.py b/test/units/plugins/strategy/test_strategy.py
new file mode 100644
index 00000000..f935f4b5
--- /dev/null
+++ b/test/units/plugins/strategy/test_strategy.py
@@ -0,0 +1,492 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from units.mock.loader import DictDataLoader
+import uuid
+
+from units.compat import unittest
+from unittest.mock import patch, MagicMock
+from ansible.executor.process.worker import WorkerProcess
+from ansible.executor.task_queue_manager import TaskQueueManager
+from ansible.executor.task_result import TaskResult
+from ansible.inventory.host import Host
+from ansible.module_utils.six.moves import queue as Queue
+from ansible.playbook.block import Block
+from ansible.playbook.handler import Handler
+from ansible.plugins.strategy import StrategyBase
+
+import pytest
+
+pytestmark = pytest.mark.skipif(True, reason="Temporarily disabled due to fragile tests that need rewritten")
+
+
+class TestStrategyBase(unittest.TestCase):
+
+ def test_strategy_base_init(self):
+ queue_items = []
+
+ def _queue_empty(*args, **kwargs):
+ return len(queue_items) == 0
+
+ def _queue_get(*args, **kwargs):
+ if len(queue_items) == 0:
+ raise Queue.Empty
+ else:
+ return queue_items.pop()
+
+ def _queue_put(item, *args, **kwargs):
+ queue_items.append(item)
+
+ mock_queue = MagicMock()
+ mock_queue.empty.side_effect = _queue_empty
+ mock_queue.get.side_effect = _queue_get
+ mock_queue.put.side_effect = _queue_put
+
+ mock_tqm = MagicMock(TaskQueueManager)
+ mock_tqm._final_q = mock_queue
+ mock_tqm._workers = []
+ strategy_base = StrategyBase(tqm=mock_tqm)
+ strategy_base.cleanup()
+
+ def test_strategy_base_run(self):
+ queue_items = []
+
+ def _queue_empty(*args, **kwargs):
+ return len(queue_items) == 0
+
+ def _queue_get(*args, **kwargs):
+ if len(queue_items) == 0:
+ raise Queue.Empty
+ else:
+ return queue_items.pop()
+
+ def _queue_put(item, *args, **kwargs):
+ queue_items.append(item)
+
+ mock_queue = MagicMock()
+ mock_queue.empty.side_effect = _queue_empty
+ mock_queue.get.side_effect = _queue_get
+ mock_queue.put.side_effect = _queue_put
+
+ mock_tqm = MagicMock(TaskQueueManager)
+ mock_tqm._final_q = mock_queue
+ mock_tqm._stats = MagicMock()
+ mock_tqm.send_callback.return_value = None
+
+ for attr in ('RUN_OK', 'RUN_ERROR', 'RUN_FAILED_HOSTS', 'RUN_UNREACHABLE_HOSTS'):
+ setattr(mock_tqm, attr, getattr(TaskQueueManager, attr))
+
+ mock_iterator = MagicMock()
+ mock_iterator._play = MagicMock()
+ mock_iterator._play.handlers = []
+
+ mock_play_context = MagicMock()
+
+ mock_tqm._failed_hosts = dict()
+ mock_tqm._unreachable_hosts = dict()
+ mock_tqm._workers = []
+ strategy_base = StrategyBase(tqm=mock_tqm)
+
+ mock_host = MagicMock()
+ mock_host.name = 'host1'
+
+ self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context), mock_tqm.RUN_OK)
+ self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=TaskQueueManager.RUN_ERROR), mock_tqm.RUN_ERROR)
+ mock_tqm._failed_hosts = dict(host1=True)
+ mock_iterator.get_failed_hosts.return_value = [mock_host]
+ self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), mock_tqm.RUN_FAILED_HOSTS)
+ mock_tqm._unreachable_hosts = dict(host1=True)
+ mock_iterator.get_failed_hosts.return_value = []
+ self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), mock_tqm.RUN_UNREACHABLE_HOSTS)
+ strategy_base.cleanup()
+
+ def test_strategy_base_get_hosts(self):
+ queue_items = []
+
+ def _queue_empty(*args, **kwargs):
+ return len(queue_items) == 0
+
+ def _queue_get(*args, **kwargs):
+ if len(queue_items) == 0:
+ raise Queue.Empty
+ else:
+ return queue_items.pop()
+
+ def _queue_put(item, *args, **kwargs):
+ queue_items.append(item)
+
+ mock_queue = MagicMock()
+ mock_queue.empty.side_effect = _queue_empty
+ mock_queue.get.side_effect = _queue_get
+ mock_queue.put.side_effect = _queue_put
+
+ mock_hosts = []
+ for i in range(0, 5):
+ mock_host = MagicMock()
+ mock_host.name = "host%02d" % (i + 1)
+ mock_host.has_hostkey = True
+ mock_hosts.append(mock_host)
+
+ mock_hosts_names = [h.name for h in mock_hosts]
+
+ mock_inventory = MagicMock()
+ mock_inventory.get_hosts.return_value = mock_hosts
+
+ mock_tqm = MagicMock()
+ mock_tqm._final_q = mock_queue
+ mock_tqm.get_inventory.return_value = mock_inventory
+
+ mock_play = MagicMock()
+ mock_play.hosts = ["host%02d" % (i + 1) for i in range(0, 5)]
+
+ strategy_base = StrategyBase(tqm=mock_tqm)
+ strategy_base._hosts_cache = strategy_base._hosts_cache_all = mock_hosts_names
+
+ mock_tqm._failed_hosts = []
+ mock_tqm._unreachable_hosts = []
+ self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), [h.name for h in mock_hosts])
+
+ mock_tqm._failed_hosts = ["host01"]
+ self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), [h.name for h in mock_hosts[1:]])
+ self.assertEqual(strategy_base.get_failed_hosts(play=mock_play), [mock_hosts[0].name])
+
+ mock_tqm._unreachable_hosts = ["host02"]
+ self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), [h.name for h in mock_hosts[2:]])
+ strategy_base.cleanup()
+
+ @patch.object(WorkerProcess, 'run')
+ def test_strategy_base_queue_task(self, mock_worker):
+ def fake_run(self):
+ return
+
+ mock_worker.run.side_effect = fake_run
+
+ fake_loader = DictDataLoader()
+ mock_var_manager = MagicMock()
+ mock_host = MagicMock()
+ mock_host.get_vars.return_value = dict()
+ mock_host.has_hostkey = True
+ mock_inventory = MagicMock()
+ mock_inventory.get.return_value = mock_host
+
+ tqm = TaskQueueManager(
+ inventory=mock_inventory,
+ variable_manager=mock_var_manager,
+ loader=fake_loader,
+ passwords=None,
+ forks=3,
+ )
+ tqm._initialize_processes(3)
+ tqm.hostvars = dict()
+
+ mock_task = MagicMock()
+ mock_task._uuid = 'abcd'
+ mock_task.throttle = 0
+
+ try:
+ strategy_base = StrategyBase(tqm=tqm)
+ strategy_base._queue_task(host=mock_host, task=mock_task, task_vars=dict(), play_context=MagicMock())
+ self.assertEqual(strategy_base._cur_worker, 1)
+ self.assertEqual(strategy_base._pending_results, 1)
+ strategy_base._queue_task(host=mock_host, task=mock_task, task_vars=dict(), play_context=MagicMock())
+ self.assertEqual(strategy_base._cur_worker, 2)
+ self.assertEqual(strategy_base._pending_results, 2)
+ strategy_base._queue_task(host=mock_host, task=mock_task, task_vars=dict(), play_context=MagicMock())
+ self.assertEqual(strategy_base._cur_worker, 0)
+ self.assertEqual(strategy_base._pending_results, 3)
+ finally:
+ tqm.cleanup()
+
+ def test_strategy_base_process_pending_results(self):
+ mock_tqm = MagicMock()
+ mock_tqm._terminated = False
+ mock_tqm._failed_hosts = dict()
+ mock_tqm._unreachable_hosts = dict()
+ mock_tqm.send_callback.return_value = None
+
+ queue_items = []
+
+ def _queue_empty(*args, **kwargs):
+ return len(queue_items) == 0
+
+ def _queue_get(*args, **kwargs):
+ if len(queue_items) == 0:
+ raise Queue.Empty
+ else:
+ return queue_items.pop()
+
+ def _queue_put(item, *args, **kwargs):
+ queue_items.append(item)
+
+ mock_queue = MagicMock()
+ mock_queue.empty.side_effect = _queue_empty
+ mock_queue.get.side_effect = _queue_get
+ mock_queue.put.side_effect = _queue_put
+ mock_tqm._final_q = mock_queue
+
+ mock_tqm._stats = MagicMock()
+ mock_tqm._stats.increment.return_value = None
+
+ mock_play = MagicMock()
+
+ mock_host = MagicMock()
+ mock_host.name = 'test01'
+ mock_host.vars = dict()
+ mock_host.get_vars.return_value = dict()
+ mock_host.has_hostkey = True
+
+ mock_task = MagicMock()
+ mock_task._role = None
+ mock_task._parent = None
+ mock_task.ignore_errors = False
+ mock_task.ignore_unreachable = False
+ mock_task._uuid = str(uuid.uuid4())
+ mock_task.loop = None
+ mock_task.copy.return_value = mock_task
+
+ mock_handler_task = Handler()
+ mock_handler_task.name = 'test handler'
+ mock_handler_task.action = 'foo'
+ mock_handler_task._parent = None
+ mock_handler_task._uuid = 'xxxxxxxxxxxxx'
+
+ mock_iterator = MagicMock()
+ mock_iterator._play = mock_play
+ mock_iterator.mark_host_failed.return_value = None
+ mock_iterator.get_next_task_for_host.return_value = (None, None)
+
+ mock_handler_block = MagicMock()
+ mock_handler_block.name = '' # implicit unnamed block
+ mock_handler_block.block = [mock_handler_task]
+ mock_handler_block.rescue = []
+ mock_handler_block.always = []
+ mock_play.handlers = [mock_handler_block]
+
+ mock_group = MagicMock()
+ mock_group.add_host.return_value = None
+
+ def _get_host(host_name):
+ if host_name == 'test01':
+ return mock_host
+ return None
+
+ def _get_group(group_name):
+ if group_name in ('all', 'foo'):
+ return mock_group
+ return None
+
+ mock_inventory = MagicMock()
+ mock_inventory._hosts_cache = dict()
+ mock_inventory.hosts.return_value = mock_host
+ mock_inventory.get_host.side_effect = _get_host
+ mock_inventory.get_group.side_effect = _get_group
+ mock_inventory.clear_pattern_cache.return_value = None
+ mock_inventory.get_host_vars.return_value = {}
+ mock_inventory.hosts.get.return_value = mock_host
+
+ mock_var_mgr = MagicMock()
+ mock_var_mgr.set_host_variable.return_value = None
+ mock_var_mgr.set_host_facts.return_value = None
+ mock_var_mgr.get_vars.return_value = dict()
+
+ strategy_base = StrategyBase(tqm=mock_tqm)
+ strategy_base._inventory = mock_inventory
+ strategy_base._variable_manager = mock_var_mgr
+ strategy_base._blocked_hosts = dict()
+
+ def _has_dead_workers():
+ return False
+
+ strategy_base._tqm.has_dead_workers.side_effect = _has_dead_workers
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 0)
+
+ task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(changed=True))
+ queue_items.append(task_result)
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+
+ def mock_queued_task_cache():
+ return {
+ (mock_host.name, mock_task._uuid): {
+ 'task': mock_task,
+ 'host': mock_host,
+ 'task_vars': {},
+ 'play_context': {},
+ }
+ }
+
+ strategy_base._queued_task_cache = mock_queued_task_cache()
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(results[0], task_result)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+
+ task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"failed":true}')
+ queue_items.append(task_result)
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ mock_iterator.is_failed.return_value = True
+ strategy_base._queued_task_cache = mock_queued_task_cache()
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(results[0], task_result)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+ # self.assertIn('test01', mock_tqm._failed_hosts)
+ # del mock_tqm._failed_hosts['test01']
+ mock_iterator.is_failed.return_value = False
+
+ task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"unreachable": true}')
+ queue_items.append(task_result)
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ strategy_base._queued_task_cache = mock_queued_task_cache()
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(results[0], task_result)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+ self.assertIn('test01', mock_tqm._unreachable_hosts)
+ del mock_tqm._unreachable_hosts['test01']
+
+ task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"skipped": true}')
+ queue_items.append(task_result)
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ strategy_base._queued_task_cache = mock_queued_task_cache()
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(results[0], task_result)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+
+ queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(add_host=dict(host_name='newhost01', new_groups=['foo']))))
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ strategy_base._queued_task_cache = mock_queued_task_cache()
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+
+ queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(add_group=dict(group_name='foo'))))
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ strategy_base._queued_task_cache = mock_queued_task_cache()
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+
+ queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(changed=True, _ansible_notify=['test handler'])))
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ strategy_base._queued_task_cache = mock_queued_task_cache()
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+ self.assertEqual(mock_iterator._play.handlers[0].block[0], mock_handler_task)
+
+ # queue_items.append(('set_host_var', mock_host, mock_task, None, 'foo', 'bar'))
+ # results = strategy_base._process_pending_results(iterator=mock_iterator)
+ # self.assertEqual(len(results), 0)
+ # self.assertEqual(strategy_base._pending_results, 1)
+
+ # queue_items.append(('set_host_facts', mock_host, mock_task, None, 'foo', dict()))
+ # results = strategy_base._process_pending_results(iterator=mock_iterator)
+ # self.assertEqual(len(results), 0)
+ # self.assertEqual(strategy_base._pending_results, 1)
+
+ # queue_items.append(('bad'))
+ # self.assertRaises(AnsibleError, strategy_base._process_pending_results, iterator=mock_iterator)
+ strategy_base.cleanup()
+
+ def test_strategy_base_load_included_file(self):
+ fake_loader = DictDataLoader({
+ "test.yml": """
+ - debug: msg='foo'
+ """,
+ "bad.yml": """
+ """,
+ })
+
+ queue_items = []
+
+ def _queue_empty(*args, **kwargs):
+ return len(queue_items) == 0
+
+ def _queue_get(*args, **kwargs):
+ if len(queue_items) == 0:
+ raise Queue.Empty
+ else:
+ return queue_items.pop()
+
+ def _queue_put(item, *args, **kwargs):
+ queue_items.append(item)
+
+ mock_queue = MagicMock()
+ mock_queue.empty.side_effect = _queue_empty
+ mock_queue.get.side_effect = _queue_get
+ mock_queue.put.side_effect = _queue_put
+
+ mock_tqm = MagicMock()
+ mock_tqm._final_q = mock_queue
+
+ strategy_base = StrategyBase(tqm=mock_tqm)
+ strategy_base._loader = fake_loader
+ strategy_base.cleanup()
+
+ mock_play = MagicMock()
+
+ mock_block = MagicMock()
+ mock_block._play = mock_play
+ mock_block.vars = dict()
+
+ mock_task = MagicMock()
+ mock_task._block = mock_block
+ mock_task._role = None
+
+ # NOTE Mocking calls below to account for passing parent_block=ti_copy.build_parent_block()
+ # into load_list_of_blocks() in _load_included_file. Not doing so meant that retrieving
+ # `collection` attr from parent would result in getting MagicMock instance
+ # instead of an empty list.
+ mock_task._parent = MagicMock()
+ mock_task.copy.return_value = mock_task
+ mock_task.build_parent_block.return_value = mock_block
+ mock_block._get_parent_attribute.return_value = None
+
+ mock_iterator = MagicMock()
+ mock_iterator.mark_host_failed.return_value = None
+
+ mock_inc_file = MagicMock()
+ mock_inc_file._task = mock_task
+
+ mock_inc_file._filename = "test.yml"
+ res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
+ self.assertEqual(len(res), 1)
+ self.assertTrue(isinstance(res[0], Block))
+
+ mock_inc_file._filename = "bad.yml"
+ res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
+ self.assertEqual(res, [])
diff --git a/test/units/plugins/test_plugins.py b/test/units/plugins/test_plugins.py
index ba2ad2b6..be123b15 100644
--- a/test/units/plugins/test_plugins.py
+++ b/test/units/plugins/test_plugins.py
@@ -46,14 +46,14 @@ class TestErrors(unittest.TestCase):
# python library, and then uses the __file__ attribute of
# the result for that to get the library path, so we mock
# that here and patch the builtin to use our mocked result
- foo_pkg = MagicMock()
- bar_pkg = MagicMock()
+ foo = MagicMock()
+ bar = MagicMock()
bam = MagicMock()
bam.__file__ = '/path/to/my/foo/bar/bam/__init__.py'
- bar_pkg.bam = bam
- foo_pkg.return_value.bar = bar_pkg
+ bar.bam = bam
+ foo.return_value.bar = bar
pl = PluginLoader('test', 'foo.bar.bam', 'test', 'test_plugin')
- with patch('builtins.__import__', foo_pkg):
+ with patch('builtins.__import__', foo):
self.assertEqual(pl._get_package_paths(), ['/path/to/my/foo/bar/bam'])
def test_plugins__get_paths(self):
diff --git a/test/units/requirements.txt b/test/units/requirements.txt
index c77c55cd..1822adaa 100644
--- a/test/units/requirements.txt
+++ b/test/units/requirements.txt
@@ -1,4 +1,4 @@
-bcrypt ; python_version >= '3.10' # controller only
-passlib ; python_version >= '3.10' # controller only
-pexpect ; python_version >= '3.10' # controller only
-pywinrm ; python_version >= '3.10' # controller only
+bcrypt ; python_version >= '3.9' # controller only
+passlib ; python_version >= '3.9' # controller only
+pexpect ; python_version >= '3.9' # controller only
+pywinrm ; python_version >= '3.9' # controller only
diff --git a/test/units/template/test_templar.py b/test/units/template/test_templar.py
index 02840e16..6747f768 100644
--- a/test/units/template/test_templar.py
+++ b/test/units/template/test_templar.py
@@ -22,10 +22,11 @@ __metaclass__ = type
from jinja2.runtime import Context
from units.compat import unittest
+from unittest.mock import patch
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
-from ansible.plugins.loader import init_plugin_loader
+from ansible.module_utils.six import string_types
from ansible.template import Templar, AnsibleContext, AnsibleEnvironment, AnsibleUndefined
from ansible.utils.unsafe_proxy import AnsibleUnsafe, wrap_var
from units.mock.loader import DictDataLoader
@@ -33,7 +34,6 @@ from units.mock.loader import DictDataLoader
class BaseTemplar(object):
def setUp(self):
- init_plugin_loader()
self.test_vars = dict(
foo="bar",
bam="{{foo}}",
@@ -62,6 +62,14 @@ class BaseTemplar(object):
return self._ansible_context._is_unsafe(obj)
+# class used for testing arbitrary objects passed to template
+class SomeClass(object):
+ foo = 'bar'
+
+ def __init__(self):
+ self.blip = 'blip'
+
+
class SomeUnsafeClass(AnsibleUnsafe):
def __init__(self):
super(SomeUnsafeClass, self).__init__()
@@ -258,6 +266,8 @@ class TestTemplarMisc(BaseTemplar, unittest.TestCase):
templar.available_variables = "foo=bam"
except AssertionError:
pass
+ except Exception as e:
+ self.fail(e)
def test_templar_escape_backslashes(self):
# Rule of thumb: If escape backslashes is True you should end up with
diff --git a/test/units/template/test_vars.py b/test/units/template/test_vars.py
index f43cfac4..514104f2 100644
--- a/test/units/template/test_vars.py
+++ b/test/units/template/test_vars.py
@@ -19,16 +19,23 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.template import Templar
+from units.compat import unittest
+from unittest.mock import MagicMock
+
from ansible.template.vars import AnsibleJ2Vars
-def test_globals_empty():
- assert isinstance(dict(AnsibleJ2Vars(Templar(None), {})), dict)
+class TestVars(unittest.TestCase):
+ def setUp(self):
+ self.mock_templar = MagicMock(name='mock_templar')
+ def test_globals_empty(self):
+ ajvars = AnsibleJ2Vars(self.mock_templar, {})
+ res = dict(ajvars)
+ self.assertIsInstance(res, dict)
-def test_globals():
- res = dict(AnsibleJ2Vars(Templar(None), {'foo': 'bar', 'blip': [1, 2, 3]}))
- assert isinstance(res, dict)
- assert 'foo' in res
- assert res['foo'] == 'bar'
+ def test_globals(self):
+ res = dict(AnsibleJ2Vars(self.mock_templar, {'foo': 'bar', 'blip': [1, 2, 3]}))
+ self.assertIsInstance(res, dict)
+ self.assertIn('foo', res)
+ self.assertEqual(res['foo'], 'bar')
diff --git a/test/units/test_constants.py b/test/units/test_constants.py
new file mode 100644
index 00000000..a206d231
--- /dev/null
+++ b/test/units/test_constants.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+# (c) 2017 Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pwd
+import os
+
+import pytest
+
+from ansible import constants
+from ansible.module_utils.six import StringIO
+from ansible.module_utils.six.moves import configparser
+from ansible.module_utils._text import to_text
+
+
+@pytest.fixture
+def cfgparser():
+ CFGDATA = StringIO("""
+[defaults]
+defaults_one = 'data_defaults_one'
+
+[level1]
+level1_one = 'data_level1_one'
+ """)
+ p = configparser.ConfigParser()
+ p.readfp(CFGDATA)
+ return p
+
+
+@pytest.fixture
+def user():
+ user = {}
+ user['uid'] = os.geteuid()
+
+ pwd_entry = pwd.getpwuid(user['uid'])
+ user['username'] = pwd_entry.pw_name
+ user['home'] = pwd_entry.pw_dir
+
+ return user
+
+
+@pytest.fixture
+def cfg_file():
+ data = '/ansible/test/cfg/path'
+ old_cfg_file = constants.CONFIG_FILE
+ constants.CONFIG_FILE = os.path.join(data, 'ansible.cfg')
+ yield data
+
+ constants.CONFIG_FILE = old_cfg_file
+
+
+@pytest.fixture
+def null_cfg_file():
+ old_cfg_file = constants.CONFIG_FILE
+ del constants.CONFIG_FILE
+ yield
+
+ constants.CONFIG_FILE = old_cfg_file
+
+
+@pytest.fixture
+def cwd():
+ data = '/ansible/test/cwd/'
+ old_cwd = os.getcwd
+ os.getcwd = lambda: data
+
+ old_cwdu = None
+ if hasattr(os, 'getcwdu'):
+ old_cwdu = os.getcwdu
+ os.getcwdu = lambda: to_text(data)
+
+ yield data
+
+ os.getcwd = old_cwd
+ if hasattr(os, 'getcwdu'):
+ os.getcwdu = old_cwdu
diff --git a/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py b/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py
index a85f422a..9d30580f 100644
--- a/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py
+++ b/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py
@@ -1,7 +1,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ..module_utils.my_util import question # pylint: disable=unused-import
+from ..module_utils.my_util import question
def action_code():
diff --git a/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py b/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py
index 463b1334..35e1381b 100644
--- a/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py
+++ b/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py
@@ -1,4 +1,4 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from .my_util import question # pylint: disable=unused-import
+from .my_util import question
diff --git a/test/units/utils/collection_loader/test_collection_loader.py b/test/units/utils/collection_loader/test_collection_loader.py
index feaaf97a..f7050dcd 100644
--- a/test/units/utils/collection_loader/test_collection_loader.py
+++ b/test/units/utils/collection_loader/test_collection_loader.py
@@ -13,7 +13,7 @@ from ansible.modules import ping as ping_module
from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
from ansible.utils.collection_loader._collection_finder import (
_AnsibleCollectionFinder, _AnsibleCollectionLoader, _AnsibleCollectionNSPkgLoader, _AnsibleCollectionPkgLoader,
- _AnsibleCollectionPkgLoaderBase, _AnsibleCollectionRootPkgLoader, _AnsibleNSTraversable, _AnsiblePathHookFinder,
+ _AnsibleCollectionPkgLoaderBase, _AnsibleCollectionRootPkgLoader, _AnsiblePathHookFinder,
_get_collection_name_from_path, _get_collection_role_path, _get_collection_metadata, _iter_modules_impl
)
from ansible.utils.collection_loader._collection_config import _EventSource
@@ -29,16 +29,8 @@ def teardown(*args, **kwargs):
# BEGIN STANDALONE TESTS - these exercise behaviors of the individual components without the import machinery
-@pytest.mark.filterwarnings(
- 'ignore:'
- r'find_module\(\) is deprecated and slated for removal in Python 3\.12; use find_spec\(\) instead'
- ':DeprecationWarning',
- 'ignore:'
- r'FileFinder\.find_loader\(\) is deprecated and slated for removal in Python 3\.12; use find_spec\(\) instead'
- ':DeprecationWarning',
-)
-@pytest.mark.skipif(not PY3 or sys.version_info >= (3, 12), reason='Testing Python 2 codepath (find_module) on Python 3, <= 3.11')
-def test_find_module_py3_lt_312():
+@pytest.mark.skipif(not PY3, reason='Testing Python 2 codepath (find_module) on Python 3')
+def test_find_module_py3():
dir_to_a_file = os.path.dirname(ping_module.__file__)
path_hook_finder = _AnsiblePathHookFinder(_AnsibleCollectionFinder(), dir_to_a_file)
@@ -48,16 +40,6 @@ def test_find_module_py3_lt_312():
assert path_hook_finder.find_module('missing') is None
-@pytest.mark.skipif(sys.version_info < (3, 12), reason='Testing Python 2 codepath (find_module) on Python >= 3.12')
-def test_find_module_py3_gt_311():
- dir_to_a_file = os.path.dirname(ping_module.__file__)
- path_hook_finder = _AnsiblePathHookFinder(_AnsibleCollectionFinder(), dir_to_a_file)
-
- # setuptools may fall back to find_module on Python 3 if find_spec returns None
- # see https://github.com/pypa/setuptools/pull/2918
- assert path_hook_finder.find_spec('missing') is None
-
-
def test_finder_setup():
# ensure scalar path is listified
f = _AnsibleCollectionFinder(paths='/bogus/bogus')
@@ -846,53 +828,6 @@ def test_collectionref_components_invalid(name, subdirs, resource, ref_type, exp
assert re.search(expected_error_expression, str(curerr.value))
-@pytest.mark.skipif(not PY3, reason='importlib.resources only supported for py3')
-def test_importlib_resources():
- if sys.version_info < (3, 10):
- from importlib_resources import files
- else:
- from importlib.resources import files
- from pathlib import Path
-
- f = get_default_finder()
- reset_collections_loader_state(f)
-
- ansible_collections_ns = files('ansible_collections')
- ansible_ns = files('ansible_collections.ansible')
- testns = files('ansible_collections.testns')
- testcoll = files('ansible_collections.testns.testcoll')
- testcoll2 = files('ansible_collections.testns.testcoll2')
- module_utils = files('ansible_collections.testns.testcoll.plugins.module_utils')
-
- assert isinstance(ansible_collections_ns, _AnsibleNSTraversable)
- assert isinstance(ansible_ns, _AnsibleNSTraversable)
- assert isinstance(testcoll, Path)
- assert isinstance(module_utils, Path)
-
- assert ansible_collections_ns.is_dir()
- assert ansible_ns.is_dir()
- assert testcoll.is_dir()
- assert module_utils.is_dir()
-
- first_path = Path(default_test_collection_paths[0])
- second_path = Path(default_test_collection_paths[1])
- testns_paths = []
- ansible_ns_paths = []
- for path in default_test_collection_paths[:2]:
- ansible_ns_paths.append(Path(path) / 'ansible_collections' / 'ansible')
- testns_paths.append(Path(path) / 'ansible_collections' / 'testns')
-
- assert testns._paths == testns_paths
- # NOTE: The next two asserts check for subsets to accommodate running the unit tests when externally installed collections are available.
- assert set(ansible_ns_paths).issubset(ansible_ns._paths)
- assert set(Path(p) / 'ansible_collections' for p in default_test_collection_paths[:2]).issubset(ansible_collections_ns._paths)
- assert testcoll2 == second_path / 'ansible_collections' / 'testns' / 'testcoll2'
-
- assert {p.name for p in module_utils.glob('*.py')} == {'__init__.py', 'my_other_util.py', 'my_util.py'}
- nestcoll_mu_init = first_path / 'ansible_collections' / 'testns' / 'testcoll' / 'plugins' / 'module_utils' / '__init__.py'
- assert next(module_utils.glob('__init__.py')) == nestcoll_mu_init
-
-
# BEGIN TEST SUPPORT
default_test_collection_paths = [
diff --git a/test/units/utils/display/test_broken_cowsay.py b/test/units/utils/display/test_broken_cowsay.py
index 96157e1a..d888010a 100644
--- a/test/units/utils/display/test_broken_cowsay.py
+++ b/test/units/utils/display/test_broken_cowsay.py
@@ -12,13 +12,16 @@ from unittest.mock import MagicMock
def test_display_with_fake_cowsay_binary(capsys, mocker):
- display = Display()
-
mocker.patch("ansible.constants.ANSIBLE_COW_PATH", "./cowsay.sh")
+ def mock_communicate(input=None, timeout=None):
+ return b"", b""
+
mock_popen = MagicMock()
+ mock_popen.return_value.communicate = mock_communicate
mock_popen.return_value.returncode = 1
mocker.patch("subprocess.Popen", mock_popen)
+ display = Display()
assert not hasattr(display, "cows_available")
assert display.b_cowsay is None
diff --git a/test/units/utils/test_cleanup_tmp_file.py b/test/units/utils/test_cleanup_tmp_file.py
index 35374f4d..2a44a55b 100644
--- a/test/units/utils/test_cleanup_tmp_file.py
+++ b/test/units/utils/test_cleanup_tmp_file.py
@@ -6,11 +6,16 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
+import pytest
import tempfile
from ansible.utils.path import cleanup_tmp_file
+def raise_error():
+ raise OSError
+
+
def test_cleanup_tmp_file_file():
tmp_fd, tmp = tempfile.mkstemp()
cleanup_tmp_file(tmp)
@@ -29,21 +34,15 @@ def test_cleanup_tmp_file_nonexistant():
assert None is cleanup_tmp_file('nope')
-def test_cleanup_tmp_file_failure(mocker, capsys):
+def test_cleanup_tmp_file_failure(mocker):
tmp = tempfile.mkdtemp()
- rmtree = mocker.patch('shutil.rmtree', side_effect=OSError('test induced failure'))
- cleanup_tmp_file(tmp)
- out, err = capsys.readouterr()
- assert out == ''
- assert err == ''
- rmtree.assert_called_once()
+ with pytest.raises(Exception):
+ mocker.patch('shutil.rmtree', side_effect=raise_error())
+ cleanup_tmp_file(tmp)
def test_cleanup_tmp_file_failure_warning(mocker, capsys):
tmp = tempfile.mkdtemp()
- rmtree = mocker.patch('shutil.rmtree', side_effect=OSError('test induced failure'))
- cleanup_tmp_file(tmp, warn=True)
- out, err = capsys.readouterr()
- assert out == 'Unable to remove temporary file test induced failure\n'
- assert err == ''
- rmtree.assert_called_once()
+ with pytest.raises(Exception):
+ mocker.patch('shutil.rmtree', side_effect=raise_error())
+ cleanup_tmp_file(tmp, warn=True)
diff --git a/test/units/utils/test_display.py b/test/units/utils/test_display.py
index 80b7a099..6b1914bb 100644
--- a/test/units/utils/test_display.py
+++ b/test/units/utils/test_display.py
@@ -18,14 +18,16 @@ from ansible.utils.multiprocessing import context as multiprocessing_context
@pytest.fixture
def problematic_wcswidth_chars():
- locale.setlocale(locale.LC_ALL, 'C.UTF-8')
+ problematic = []
+ try:
+ locale.setlocale(locale.LC_ALL, 'C.UTF-8')
+ except Exception:
+ return problematic
candidates = set(chr(c) for c in range(sys.maxunicode) if unicodedata.category(chr(c)) == 'Cf')
- problematic = [candidate for candidate in candidates if _LIBC.wcswidth(candidate, _MAX_INT) == -1]
-
- if not problematic:
- # Newer distributions (Ubuntu 22.04, Fedora 38) include a libc which does not report problematic characters.
- pytest.skip("no problematic wcswidth chars found") # pragma: nocover
+ for c in candidates:
+ if _LIBC.wcswidth(c, _MAX_INT) == -1:
+ problematic.append(c)
return problematic
@@ -52,6 +54,9 @@ def test_get_text_width():
def test_get_text_width_no_locale(problematic_wcswidth_chars):
+ if not problematic_wcswidth_chars:
+ pytest.skip("No problematic wcswidth chars")
+ locale.setlocale(locale.LC_ALL, 'C.UTF-8')
pytest.raises(EnvironmentError, get_text_width, problematic_wcswidth_chars[0])
@@ -103,21 +108,9 @@ def test_Display_display_fork():
display = Display()
display.set_queue(queue)
display.display('foo')
- queue.send_display.assert_called_once_with('display', 'foo')
-
- p = multiprocessing_context.Process(target=test)
- p.start()
- p.join()
- assert p.exitcode == 0
-
-
-def test_Display_display_warn_fork():
- def test():
- queue = MagicMock()
- display = Display()
- display.set_queue(queue)
- display.warning('foo')
- queue.send_display.assert_called_once_with('warning', 'foo')
+ queue.send_display.assert_called_once_with(
+ 'foo', color=None, stderr=False, screen_only=False, log_only=False, newline=True
+ )
p = multiprocessing_context.Process(target=test)
p.start()
diff --git a/test/units/utils/test_encrypt.py b/test/units/utils/test_encrypt.py
index be325790..72fe3b07 100644
--- a/test/units/utils/test_encrypt.py
+++ b/test/units/utils/test_encrypt.py
@@ -27,26 +27,17 @@ class passlib_off(object):
def assert_hash(expected, secret, algorithm, **settings):
- assert encrypt.do_encrypt(secret, algorithm, **settings) == expected
if encrypt.PASSLIB_AVAILABLE:
+ assert encrypt.passlib_or_crypt(secret, algorithm, **settings) == expected
assert encrypt.PasslibHash(algorithm).hash(secret, **settings) == expected
else:
+ assert encrypt.passlib_or_crypt(secret, algorithm, **settings) == expected
with pytest.raises(AnsibleError) as excinfo:
encrypt.PasslibHash(algorithm).hash(secret, **settings)
assert excinfo.value.args[0] == "passlib must be installed and usable to hash with '%s'" % algorithm
@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
-def test_passlib_or_crypt():
- with passlib_off():
- expected = "$5$rounds=5000$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
- assert encrypt.passlib_or_crypt("123", "sha256_crypt", salt="12345678", rounds=5000) == expected
-
- expected = "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
- assert encrypt.passlib_or_crypt("123", "sha256_crypt", salt="12345678", rounds=5000) == expected
-
-
-@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
def test_encrypt_with_rounds_no_passlib():
with passlib_off():
assert_hash("$5$rounds=5000$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
diff --git a/test/units/utils/test_unsafe_proxy.py b/test/units/utils/test_unsafe_proxy.py
index 55f1b6dd..ea653cfe 100644
--- a/test/units/utils/test_unsafe_proxy.py
+++ b/test/units/utils/test_unsafe_proxy.py
@@ -5,9 +5,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-import pathlib
-import sys
-
+from ansible.module_utils.six import PY3
from ansible.utils.unsafe_proxy import AnsibleUnsafe, AnsibleUnsafeBytes, AnsibleUnsafeText, wrap_var
from ansible.module_utils.common.text.converters import to_text, to_bytes
@@ -21,7 +19,10 @@ def test_wrap_var_bytes():
def test_wrap_var_string():
- assert isinstance(wrap_var('foo'), AnsibleUnsafeText)
+ if PY3:
+ assert isinstance(wrap_var('foo'), AnsibleUnsafeText)
+ else:
+ assert isinstance(wrap_var('foo'), AnsibleUnsafeBytes)
def test_wrap_var_dict():
@@ -94,12 +95,12 @@ def test_wrap_var_no_ref():
'text': 'text',
}
wrapped_thing = wrap_var(thing)
- assert thing is not wrapped_thing
- assert thing['foo'] is not wrapped_thing['foo']
- assert thing['bar'][0] is not wrapped_thing['bar'][0]
- assert thing['baz'][0] is not wrapped_thing['baz'][0]
- assert thing['none'] is wrapped_thing['none']
- assert thing['text'] is not wrapped_thing['text']
+ thing is not wrapped_thing
+ thing['foo'] is not wrapped_thing['foo']
+ thing['bar'][0] is not wrapped_thing['bar'][0]
+ thing['baz'][0] is not wrapped_thing['baz'][0]
+ thing['none'] is not wrapped_thing['none']
+ thing['text'] is not wrapped_thing['text']
def test_AnsibleUnsafeText():
@@ -118,10 +119,3 @@ def test_to_text_unsafe():
def test_to_bytes_unsafe():
assert isinstance(to_bytes(AnsibleUnsafeText(u'foo')), AnsibleUnsafeBytes)
assert to_bytes(AnsibleUnsafeText(u'foo')) == AnsibleUnsafeBytes(b'foo')
-
-
-def test_unsafe_with_sys_intern():
- # Specifically this is actually about sys.intern, test of pathlib
- # because that is a specific affected use
- assert sys.intern(AnsibleUnsafeText('foo')) == 'foo'
- assert pathlib.Path(AnsibleUnsafeText('/tmp')) == pathlib.Path('/tmp')
diff --git a/test/units/vars/test_module_response_deepcopy.py b/test/units/vars/test_module_response_deepcopy.py
index 3313dea1..78f9de0e 100644
--- a/test/units/vars/test_module_response_deepcopy.py
+++ b/test/units/vars/test_module_response_deepcopy.py
@@ -7,6 +7,8 @@ __metaclass__ = type
from ansible.vars.clean import module_response_deepcopy
+import pytest
+
def test_module_response_deepcopy_basic():
x = 42
@@ -35,6 +37,15 @@ def test_module_response_deepcopy_empty_tuple():
assert x is y
+@pytest.mark.skip(reason='No current support for this situation')
+def test_module_response_deepcopy_tuple():
+ x = ([1, 2], 3)
+ y = module_response_deepcopy(x)
+ assert y == x
+ assert x is not y
+ assert x[0] is not y[0]
+
+
def test_module_response_deepcopy_tuple_of_immutables():
x = ((1, 2), 3)
y = module_response_deepcopy(x)
diff --git a/test/units/vars/test_variable_manager.py b/test/units/vars/test_variable_manager.py
index ee6de817..67ec120b 100644
--- a/test/units/vars/test_variable_manager.py
+++ b/test/units/vars/test_variable_manager.py
@@ -141,8 +141,10 @@ class TestVariableManager(unittest.TestCase):
return
# pylint: disable=unreachable
- # Tests complex variations and combinations of get_vars() with different
- # objects to modify the context under which variables are merged.
+ '''
+ Tests complex variations and combinations of get_vars() with different
+ objects to modify the context under which variables are merged.
+ '''
# FIXME: BCS makethiswork
# return True