summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
authorLee Garrett <lgarrett@rocketjump.eu>2022-06-13 23:13:57 +0200
committerLee Garrett <lgarrett@rocketjump.eu>2022-06-13 23:13:57 +0200
commitdf2a2cd18c338647061f3448248f8b97b6971f49 (patch)
treef223b6b9084be551de18fdb4fe0d596c68a9cebc /test
parent71ed02a1e802462d5d9b5f7e0fad42307a175278 (diff)
downloaddebian-ansible-core-df2a2cd18c338647061f3448248f8b97b6971f49.zip
New upstream version 2.13.0
Diffstat (limited to 'test')
-rw-r--r--test/integration/targets/add_host/tasks/main.yml17
-rw-r--r--test/integration/targets/ansiballz_python/aliases1
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json30
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/cache/notjsonfile.py70
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py36
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py45
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/fakemodule.py28
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/notrealmodule.py (renamed from test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py)8
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py96
-rw-r--r--test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/vars/noop_vars_plugin.py30
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py3
-rwxr-xr-xtest/integration/targets/ansible-doc/runme.sh37
-rw-r--r--test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml2
-rw-r--r--test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_collection_bad_version.yml47
-rw-r--r--test/integration/targets/ansible-galaxy-collection-scm/tasks/test_invalid_version.yml58
-rw-r--r--test/integration/targets/ansible-galaxy-collection-scm/vars/main.yml1
-rw-r--r--test/integration/targets/ansible-galaxy-collection/library/setup_collections.py61
-rw-r--r--test/integration/targets/ansible-galaxy-collection/meta/main.yml1
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/install.yml435
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/main.yml4
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/revoke_gpg_key.yml14
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/setup_gpg.yml24
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/verify.yml234
-rw-r--r--test/integration/targets/ansible-galaxy-collection/vars/main.yml2
-rw-r--r--test/integration/targets/ansible-galaxy/cleanup-default.yml10
-rw-r--r--test/integration/targets/ansible-galaxy/cleanup-freebsd.yml8
-rw-r--r--test/integration/targets/ansible-galaxy/cleanup.yml7
-rw-r--r--test/integration/targets/ansible-galaxy/files/testserver.py20
-rwxr-xr-xtest/integration/targets/ansible-galaxy/runme.sh87
-rw-r--r--test/integration/targets/ansible-galaxy/setup.yml54
-rw-r--r--test/integration/targets/ansible-test-cloud-aws/aliases3
-rw-r--r--test/integration/targets/ansible-test-cloud-aws/tasks/main.yml17
-rw-r--r--test/integration/targets/ansible-test-cloud-azure/aliases3
-rw-r--r--test/integration/targets/ansible-test-cloud-azure/tasks/main.yml18
-rw-r--r--test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/module_utils/PSUtil.psm12
-rw-r--r--test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/win_util_args.ps12
-rwxr-xr-xtest/integration/targets/ansible-test/collection-tests/unsupported-directory.sh9
-rwxr-xr-xtest/integration/targets/ansible-test/collection-tests/venv-pythons.py5
-rwxr-xr-xtest/integration/targets/ansible-vault/runme.sh29
-rw-r--r--test/integration/targets/ansible-vault/single_vault_as_string.yml6
-rw-r--r--test/integration/targets/any_errors_fatal/50897.yml19
-rwxr-xr-xtest/integration/targets/any_errors_fatal/runme.sh20
-rw-r--r--test/integration/targets/apt/aliases1
-rw-r--r--test/integration/targets/apt/defaults/main.yml1
-rw-r--r--test/integration/targets/apt/handlers/main.yml4
-rw-r--r--test/integration/targets/apt/tasks/apt.yml76
-rw-r--r--test/integration/targets/apt/tasks/repo.yml68
-rw-r--r--test/integration/targets/apt/tasks/url-with-deps.yml6
-rw-r--r--test/integration/targets/apt_key/aliases1
-rw-r--r--test/integration/targets/apt_key/tasks/apt_key_binary.yml2
-rw-r--r--test/integration/targets/apt_key/tasks/apt_key_inline_data.yml2
-rw-r--r--test/integration/targets/apt_key/tasks/file.yml4
-rw-r--r--test/integration/targets/apt_key/tasks/main.yml8
-rw-r--r--test/integration/targets/apt_repository/aliases1
-rw-r--r--test/integration/targets/assemble/meta/main.yml1
-rw-r--r--test/integration/targets/assemble/tasks/main.yml37
-rw-r--r--test/integration/targets/async/aliases1
-rw-r--r--test/integration/targets/become/aliases1
-rw-r--r--test/integration/targets/binary/meta/main.yml1
-rw-r--r--test/integration/targets/binary/tasks/main.yml36
-rw-r--r--test/integration/targets/binary_modules/Makefile3
-rw-r--r--test/integration/targets/binary_modules/download_binary_modules.yml2
-rw-r--r--test/integration/targets/callback_default/callback_default.out.result_format_yaml.stderr2
-rw-r--r--test/integration/targets/callback_default/callback_default.out.result_format_yaml.stdout97
-rw-r--r--test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stderr2
-rw-r--r--test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout282
-rw-r--r--test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stderr2
-rw-r--r--test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout294
-rw-r--r--test/integration/targets/callback_default/callback_default.out.yaml_result_format_yaml_verbose.stderr2
-rw-r--r--test/integration/targets/callback_default/callback_default.out.yaml_result_format_yaml_verbose.stdout29
-rwxr-xr-xtest/integration/targets/callback_default/runme.sh48
-rw-r--r--test/integration/targets/callback_default/test_yaml.yml19
-rw-r--r--test/integration/targets/changed_when/tasks/main.yml38
-rw-r--r--test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/win_selfcontained.ps16
-rw-r--r--test/integration/targets/collections/test_bypass_host_loop.yml3
-rw-r--r--test/integration/targets/collections/test_collection_meta.yml2
-rw-r--r--test/integration/targets/command_shell/aliases1
-rw-r--r--test/integration/targets/command_shell/meta/main.yml1
-rw-r--r--test/integration/targets/command_shell/tasks/main.yml157
-rw-r--r--test/integration/targets/connection_local/aliases1
-rwxr-xr-x[l---------]test/integration/targets/connection_local/runme.sh15
-rw-r--r--test/integration/targets/connection_paramiko_ssh/aliases1
-rwxr-xr-x[l---------]test/integration/targets/connection_paramiko_ssh/test.sh15
-rw-r--r--test/integration/targets/connection_posix/aliases2
-rwxr-xr-xtest/integration/targets/connection_posix/test.sh18
-rw-r--r--test/integration/targets/connection_ssh/aliases1
-rwxr-xr-x[l---------]test/integration/targets/connection_ssh/posix.sh15
-rw-r--r--test/integration/targets/connection_windows_ssh/aliases1
-rw-r--r--test/integration/targets/connection_winrm/aliases1
-rwxr-xr-xtest/integration/targets/connection_winrm/runme.sh5
-rw-r--r--test/integration/targets/connection_winrm/tests.yml28
-rw-r--r--test/integration/targets/copy/aliases1
-rw-r--r--test/integration/targets/copy/files-different/vault/folder/nested-vault-file (renamed from test/integration/targets/incidental_win_copy/files-different/vault/folder/nested-vault-file)0
-rw-r--r--test/integration/targets/copy/files-different/vault/readme.txt (renamed from test/integration/targets/incidental_win_copy/files-different/vault/readme.txt)2
-rw-r--r--test/integration/targets/copy/files-different/vault/vault-file (renamed from test/integration/targets/incidental_win_copy/files-different/vault/vault-file)0
l---------test/integration/targets/copy/files/subdir/subdir1/bar.txt1
-rw-r--r--test/integration/targets/copy/files/subdir/subdir1/empty.txt (renamed from test/integration/targets/incidental_win_copy/files/empty.txt)0
-rw-r--r--test/integration/targets/copy/meta/main.yml1
-rw-r--r--test/integration/targets/copy/tasks/main.yml3
-rw-r--r--test/integration/targets/copy/tasks/tests.yml26
-rw-r--r--test/integration/targets/cron/aliases1
-rw-r--r--test/integration/targets/debconf/aliases1
-rw-r--r--test/integration/targets/debugger/aliases3
-rw-r--r--test/integration/targets/debugger/inventory2
-rwxr-xr-xtest/integration/targets/debugger/runme.sh5
-rwxr-xr-xtest/integration/targets/debugger/test_run_once.py35
-rw-r--r--test/integration/targets/debugger/test_run_once_playbook.yml12
-rw-r--r--test/integration/targets/delegate_to/delegate_facts_loop.yml2
-rw-r--r--test/integration/targets/dnf/aliases1
-rw-r--r--test/integration/targets/dnf/tasks/dnf.yml4
-rw-r--r--test/integration/targets/dnf/tasks/filters.yml2
-rw-r--r--test/integration/targets/dnf/tasks/filters_check_mode.yml2
-rw-r--r--test/integration/targets/dnf/tasks/gpg.yml15
-rw-r--r--test/integration/targets/dnf/tasks/main.yml6
-rw-r--r--test/integration/targets/dnf/vars/Fedora-35.yml2
-rw-r--r--test/integration/targets/dnf/vars/RedHat-9.yml2
-rw-r--r--test/integration/targets/dpkg_selections/aliases1
-rw-r--r--test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml2
-rw-r--r--test/integration/targets/entry_points/aliases2
-rwxr-xr-xtest/integration/targets/entry_points/runme.sh31
-rw-r--r--test/integration/targets/expect/meta/main.yml (renamed from test/integration/targets/incidental_mongodb_parameter/meta/main.yml)1
-rw-r--r--test/integration/targets/expect/tasks/main.yml8
-rw-r--r--test/integration/targets/facts_d/aliases2
-rw-r--r--test/integration/targets/facts_d/meta/main.yml1
-rw-r--r--test/integration/targets/facts_d/tasks/main.yml2
-rw-r--r--test/integration/targets/file/aliases1
-rw-r--r--test/integration/targets/file/meta/main.yml1
-rw-r--r--test/integration/targets/file/tasks/directory_as_dest.yml72
-rw-r--r--test/integration/targets/file/tasks/initialize.yml4
-rw-r--r--test/integration/targets/file/tasks/main.yml150
-rw-r--r--test/integration/targets/file/tasks/modification_time.yml70
-rw-r--r--test/integration/targets/file/tasks/selinux_tests.yml4
-rw-r--r--test/integration/targets/file/tasks/state_link.yml84
-rw-r--r--test/integration/targets/file/tasks/unicode_path.yml4
-rw-r--r--test/integration/targets/filter_core/tasks/main.yml2
-rwxr-xr-xtest/integration/targets/filter_mathstuff/runme.sh10
-rwxr-xr-xtest/integration/targets/filter_urls/runme.sh22
-rw-r--r--test/integration/targets/filter_urls/runme.yml4
-rw-r--r--test/integration/targets/filter_urls/tasks/main.yml7
-rw-r--r--test/integration/targets/find/meta/main.yml1
-rw-r--r--test/integration/targets/find/tasks/main.yml108
-rw-r--r--test/integration/targets/get_url/aliases1
-rw-r--r--test/integration/targets/getent/aliases1
-rw-r--r--test/integration/targets/git/aliases1
-rw-r--r--test/integration/targets/git/handlers/cleanup-freebsd.yml2
-rw-r--r--test/integration/targets/git/tasks/archive.yml13
-rw-r--r--test/integration/targets/group/aliases1
-rw-r--r--test/integration/targets/groupby_filter/aliases1
-rw-r--r--test/integration/targets/groupby_filter/requirements.txt4
-rwxr-xr-xtest/integration/targets/groupby_filter/runme.sh16
-rw-r--r--test/integration/targets/groupby_filter/tasks/main.yml16
-rw-r--r--test/integration/targets/groupby_filter/test_jinja2_groupby.yml29
-rw-r--r--test/integration/targets/hostname/aliases1
-rw-r--r--test/integration/targets/hostname/tasks/Debian.yml20
-rw-r--r--test/integration/targets/hostname/tasks/main.yml2
-rw-r--r--test/integration/targets/hostname/tasks/test_normal.yml30
-rw-r--r--test/integration/targets/incidental_cloud_init_data_facts/aliases7
-rw-r--r--test/integration/targets/incidental_cloud_init_data_facts/tasks/main.yml50
-rw-r--r--test/integration/targets/incidental_cs_common/aliases1
-rw-r--r--test/integration/targets/incidental_cs_common/defaults/main.yml6
-rw-r--r--test/integration/targets/incidental_deploy_helper/aliases2
-rw-r--r--test/integration/targets/incidental_deploy_helper/tasks/main.yml149
-rw-r--r--test/integration/targets/incidental_inventory_docker_swarm/aliases1
-rw-r--r--test/integration/targets/incidental_mongodb_parameter/aliases9
-rw-r--r--test/integration/targets/incidental_mongodb_parameter/defaults/main.yml21
-rw-r--r--test/integration/targets/incidental_mongodb_parameter/tasks/main.yml143
-rw-r--r--test/integration/targets/incidental_mongodb_parameter/tasks/mongod_singlenode.yml55
-rw-r--r--test/integration/targets/incidental_mongodb_parameter/tasks/mongod_teardown.yml25
-rw-r--r--test/integration/targets/incidental_setup_mongodb/aliases1
-rw-r--r--test/integration/targets/incidental_setup_mongodb/defaults/main.yml46
-rw-r--r--test/integration/targets/incidental_setup_mongodb/handlers/main.yml24
-rw-r--r--test/integration/targets/incidental_setup_mongodb/tasks/main.yml168
-rw-r--r--test/integration/targets/incidental_setup_tls/aliases1
-rw-r--r--test/integration/targets/incidental_setup_tls/files/ca_certificate.pem19
-rw-r--r--test/integration/targets/incidental_setup_tls/files/ca_key.pem28
-rw-r--r--test/integration/targets/incidental_setup_tls/files/client_certificate.pem20
-rw-r--r--test/integration/targets/incidental_setup_tls/files/client_key.pem27
-rw-r--r--test/integration/targets/incidental_setup_tls/files/server_certificate.pem20
-rw-r--r--test/integration/targets/incidental_setup_tls/files/server_key.pem27
-rw-r--r--test/integration/targets/incidental_setup_tls/tasks/main.yml21
-rw-r--r--test/integration/targets/incidental_win_copy/aliases2
-rw-r--r--test/integration/targets/incidental_win_copy/defaults/main.yml1
-rw-r--r--test/integration/targets/incidental_win_copy/files/foo.txt1
-rw-r--r--test/integration/targets/incidental_win_copy/files/subdir/bar.txt1
-rw-r--r--test/integration/targets/incidental_win_copy/files/subdir/subdir2/baz.txt1
-rw-r--r--test/integration/targets/incidental_win_copy/files/subdir/subdir2/subdir3/subdir4/qux.txt1
-rw-r--r--test/integration/targets/incidental_win_copy/tasks/main.yml34
-rw-r--r--test/integration/targets/incidental_win_copy/tasks/remote_tests.yml471
-rw-r--r--test/integration/targets/incidental_win_copy/tasks/tests.yml535
-rw-r--r--test/integration/targets/incidental_win_data_deduplication/aliases5
-rw-r--r--test/integration/targets/incidental_win_data_deduplication/meta/main.yml2
-rw-r--r--test/integration/targets/incidental_win_data_deduplication/tasks/main.yml2
-rw-r--r--test/integration/targets/incidental_win_data_deduplication/tasks/pre_test.yml40
-rw-r--r--test/integration/targets/incidental_win_data_deduplication/tasks/tests.yml47
-rw-r--r--test/integration/targets/incidental_win_data_deduplication/templates/partition_creation_script.j211
-rw-r--r--test/integration/targets/incidental_win_data_deduplication/templates/partition_deletion_script.j23
-rw-r--r--test/integration/targets/incidental_win_dsc/aliases6
-rw-r--r--test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm141
-rw-r--r--test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.schema.mof7
-rw-r--r--test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1214
-rw-r--r--test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.schema.mof60
-rw-r--r--test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd113
-rw-r--r--test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1214
-rw-r--r--test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.schema.mof63
-rw-r--r--test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd113
-rw-r--r--test/integration/targets/incidental_win_dsc/meta/main.yml2
-rw-r--r--test/integration/targets/incidental_win_dsc/tasks/main.yml39
-rw-r--r--test/integration/targets/incidental_win_dsc/tasks/tests.yml544
-rw-r--r--test/integration/targets/incidental_win_lineinfile/aliases3
-rw-r--r--test/integration/targets/incidental_win_lineinfile/files/test.txt5
-rw-r--r--test/integration/targets/incidental_win_lineinfile/files/test_quoting.txt0
-rw-r--r--test/integration/targets/incidental_win_lineinfile/files/testempty.txt0
-rw-r--r--test/integration/targets/incidental_win_lineinfile/files/testnoeof.txt2
-rw-r--r--test/integration/targets/incidental_win_lineinfile/meta/main.yml2
-rw-r--r--test/integration/targets/incidental_win_lineinfile/tasks/main.yml708
-rw-r--r--test/integration/targets/incidental_win_ping/aliases2
-rw-r--r--test/integration/targets/incidental_win_ping/library/win_ping_set_attr.ps131
-rw-r--r--test/integration/targets/incidental_win_ping/library/win_ping_strict_mode_error.ps130
-rw-r--r--test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps130
-rw-r--r--test/integration/targets/incidental_win_ping/library/win_ping_throw.ps130
-rw-r--r--test/integration/targets/incidental_win_ping/library/win_ping_throw_string.ps130
-rw-r--r--test/integration/targets/incidental_win_ping/tasks/main.yml67
-rw-r--r--test/integration/targets/incidental_win_prepare_tests/aliases1
-rw-r--r--test/integration/targets/incidental_win_prepare_tests/meta/main.yml3
-rw-r--r--test/integration/targets/include_import/issue73657.yml8
-rw-r--r--test/integration/targets/include_import/issue73657_tasks.yml2
-rwxr-xr-xtest/integration/targets/include_import/runme.sh4
-rw-r--r--test/integration/targets/interpreter_discovery_python/tasks/main.yml9
-rw-r--r--test/integration/targets/inventory/1/2/inventory.yml2
-rw-r--r--test/integration/targets/inventory/extra_vars_constructed.yml2
-rw-r--r--test/integration/targets/inventory_constructed/constructed.yml2
-rw-r--r--test/integration/targets/inventory_constructed/invs/2/constructed.yml2
-rw-r--r--test/integration/targets/inventory_constructed/keyed_group_default_value.yml2
-rw-r--r--test/integration/targets/inventory_constructed/keyed_group_list_default_value.yml4
-rw-r--r--test/integration/targets/inventory_constructed/keyed_group_str_default_value.yml4
-rw-r--r--test/integration/targets/inventory_constructed/keyed_group_trailing_separator.yml2
-rw-r--r--test/integration/targets/inventory_constructed/no_leading_separator_constructed.yml2
-rw-r--r--test/integration/targets/iptables/aliases5
-rw-r--r--test/integration/targets/iptables/tasks/chain_management.yml71
-rw-r--r--test/integration/targets/iptables/tasks/main.yml (renamed from test/integration/targets/incidental_win_prepare_tests/tasks/main.yml)31
-rw-r--r--test/integration/targets/iptables/vars/alpine.yml2
-rw-r--r--test/integration/targets/iptables/vars/centos.yml2
-rw-r--r--test/integration/targets/iptables/vars/default.yml2
-rw-r--r--test/integration/targets/iptables/vars/fedora.yml2
-rw-r--r--test/integration/targets/iptables/vars/redhat.yml2
-rw-r--r--test/integration/targets/iptables/vars/suse.yml2
-rw-r--r--test/integration/targets/jinja2_native_types/nested_undefined.yml1
-rw-r--r--test/integration/targets/jinja2_native_types/runtests.yml24
-rw-r--r--test/integration/targets/known_hosts/meta/main.yml1
-rw-r--r--test/integration/targets/known_hosts/tasks/main.yml70
-rwxr-xr-xtest/integration/targets/lookup_env/runme.sh12
-rw-r--r--test/integration/targets/lookup_url/aliases1
-rw-r--r--test/integration/targets/loop-until/aliases2
-rw-r--r--test/integration/targets/loop-until/tasks/main.yml160
-rw-r--r--test/integration/targets/module_utils/aliases1
-rw-r--r--test/integration/targets/module_utils/library/test_alias_deprecation.py3
-rw-r--r--test/integration/targets/module_utils/library/test_override.py3
-rw-r--r--test/integration/targets/module_utils/module_utils/ansible_release.py4
-rw-r--r--test/integration/targets/module_utils/module_utils/facts.py1
-rw-r--r--test/integration/targets/module_utils/module_utils_envvar.yml4
-rw-r--r--test/integration/targets/module_utils/module_utils_test.yml13
-rwxr-xr-xtest/integration/targets/module_utils/runme.sh6
-rw-r--r--test/integration/targets/module_utils_Ansible.AccessToken/library/ansible_access_token_tests.ps1253
-rw-r--r--test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps11666
-rw-r--r--test/integration/targets/module_utils_Ansible.Become/library/ansible_become_tests.ps1475
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps174
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.Backup/library/backup_file_test.ps169
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.CamelConversion/library/camel_conversion_test.ps139
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.CommandUtil/library/command_util_test.ps172
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.FileUtil/library/file_util_test.ps154
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.Legacy/library/testpath.ps12
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.LinkUtil/library/symbolic_link_test.ps1104
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.PrivilegeUtil/library/privilege_util_test.ps123
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.SID/library/sid_utils_test.ps122
-rw-r--r--test/integration/targets/module_utils_Ansible.ModuleUtils.WebRequest/library/web_request_test.ps1220
-rw-r--r--test/integration/targets/module_utils_Ansible.Privilege/library/ansible_privilege_tests.ps1294
-rw-r--r--test/integration/targets/module_utils_Ansible.Process/library/ansible_process_tests.ps1186
-rw-r--r--test/integration/targets/module_utils_Ansible.Service/library/ansible_service_tests.ps1674
-rw-r--r--test/integration/targets/package/aliases1
-rw-r--r--test/integration/targets/package/tasks/main.yml8
-rw-r--r--test/integration/targets/package_facts/aliases1
-rw-r--r--test/integration/targets/pause/test-pause.yml29
-rw-r--r--test/integration/targets/pip/aliases1
-rw-r--r--test/integration/targets/pip/meta/main.yml1
-rw-r--r--test/integration/targets/pip/tasks/pip.yml75
l---------test/integration/targets/plugin_loader/normal/library/_symlink.py1
-rwxr-xr-xtest/integration/targets/plugin_loader/runme.sh9
-rw-r--r--test/integration/targets/prepare_http_tests/tasks/main.yml1
-rw-r--r--test/integration/targets/prepare_http_tests/vars/RedHat-9.yml4
-rw-r--r--test/integration/targets/pyyaml/aliases2
-rwxr-xr-xtest/integration/targets/pyyaml/runme.sh11
-rw-r--r--test/integration/targets/raw/aliases1
-rw-r--r--test/integration/targets/raw/meta/main.yml1
-rwxr-xr-xtest/integration/targets/raw/runme.sh2
-rw-r--r--test/integration/targets/raw/tasks/main.yml12
-rw-r--r--test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py3
-rw-r--r--test/integration/targets/remote_tmp/aliases1
-rw-r--r--test/integration/targets/replace/meta/main.yml1
-rw-r--r--test/integration/targets/replace/tasks/main.yml56
-rw-r--r--test/integration/targets/roles_arg_spec/test_complex_role_fails.yml14
-rw-r--r--test/integration/targets/rpm_key/aliases1
-rw-r--r--test/integration/targets/rpm_key/meta/main.yml2
-rw-r--r--test/integration/targets/rpm_key/tasks/rpm_key.yaml22
-rw-r--r--test/integration/targets/script/meta/main.yml1
-rw-r--r--test/integration/targets/script/tasks/main.yml55
-rw-r--r--test/integration/targets/service/aliases1
-rw-r--r--test/integration/targets/service_facts/aliases1
-rw-r--r--test/integration/targets/setup_deb_repo/files/package_specs/stable/foo-1.0.0 (renamed from test/integration/targets/setup_deb_repo/files/package_specs/foo-1.0.0)0
-rw-r--r--test/integration/targets/setup_deb_repo/files/package_specs/stable/foo-1.0.1 (renamed from test/integration/targets/setup_deb_repo/files/package_specs/foo-1.0.1)0
-rw-r--r--test/integration/targets/setup_deb_repo/files/package_specs/stable/foobar-1.0.0 (renamed from test/integration/targets/setup_deb_repo/files/package_specs/foobar-1.0.0)0
-rw-r--r--test/integration/targets/setup_deb_repo/files/package_specs/stable/foobar-1.0.1 (renamed from test/integration/targets/setup_deb_repo/files/package_specs/foobar-1.0.1)0
-rw-r--r--test/integration/targets/setup_deb_repo/files/package_specs/testing/foo-2.0.010
-rw-r--r--test/integration/targets/setup_deb_repo/files/package_specs/testing/foo-2.0.110
-rw-r--r--test/integration/targets/setup_deb_repo/tasks/main.yml53
-rw-r--r--test/integration/targets/setup_epel/tasks/main.yml2
-rw-r--r--test/integration/targets/setup_paramiko/install-Fedora-35-python-3.yml9
-rw-r--r--test/integration/targets/setup_paramiko/install-RedHat-9-python-3.yml9
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-Fedora-35-python-3.yml7
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-RedHat-9-python-3.yml7
-rw-r--r--test/integration/targets/setup_rpm_repo/tasks/main.yml5
-rw-r--r--test/integration/targets/setup_rpm_repo/vars/RedHat-9.yml4
-rw-r--r--test/integration/targets/setup_win_printargv/tasks/main.yml2
-rw-r--r--test/integration/targets/slurp/meta/main.yml2
-rw-r--r--test/integration/targets/slurp/tasks/main.yml8
-rw-r--r--test/integration/targets/slurp/tasks/test_unreadable.yml16
-rw-r--r--test/integration/targets/stat/meta/main.yml1
-rw-r--r--test/integration/targets/stat/tasks/main.yml16
-rw-r--r--test/integration/targets/strategy_free/aliases1
-rw-r--r--test/integration/targets/strategy_free/inventory2
-rw-r--r--test/integration/targets/strategy_free/last_include_tasks.yml2
-rwxr-xr-xtest/integration/targets/strategy_free/runme.sh10
-rw-r--r--test/integration/targets/strategy_free/test_last_include_in_always.yml9
-rw-r--r--test/integration/targets/subversion/aliases2
-rw-r--r--test/integration/targets/systemd/aliases1
-rw-r--r--test/integration/targets/systemd/handlers/main.yml8
-rw-r--r--test/integration/targets/systemd/tasks/main.yml1
-rw-r--r--test/integration/targets/systemd/tasks/test_indirect_service.yml37
-rw-r--r--test/integration/targets/systemd/templates/dummy.service11
-rw-r--r--test/integration/targets/systemd/templates/dummy.socket8
-rw-r--r--test/integration/targets/systemd/vars/Debian.yml1
-rw-r--r--test/integration/targets/systemd/vars/default.yml1
-rw-r--r--test/integration/targets/template/72615.yml8
-rw-r--r--test/integration/targets/template/corner_cases.yml4
-rw-r--r--test/integration/targets/template/files/string_type_filters.expected4
-rw-r--r--test/integration/targets/template/tasks/main.yml46
-rw-r--r--test/integration/targets/template/templates/empty_template.j2 (renamed from test/integration/targets/incidental_win_lineinfile/files/test_linebreak.txt)0
-rw-r--r--test/integration/targets/template/unsafe.yml45
-rw-r--r--test/integration/targets/template_jinja2_latest/aliases5
-rw-r--r--test/integration/targets/template_jinja2_latest/main.yml4
-rw-r--r--test/integration/targets/template_jinja2_latest/pip-requirements.txt4
-rw-r--r--test/integration/targets/template_jinja2_latest/requirements.txt2
-rwxr-xr-xtest/integration/targets/template_jinja2_latest/runme.sh14
-rw-r--r--test/integration/targets/template_jinja2_non_native/46169.yml1
-rwxr-xr-xtest/integration/targets/templating_lookups/runme.sh3
-rw-r--r--test/integration/targets/templating_lookups/template_lookup_safe_eval_unicode/playbook.yml8
-rw-r--r--test/integration/targets/templating_lookups/template_lookup_safe_eval_unicode/template.json.j24
-rw-r--r--test/integration/targets/test_mathstuff/tasks/main.yml11
-rw-r--r--test/integration/targets/unarchive/aliases1
-rw-r--r--test/integration/targets/unarchive/tasks/main.yml2
-rw-r--r--test/integration/targets/unarchive/tasks/test_include.yml5
-rw-r--r--test/integration/targets/unarchive/tasks/test_invalid_options.yml27
-rw-r--r--test/integration/targets/unarchive/tasks/test_ownership_top_folder.yml73
-rw-r--r--test/integration/targets/undefined/tasks/main.yml33
-rw-r--r--test/integration/targets/unsafe_writes/aliases1
-rw-r--r--test/integration/targets/uri/aliases1
-rw-r--r--test/integration/targets/uri/tasks/main.yml29
-rw-r--r--test/integration/targets/user/aliases1
-rw-r--r--test/integration/targets/user/tasks/test_expires_min_max.yml18
-rw-r--r--test/integration/targets/wait_for/meta/main.yml1
-rw-r--r--test/integration/targets/wait_for/tasks/main.yml42
-rw-r--r--test/integration/targets/wait_for/vars/main.yml4
-rw-r--r--test/integration/targets/win_async_wrapper/library/async_test.ps117
-rw-r--r--test/integration/targets/win_exec_wrapper/library/test_common_functions.ps139
-rw-r--r--test/integration/targets/win_exec_wrapper/library/test_fail.ps124
-rw-r--r--test/integration/targets/win_module_utils/library/legacy_only_new_way.ps12
-rw-r--r--test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps12
-rw-r--r--test/integration/targets/win_module_utils/library/legacy_only_old_way.ps12
-rw-r--r--test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps12
-rw-r--r--test/integration/targets/win_module_utils/library/uses_bogus_utils.ps12
-rw-r--r--test/integration/targets/win_module_utils/library/uses_local_utils.ps12
-rw-r--r--test/integration/targets/win_script/files/test_script_bool.ps12
-rw-r--r--test/integration/targets/win_script/files/test_script_creates_file.ps12
-rw-r--r--test/integration/targets/win_script/files/test_script_with_args.ps13
-rw-r--r--test/integration/targets/win_script/files/test_script_with_errors.ps13
-rw-r--r--test/integration/targets/yum/aliases1
-rw-r--r--test/integration/targets/yum/tasks/proxy.yml8
-rw-r--r--test/integration/targets/yum/tasks/yum.yml10
-rw-r--r--test/integration/targets/yum/vars/main.yml2
-rw-r--r--test/integration/targets/yum_repository/aliases1
-rw-r--r--test/lib/ansible_test/__init__.py2
-rw-r--r--test/lib/ansible_test/_data/completion/docker.txt10
-rw-r--r--test/lib/ansible_test/_data/completion/remote.txt8
-rw-r--r--test/lib/ansible_test/_data/completion/windows.txt1
-rw-r--r--test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.ps12
-rw-r--r--test/lib/ansible_test/_data/playbooks/windows_hosts_restore.ps15
-rw-r--r--test/lib/ansible_test/_data/pytest.ini5
-rw-r--r--test/lib/ansible_test/_data/requirements/ansible.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/constraints.txt18
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.ansible-doc.in3
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt13
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.changelog.in2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.changelog.txt17
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.import.in1
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.import.plugin.in2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt16
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.import.txt3
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.integration-aliases.in1
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt3
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.mypy.in9
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.mypy.txt20
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pep8.in1
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pep8.txt3
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pslint.ps112
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pylint.in2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pylint.txt19
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.in2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt5
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.validate-modules.in3
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt11
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.yamllint.in1
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.yamllint.txt9
-rw-r--r--test/lib/ansible_test/_internal/__init__.py9
-rw-r--r--test/lib/ansible_test/_internal/ansible_util.py5
-rw-r--r--test/lib/ansible_test/_internal/bootstrap.py12
-rw-r--r--test/lib/ansible_test/_internal/ci/__init__.py6
-rw-r--r--test/lib/ansible_test/_internal/classification/__init__.py42
-rw-r--r--test/lib/ansible_test/_internal/classification/powershell.py2
-rw-r--r--test/lib/ansible_test/_internal/classification/python.py5
-rw-r--r--test/lib/ansible_test/_internal/cli/__init__.py24
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/__init__.py16
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/actions.py4
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/parsers.py10
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/__init__.py17
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/integration/__init__.py7
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/integration/network.py10
-rw-r--r--test/lib/ansible_test/_internal/cli/compat.py40
-rw-r--r--test/lib/ansible_test/_internal/cli/completers.py9
-rw-r--r--test/lib/ansible_test/_internal/cli/environments.py75
-rw-r--r--test/lib/ansible_test/_internal/cli/epilog.py23
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/__init__.py8
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/helpers.py6
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/value_parsers.py10
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/__init__.py6
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py22
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py13
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py3
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py9
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py15
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py15
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/combine.py33
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/xml.py4
-rw-r--r--test/lib/ansible_test/_internal/commands/env/__init__.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/__init__.py28
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py14
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/aws.py3
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/azure.py50
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py4
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py4
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/nios.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py6
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/coverage.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/filters.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/__init__.py48
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py25
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py102
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/ignores.py3
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/import.py25
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py389
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/mypy.py250
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pep8.py4
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pylint.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/validate_modules.py103
-rw-r--r--test/lib/ansible_test/_internal/commands/shell/__init__.py5
-rw-r--r--test/lib/ansible_test/_internal/commands/units/__init__.py14
-rw-r--r--test/lib/ansible_test/_internal/compat/packaging.py6
-rw-r--r--test/lib/ansible_test/_internal/compat/yaml.py4
-rw-r--r--test/lib/ansible_test/_internal/completion.py4
-rw-r--r--test/lib/ansible_test/_internal/config.py13
-rw-r--r--test/lib/ansible_test/_internal/connections.py6
-rw-r--r--test/lib/ansible_test/_internal/constants.py18
-rw-r--r--test/lib/ansible_test/_internal/containers.py34
-rw-r--r--test/lib/ansible_test/_internal/core_ci.py2
-rw-r--r--test/lib/ansible_test/_internal/coverage_util.py6
-rw-r--r--test/lib/ansible_test/_internal/data.py77
-rw-r--r--test/lib/ansible_test/_internal/delegation.py19
-rw-r--r--test/lib/ansible_test/_internal/docker_util.py109
-rw-r--r--test/lib/ansible_test/_internal/host_configs.py35
-rw-r--r--test/lib/ansible_test/_internal/host_profiles.py15
-rw-r--r--test/lib/ansible_test/_internal/inventory.py4
-rw-r--r--test/lib/ansible_test/_internal/io.py14
-rw-r--r--[l---------]test/lib/ansible_test/_internal/junit_xml.py268
-rw-r--r--test/lib/ansible_test/_internal/metadata.py4
-rw-r--r--test/lib/ansible_test/_internal/payload.py8
-rw-r--r--test/lib/ansible_test/_internal/provider/__init__.py2
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/__init__.py4
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/collection.py9
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/unsupported.py42
-rw-r--r--test/lib/ansible_test/_internal/provider/source/unsupported.py22
-rw-r--r--test/lib/ansible_test/_internal/provisioning.py6
-rw-r--r--test/lib/ansible_test/_internal/pypi_proxy.py4
-rw-r--r--test/lib/ansible_test/_internal/python_requirements.py14
-rw-r--r--test/lib/ansible_test/_internal/ssh.py10
-rw-r--r--test/lib/ansible_test/_internal/target.py2
-rw-r--r--test/lib/ansible_test/_internal/test.py2
-rw-r--r--test/lib/ansible_test/_internal/thread.py15
-rw-r--r--test/lib/ansible_test/_internal/util.py81
-rw-r--r--test/lib/ansible_test/_internal/util_common.py27
-rw-r--r--test/lib/ansible_test/_internal/venv.py20
-rw-r--r--test/lib/ansible_test/_util/__init__.py5
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py7
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini119
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini24
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/mypy/modules.ini98
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps132
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd143
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg10
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg9
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg2
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py11
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py4
l---------test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate.py6
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py4
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py166
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py7
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps130
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py480
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py3
-rw-r--r--test/lib/ansible_test/_util/controller/tools/collection_detail.py3
-rw-r--r--test/lib/ansible_test/_util/controller/tools/coverage_stub.ps142
-rw-r--r--test/lib/ansible_test/_util/controller/tools/sslcheck.py3
-rw-r--r--test/lib/ansible_test/_util/controller/tools/yaml_to_json.py3
-rw-r--r--test/lib/ansible_test/_util/target/__init__.py5
-rwxr-xr-xtest/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py4
-rw-r--r--test/lib/ansible_test/_util/target/common/__init__.py5
-rw-r--r--test/lib/ansible_test/_util/target/common/constants.py1
-rw-r--r--test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py31
-rw-r--r--test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py107
-rw-r--r--test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py1067
-rw-r--r--test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py37
-rw-r--r--test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py9
-rw-r--r--test/lib/ansible_test/_util/target/sanity/compile/compile.py4
-rw-r--r--test/lib/ansible_test/_util/target/sanity/import/importer.py120
-rw-r--r--test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1163
-rw-r--r--test/lib/ansible_test/_util/target/setup/bootstrap.sh104
-rw-r--r--test/lib/ansible_test/_util/target/setup/quiet_pip.py20
-rw-r--r--test/lib/ansible_test/_util/target/setup/requirements.py5
-rw-r--r--test/lib/ansible_test/_util/target/tools/virtualenvcheck.py (renamed from test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py)0
-rw-r--r--test/lib/ansible_test/_util/target/tools/yamlcheck.py (renamed from test/lib/ansible_test/_util/controller/tools/yamlcheck.py)0
-rw-r--r--test/lib/ansible_test/config/cloud-config-azure.ini.template4
-rw-r--r--test/sanity/code-smell/ansible-requirements.py3
-rw-r--r--test/sanity/code-smell/ansible-test-future-boilerplate.json3
-rw-r--r--test/sanity/code-smell/ansible-test-future-boilerplate.py19
-rw-r--r--test/sanity/code-smell/configure-remoting-ps1.py3
-rw-r--r--test/sanity/code-smell/deprecated-config.py3
-rw-r--r--test/sanity/code-smell/deprecated-config.requirements.in2
-rw-r--r--test/sanity/code-smell/deprecated-config.requirements.txt9
-rw-r--r--test/sanity/code-smell/docs-build.py3
-rw-r--r--test/sanity/code-smell/docs-build.requirements.in8
-rw-r--r--test/sanity/code-smell/docs-build.requirements.txt100
-rw-r--r--test/sanity/code-smell/no-unwanted-files.py3
-rw-r--r--test/sanity/code-smell/obsolete-files.py3
-rw-r--r--test/sanity/code-smell/package-data.py37
-rw-r--r--test/sanity/code-smell/package-data.requirements.in7
-rw-r--r--test/sanity/code-smell/package-data.requirements.txt25
-rw-r--r--test/sanity/code-smell/release-names.json4
-rw-r--r--test/sanity/code-smell/release-names.py49
-rw-r--r--test/sanity/code-smell/release-names.requirements.txt1
-rw-r--r--test/sanity/code-smell/required-and-default-attributes.py3
-rw-r--r--test/sanity/code-smell/rstcheck.py3
-rw-r--r--test/sanity/code-smell/rstcheck.requirements.in3
-rw-r--r--test/sanity/code-smell/rstcheck.requirements.txt52
-rw-r--r--test/sanity/code-smell/test-constraints.py32
-rw-r--r--test/sanity/code-smell/update-bundled.py3
-rw-r--r--test/sanity/code-smell/update-bundled.requirements.in1
-rw-r--r--test/sanity/code-smell/update-bundled.requirements.txt7
-rw-r--r--test/sanity/ignore.txt257
-rw-r--r--test/support/integration/plugins/inventory/foreman.py3
-rw-r--r--test/support/integration/plugins/lookup/rabbitmq.py190
-rw-r--r--test/support/integration/plugins/module_utils/crypto.py2125
-rw-r--r--test/support/integration/plugins/module_utils/database.py142
-rw-r--r--test/support/integration/plugins/module_utils/ecs/__init__.py0
-rw-r--r--test/support/integration/plugins/module_utils/ecs/api.py364
-rw-r--r--test/support/integration/plugins/module_utils/mysql.py106
-rw-r--r--test/support/integration/plugins/module_utils/postgres.py330
-rw-r--r--test/support/integration/plugins/module_utils/rabbitmq.py220
-rw-r--r--test/support/integration/plugins/modules/aws_s3.py925
-rw-r--r--test/support/integration/plugins/modules/cloud_init_data_facts.py134
-rw-r--r--test/support/integration/plugins/modules/deploy_helper.py521
-rw-r--r--test/support/integration/plugins/modules/ec2_ami_info.py3
-rw-r--r--test/support/integration/plugins/modules/locale_gen.py237
-rw-r--r--test/support/integration/plugins/modules/lvg.py295
-rw-r--r--test/support/integration/plugins/modules/mongodb_parameter.py223
-rw-r--r--test/support/integration/plugins/modules/mongodb_user.py474
-rw-r--r--test/support/integration/plugins/modules/pids.py89
-rw-r--r--test/support/integration/plugins/modules/postgresql_db.py657
-rw-r--r--test/support/integration/plugins/modules/postgresql_privs.py1097
-rw-r--r--test/support/integration/plugins/modules/postgresql_query.py364
-rw-r--r--test/support/integration/plugins/modules/postgresql_set.py434
-rw-r--r--test/support/integration/plugins/modules/postgresql_table.py601
-rw-r--r--test/support/integration/plugins/modules/postgresql_user.py927
-rw-r--r--test/support/integration/plugins/modules/rabbitmq_plugin.py180
-rw-r--r--test/support/integration/plugins/modules/rabbitmq_queue.py257
-rw-r--r--test/support/integration/plugins/modules/s3_bucket.py740
-rw-r--r--test/support/integration/plugins/modules/selogin.py260
-rw-r--r--test/support/integration/plugins/modules/x509_crl.py783
-rw-r--r--test/support/integration/plugins/modules/x509_crl_info.py281
-rw-r--r--test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py3
-rw-r--r--test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py3
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py523
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/async_status.ps159
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.ps1226
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.py133
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.ps1404
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.py208
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.ps1153
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.py71
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.ps122
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.py56
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.ps1139
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.py168
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps1187
-rw-r--r--[l---------]test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.py237
-rw-r--r--test/support/windows-integration/plugins/modules/win_data_deduplication.ps1129
-rw-r--r--test/support/windows-integration/plugins/modules/win_data_deduplication.py87
-rw-r--r--test/support/windows-integration/plugins/modules/win_dsc.ps1398
-rw-r--r--test/support/windows-integration/plugins/modules/win_dsc.py183
-rw-r--r--test/support/windows-integration/plugins/modules/win_feature.ps1111
-rw-r--r--test/support/windows-integration/plugins/modules/win_feature.py149
-rw-r--r--test/support/windows-integration/plugins/modules/win_find.ps1416
-rw-r--r--test/support/windows-integration/plugins/modules/win_find.py345
-rw-r--r--test/support/windows-integration/plugins/modules/win_format.ps1200
-rw-r--r--test/support/windows-integration/plugins/modules/win_format.py103
-rw-r--r--test/support/windows-integration/plugins/modules/win_path.ps1145
-rw-r--r--test/support/windows-integration/plugins/modules/win_path.py79
-rw-r--r--test/support/windows-integration/plugins/modules/win_tempfile.py67
-rw-r--r--test/support/windows-integration/plugins/modules/win_template.py66
-rw-r--r--test/units/_vendor/test_vendor.py2
-rw-r--r--test/units/cli/galaxy/test_display_collection.py2
-rw-r--r--test/units/cli/galaxy/test_execute_list_collection.py7
-rw-r--r--test/units/cli/galaxy/test_get_collection_widths.py8
-rw-r--r--test/units/cli/test_cli.py2
-rw-r--r--test/units/cli/test_console.py2
-rw-r--r--test/units/cli/test_doc.py17
-rw-r--r--test/units/cli/test_galaxy.py10
-rw-r--r--test/units/cli/test_vault.py20
-rw-r--r--test/units/compat/builtins.py33
-rw-r--r--test/units/compat/mock.py122
-rw-r--r--test/units/compat/unittest.py4
-rw-r--r--test/units/errors/test_errors.py11
-rw-r--r--test/units/executor/module_common/test_module_common.py5
-rw-r--r--test/units/executor/test_interpreter_discovery.py2
-rw-r--r--test/units/executor/test_play_iterator.py45
-rw-r--r--test/units/executor/test_playbook_executor.py2
-rw-r--r--test/units/executor/test_task_executor.py2
-rw-r--r--test/units/executor/test_task_queue_manager_callbacks.py2
-rw-r--r--test/units/executor/test_task_result.py2
-rw-r--r--test/units/galaxy/test_api.py98
-rw-r--r--test/units/galaxy/test_collection.py38
-rw-r--r--test/units/galaxy/test_collection_install.py149
-rw-r--r--test/units/galaxy/test_role_install.py17
-rw-r--r--test/units/galaxy/test_token.py2
-rw-r--r--test/units/mock/path.py2
-rw-r--r--test/units/module_utils/basic/test_argument_spec.py15
-rw-r--r--test/units/module_utils/basic/test_deprecate_warn.py2
-rw-r--r--test/units/module_utils/basic/test_filesystem.py26
-rw-r--r--test/units/module_utils/basic/test_get_module_path.py2
-rw-r--r--test/units/module_utils/basic/test_imports.py2
-rw-r--r--test/units/module_utils/basic/test_platform_distribution.py2
-rw-r--r--test/units/module_utils/basic/test_selinux.py3
-rw-r--r--test/units/module_utils/basic/test_set_cwd.py2
-rw-r--r--test/units/module_utils/basic/test_tmpdir.py2
-rw-r--r--test/units/module_utils/common/arg_spec/test_aliases.py5
-rw-r--r--test/units/module_utils/common/arg_spec/test_validate_invalid.py8
-rw-r--r--test/units/module_utils/common/test_locale.py2
-rw-r--r--test/units/module_utils/common/test_sys_info.py2
-rw-r--r--test/units/module_utils/common/validation/test_check_required_if.py2
-rw-r--r--test/units/module_utils/facts/base.py2
-rw-r--r--test/units/module_utils/facts/hardware/linux_data.py48
-rw-r--r--test/units/module_utils/facts/hardware/test_linux.py27
-rw-r--r--test/units/module_utils/facts/network/test_fc_wwn.py2
-rw-r--r--test/units/module_utils/facts/network/test_generic_bsd.py2
-rw-r--r--test/units/module_utils/facts/network/test_iscsi_get_initiator.py2
-rw-r--r--test/units/module_utils/facts/other/test_facter.py2
-rw-r--r--test/units/module_utils/facts/other/test_ohai.py2
-rw-r--r--test/units/module_utils/facts/system/distribution/conftest.py2
-rw-r--r--test/units/module_utils/facts/system/distribution/fixtures/deepin_20.4.json29
-rw-r--r--test/units/module_utils/facts/system/distribution/fixtures/eurolinux_8.5.json46
-rw-r--r--test/units/module_utils/facts/system/distribution/fixtures/tencentos_3_1.json50
-rw-r--r--test/units/module_utils/facts/system/distribution/fixtures/uos_20.json29
-rw-r--r--test/units/module_utils/facts/system/test_lsb.py2
-rw-r--r--test/units/module_utils/facts/test_ansible_collector.py2
-rw-r--r--test/units/module_utils/facts/test_collector.py30
-rw-r--r--test/units/module_utils/facts/test_collectors.py10
-rw-r--r--test/units/module_utils/facts/test_facts.py2
-rw-r--r--test/units/module_utils/facts/test_sysctl.py2
-rw-r--r--test/units/module_utils/facts/test_utils.py2
-rw-r--r--test/units/module_utils/urls/test_RedirectHandlerFactory.py8
-rw-r--r--test/units/modules/test_apt.py3
-rw-r--r--test/units/modules/test_apt_key.py3
-rw-r--r--test/units/modules/test_async_wrapper.py2
-rw-r--r--test/units/modules/test_hostname.py114
-rw-r--r--test/units/modules/test_iptables.py212
-rw-r--r--test/units/modules/test_service_facts.py2
-rw-r--r--test/units/modules/utils.py2
-rw-r--r--test/units/parsing/test_ajson.py2
-rw-r--r--test/units/parsing/test_dataloader.py10
-rw-r--r--test/units/parsing/test_mod_args.py2
-rw-r--r--test/units/parsing/vault/test_vault.py134
-rw-r--r--test/units/parsing/vault/test_vault_editor.py64
-rw-r--r--test/units/parsing/yaml/test_loader.py6
-rw-r--r--test/units/parsing/yaml/test_objects.py2
-rw-r--r--test/units/playbook/role/test_include_role.py2
-rw-r--r--test/units/playbook/role/test_role.py5
-rw-r--r--test/units/playbook/test_base.py14
-rw-r--r--test/units/playbook/test_conditional.py42
-rw-r--r--test/units/playbook/test_helpers.py78
-rw-r--r--test/units/playbook/test_included_file.py2
-rw-r--r--test/units/playbook/test_task.py2
-rw-r--r--test/units/plugins/action/test_action.py4
-rw-r--r--test/units/plugins/action/test_gather_facts.py2
-rw-r--r--test/units/plugins/action/test_raw.py2
-rw-r--r--test/units/plugins/cache/test_cache.py10
-rw-r--r--test/units/plugins/callback/test_callback.py6
-rw-r--r--test/units/plugins/connection/test_connection.py6
-rw-r--r--test/units/plugins/connection/test_psrp.py2
-rw-r--r--test/units/plugins/connection/test_ssh.py2
-rw-r--r--test/units/plugins/connection/test_winrm.py10
-rw-r--r--test/units/plugins/filter/test_mathstuff.py20
-rw-r--r--test/units/plugins/inventory/test_inventory.py3
-rw-r--r--test/units/plugins/inventory/test_script.py2
-rw-r--r--test/units/plugins/lookup/test_password.py2
-rw-r--r--test/units/plugins/strategy/test_linear.py2
-rw-r--r--test/units/plugins/strategy/test_strategy.py15
-rw-r--r--test/units/plugins/test_plugins.py5
-rw-r--r--test/units/template/test_native_concat.py36
-rw-r--r--test/units/template/test_safe_eval.py44
-rw-r--r--test/units/template/test_templar.py88
-rw-r--r--test/units/template/test_vars.py52
-rw-r--r--test/units/utils/collection_loader/test_collection_loader.py14
-rw-r--r--test/units/utils/display/test_broken_cowsay.py27
-rw-r--r--test/units/utils/test_display.py2
-rw-r--r--test/units/utils/test_vars.py4
-rw-r--r--test/units/vars/test_variable_manager.py2
771 files changed, 13356 insertions, 26697 deletions
diff --git a/test/integration/targets/add_host/tasks/main.yml b/test/integration/targets/add_host/tasks/main.yml
index 399b0b6b..d1583eff 100644
--- a/test/integration/targets/add_host/tasks/main.yml
+++ b/test/integration/targets/add_host/tasks/main.yml
@@ -157,3 +157,20 @@
assert:
that:
- badinput is failed
+
+- name: Add hosts in a loop
+ add_host:
+ name: 'host_{{item}}'
+ loop:
+ - 1
+ - 2
+ - 2
+ register: add_host_loop_res
+
+- name: verify correct changed results
+ assert:
+ that:
+ - add_host_loop_res.results[0] is changed
+ - add_host_loop_res.results[1] is changed
+ - add_host_loop_res.results[2] is not changed
+ - add_host_loop_res is changed
diff --git a/test/integration/targets/ansiballz_python/aliases b/test/integration/targets/ansiballz_python/aliases
index e2c8fd39..7ae73ab9 100644
--- a/test/integration/targets/ansiballz_python/aliases
+++ b/test/integration/targets/ansiballz_python/aliases
@@ -1,3 +1,2 @@
shippable/posix/group1
-skip/aix
context/target
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json
new file mode 100644
index 00000000..243a5e43
--- /dev/null
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json
@@ -0,0 +1,30 @@
+{
+ "collection_info": {
+ "description": null,
+ "repository": "",
+ "tags": [],
+ "dependencies": {},
+ "authors": [
+ "Ansible (https://ansible.com)"
+ ],
+ "issues": "",
+ "name": "testcol",
+ "license": [
+ "GPL-3.0-or-later"
+ ],
+ "documentation": "",
+ "namespace": "testns",
+ "version": "0.1.1231",
+ "readme": "README.md",
+ "license_file": "COPYING",
+    "homepage": ""
+ },
+ "file_manifest_file": {
+ "format": 1,
+ "ftype": "file",
+ "chksum_sha256": "4c15a867ceba8ba1eaf2f4a58844bb5dbb82fec00645fc7eb74a3d31964900f6",
+ "name": "FILES.json",
+ "chksum_type": "sha256"
+ },
+ "format": 1
+}
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/cache/notjsonfile.py b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/cache/notjsonfile.py
new file mode 100644
index 00000000..9fa25b40
--- /dev/null
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/cache/notjsonfile.py
@@ -0,0 +1,70 @@
+# (c) 2020 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = '''
+ cache: notjsonfile
+ broken:
+ short_description: JSON formatted files.
+ description:
+ - This cache uses JSON formatted, per host, files saved to the filesystem.
+ author: Ansible Core (@ansible-core)
+ version_added: 0.7.0
+ options:
+ _uri:
+ required: True
+ description:
+ - Path in which the cache plugin will save the JSON files
+ env:
+ - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
+ version_added: 1.2.0
+ ini:
+ - key: fact_caching_connection
+ section: defaults
+ deprecated:
+ alternative: none
+ why: Test deprecation
+ version: '2.0.0'
+ _prefix:
+ description: User defined prefix to use when creating the JSON files
+ env:
+ - name: ANSIBLE_CACHE_PLUGIN_PREFIX
+ version_added: 1.1.0
+ ini:
+ - key: fact_caching_prefix
+ section: defaults
+ deprecated:
+ alternative: none
+ why: Another test deprecation
+ removed_at_date: '2050-01-01'
+ _timeout:
+ default: 86400
+ description: Expiration timeout for the cache plugin data
+ env:
+ - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
+ ini:
+ - key: fact_caching_timeout
+ section: defaults
+ vars:
+ - name: notsjonfile_fact_caching_timeout
+ version_added: 1.5.0
+ deprecated:
+ alternative: do not use a variable
+ why: Test deprecation
+ version: '3.0.0'
+ type: integer
+ extends_documentation_fragment:
+ - testns.testcol2.plugin
+'''
+
+from ansible.plugins.cache import BaseFileCacheModule
+
+
+class CacheModule(BaseFileCacheModule):
+ """
+ A caching module backed by json files.
+ """
+ pass
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py
new file mode 100644
index 00000000..caec2ed6
--- /dev/null
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/inventory/statichost.py
@@ -0,0 +1,36 @@
+# Copyright (c) 2018 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = '''
+ inventory: statichost
+ broken:
+ short_description: Add a single host
+ description: Add a single host
+ extends_documentation_fragment:
+ - inventory_cache
+ options:
+ plugin:
+ description: plugin name (must be statichost)
+ required: true
+ hostname:
+ description: Toggle display of stderr even when script was successful
+ required: True
+'''
+
+from ansible.errors import AnsibleParserError
+from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable
+
+
+class InventoryModule(BaseInventoryPlugin, Cacheable):
+
+ NAME = 'testns.content_adj.statichost'
+
+ def verify_file(self, path):
+ pass
+
+ def parse(self, inventory, loader, path, cache=None):
+
+ pass
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py
new file mode 100644
index 00000000..d4569869
--- /dev/null
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py
@@ -0,0 +1,45 @@
+# (c) 2020 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+ lookup: noop
+ broken:
+ author: Ansible core team
+ short_description: returns input
+ description:
+ - this is a noop
+ deprecated:
+ alternative: Use some other lookup
+ why: Test deprecation
+ removed_in: '3.0.0'
+ extends_documentation_fragment:
+ - testns.testcol2.version_added
+"""
+
+EXAMPLES = """
+- name: do nothing
+ debug: msg="{{ lookup('testns.testcol.noop', [1,2,3,4] }}"
+"""
+
+RETURN = """
+ _list:
+ description: input given
+ version_added: 1.0.0
+"""
+
+from ansible.module_utils.common._collections_compat import Sequence
+from ansible.plugins.lookup import LookupBase
+from ansible.errors import AnsibleError
+
+
+class LookupModule(LookupBase):
+
+ def run(self, terms, **kwargs):
+ if not isinstance(terms, Sequence):
+ raise AnsibleError("testns.testcol.noop expects a list")
+ return terms
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/fakemodule.py b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/fakemodule.py
new file mode 100644
index 00000000..a1caeb14
--- /dev/null
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/fakemodule.py
@@ -0,0 +1,28 @@
+#!/usr/bin/python
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+DOCUMENTATION = """
+ module: fakemodule
+ broken:
+ short_desciption: fake module
+ description:
+ - this is a fake module
+ version_added: 1.0.0
+ options:
+ _notreal:
+ description: really not a real option
+ author:
+ - me
+"""
+
+import json
+
+
+def main():
+ print(json.dumps(dict(changed=False, source='testns.testcol.fakemodule')))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/notrealmodule.py
index e6749cdc..4479f23f 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/notrealmodule.py
@@ -1,7 +1,13 @@
+#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from validate_modules.main import main
+import json
+
+
+def main():
+ print(json.dumps(dict(changed=False, source='testns.testcol.notrealmodule')))
+
if __name__ == '__main__':
main()
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
new file mode 100644
index 00000000..fb0e319d
--- /dev/null
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
@@ -0,0 +1,96 @@
+#!/usr/bin/python
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: randommodule
+short_description: A random module
+description:
+ - A random module.
+author:
+ - Ansible Core Team
+version_added: 1.0.0
+deprecated:
+ alternative: Use some other module
+ why: Test deprecation
+ removed_in: '3.0.0'
+options:
+ test:
+ description: Some text.
+ type: str
+ version_added: 1.2.0
+ sub:
+ description: Suboptions.
+ type: dict
+ suboptions:
+ subtest:
+ description: A suboption.
+ type: int
+ version_added: 1.1.0
+ # The following is the wrong syntax, and should not get processed
+ # by add_collection_to_versions_and_dates()
+ options:
+ subtest2:
+ description: Another suboption.
+ type: float
+ version_added: 1.1.0
+ # The following is not supported in modules, and should not get processed
+ # by add_collection_to_versions_and_dates()
+ env:
+ - name: TEST_ENV
+ version_added: 1.0.0
+ deprecated:
+ alternative: none
+ why: Test deprecation
+ removed_in: '2.0.0'
+ version: '2.0.0'
+extends_documentation_fragment:
+ - testns.testcol2.module
+'''
+
+EXAMPLES = '''
+'''
+
+RETURN = '''
+z_last:
+ description: A last result.
+ broken:
+ type: str
+ returned: success
+ version_added: 1.3.0
+
+m_middle:
+ description:
+ - This should be in the middle.
+ - Has some more data
+ type: dict
+ returned: success and 1st of month
+ contains:
+ suboption:
+ description: A suboption.
+ type: str
+ choices: [ARF, BARN, c_without_capital_first_letter]
+ version_added: 1.4.0
+
+a_first:
+ description: A first result.
+ type: str
+ returned: success
+'''
+
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def main():
+ module = AnsibleModule(
+ argument_spec=dict(),
+ )
+
+ module.exit_json()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/vars/noop_vars_plugin.py b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/vars/noop_vars_plugin.py
new file mode 100644
index 00000000..ae0f75e0
--- /dev/null
+++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/plugins/vars/noop_vars_plugin.py
@@ -0,0 +1,30 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = '''
+ vars: noop_vars_plugin
+ broken:
+ short_description: Do NOT load host and group vars
+ description: don't test loading host and group vars from a collection
+ options:
+ stage:
+ default: all
+ choices: ['all', 'inventory', 'task']
+ type: str
+ ini:
+ - key: stage
+ section: testns.testcol.noop_vars_plugin
+ env:
+ - name: ANSIBLE_VARS_PLUGIN_STAGE
+ extends_documentation_fragment:
+ - testns.testcol2.deprecation
+'''
+
+from ansible.plugins.vars import BaseVarsPlugin
+
+
+class VarsModule(BaseVarsPlugin):
+
+ def get_vars(self, loader, path, entities, cache=True):
+ super(VarsModule, self).get_vars(loader, path, entities)
+ return {'collection': 'yes', 'notreal': 'value'}
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py
index 9eee46ed..7a64a5d5 100644
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py
+++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/lookup/noop.py
@@ -31,7 +31,8 @@ RETURN = """
version_added: 1.0.0
"""
-from ansible.module_utils.common._collections_compat import Sequence
+from collections.abc import Sequence
+
from ansible.plugins.lookup import LookupBase
from ansible.errors import AnsibleError
diff --git a/test/integration/targets/ansible-doc/runme.sh b/test/integration/targets/ansible-doc/runme.sh
index 549a341b..81eba619 100755
--- a/test/integration/targets/ansible-doc/runme.sh
+++ b/test/integration/targets/ansible-doc/runme.sh
@@ -25,10 +25,10 @@ expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' random
test "$current_out" == "$expected_out"
# ensure we do work with valid collection name for list
-ansible-doc --list testns.testcol --playbook-dir ./ 2>&1 | grep -v "Invalid collection pattern"
+ansible-doc --list testns.testcol --playbook-dir ./ 2>&1 | grep -v "Invalid collection name"
# ensure we dont break on invalid collection name for list
-ansible-doc --list testns.testcol.fakemodule --playbook-dir ./ 2>&1 | grep "Invalid collection pattern"
+ansible-doc --list testns.testcol.fakemodule --playbook-dir ./ 2>&1 | grep "Invalid collection name"
# test listing diff plugin types from collection
@@ -47,9 +47,10 @@ do
justcol=$(ansible-doc -l -t ${ptype} testns.testcol|wc -l)
test "$justcol" -eq 0
+ # TODO: do we want per namespace?
# ensure we get 1 plugins when restricting namespace
- justcol=$(ansible-doc -l -t ${ptype} --playbook-dir ./ testns|wc -l)
- test "$justcol" -eq 1
+ #justcol=$(ansible-doc -l -t ${ptype} --playbook-dir ./ testns|wc -l)
+ #test "$justcol" -eq 1
done
#### test role functionality
@@ -102,3 +103,31 @@ test "$current_out" == "$expected_out"
# just ensure it runs
ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --playbook-dir /dev/null
+
+# create broken role argument spec
+mkdir -p broken-docs/collections/ansible_collections/testns/testcol/roles/testrole/meta
+cat <<EOF > broken-docs/collections/ansible_collections/testns/testcol/roles/testrole/meta/main.yml
+---
+dependencies:
+galaxy_info:
+
+argument_specs:
+ main:
+ short_description: testns.testcol.testrole short description for main entry point
+ description:
+ - Longer description for testns.testcol.testrole main entry point.
+ author: Ansible Core (@ansible)
+ options:
+ opt1:
+ description: opt1 description
+ broken:
+ type: "str"
+ required: true
+EOF
+
+# ensure that --metadata-dump does not fail when --no-fail-on-errors is supplied
+ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --no-fail-on-errors --playbook-dir broken-docs testns.testcol
+
+# ensure that --metadata-dump does fail when --no-fail-on-errors is not supplied
+output=$(ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --playbook-dir broken-docs testns.testcol 2>&1 | grep -c 'ERROR!' || true)
+test "$output" -eq 1
diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml
index 759226d2..986da2f9 100644
--- a/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml
+++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml
@@ -24,6 +24,8 @@
- include_tasks: ./setup_recursive_scm_dependency.yml
- include_tasks: ./scm_dependency_deduplication.yml
- include_tasks: ./download.yml
+ - include_tasks: ./setup_collection_bad_version.yml
+ - include_tasks: ./test_invalid_version.yml
always:
diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_collection_bad_version.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_collection_bad_version.yml
new file mode 100644
index 00000000..0ef406e9
--- /dev/null
+++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_collection_bad_version.yml
@@ -0,0 +1,47 @@
+- name: Initialize a git repo
+ command: 'git init {{ test_error_repo_path }}'
+
+- stat:
+ path: "{{ test_error_repo_path }}"
+
+- name: Add a collection to the repository
+ command: 'ansible-galaxy collection init {{ item }}'
+ args:
+ chdir: '{{ scm_path }}'
+ loop:
+ - error_test.float_version_collection
+ - error_test.not_semantic_version_collection
+ - error_test.list_version_collection
+ - error_test.dict_version_collection
+
+- name: Add an invalid float version to a collection
+ lineinfile:
+ path: '{{ test_error_repo_path }}/float_version_collection/galaxy.yml'
+ regexp: '^version'
+ line: "version: 1.0" # Version is a float, not a string as expected
+
+- name: Add an invalid non-semantic string version a collection
+ lineinfile:
+ path: '{{ test_error_repo_path }}/not_semantic_version_collection/galaxy.yml'
+ regexp: '^version'
+ line: "version: '1.0'" # Version is a string, but not a semantic version as expected
+
+- name: Add an invalid list version to a collection
+ lineinfile:
+ path: '{{ test_error_repo_path }}/list_version_collection/galaxy.yml'
+ regexp: '^version'
+ line: "version: ['1.0.0']" # Version is a list, not a string as expected
+
+- name: Add an invalid version to a collection
+ lineinfile:
+ path: '{{ test_error_repo_path }}/dict_version_collection/galaxy.yml'
+ regexp: '^version'
+ line: "version: {'broken': 'version'}" # Version is a dict, not a string as expected
+
+- name: Commit the changes
+ command: '{{ item }}'
+ args:
+ chdir: '{{ test_error_repo_path }}'
+ loop:
+ - git add ./
+ - git commit -m 'add collections'
diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/test_invalid_version.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/test_invalid_version.yml
new file mode 100644
index 00000000..1f22bb8b
--- /dev/null
+++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/test_invalid_version.yml
@@ -0,0 +1,58 @@
+- block:
+ - name: test installing a collection with an invalid float version value
+ command: 'ansible-galaxy collection install git+file://{{ test_error_repo_path }}/.git#float_version_collection -vvvvvv'
+ ignore_errors: yes
+ register: invalid_version_result
+
+ - assert:
+ that:
+ - invalid_version_result is failed
+ - msg in invalid_version_result.stderr
+ vars:
+ req: error_test.float_version_collection:1.0
+ ver: "1.0 (<class 'float'>)"
+ msg: "Invalid version found for the collection '{{ req }}': {{ ver }}. A SemVer-compliant version or '*' is required."
+
+ - name: test installing a collection with an invalid non-SemVer string version value
+ command: 'ansible-galaxy collection install git+file://{{ test_error_repo_path }}/.git#not_semantic_version_collection -vvvvvv'
+ ignore_errors: yes
+ register: invalid_version_result
+
+ - assert:
+ that:
+ - invalid_version_result is failed
+ - msg in invalid_version_result.stderr
+ vars:
+ req: error_test.not_semantic_version_collection:1.0
+ ver: "1.0 (<class 'str'>)"
+ msg: "Invalid version found for the collection '{{ req }}': {{ ver }}. A SemVer-compliant version or '*' is required."
+
+ - name: test installing a collection with an invalid list version value
+ command: 'ansible-galaxy collection install git+file://{{ test_error_repo_path }}/.git#list_version_collection -vvvvvv'
+ ignore_errors: yes
+ register: invalid_version_result
+
+ - assert:
+ that:
+ - invalid_version_result is failed
+ - msg in invalid_version_result.stderr
+ vars:
+ req: "error_test.list_version_collection:['1.0.0']"
+ msg: "Invalid version found for the collection '{{ req }}'. A SemVer-compliant version or '*' is required."
+
+ - name: test installing a collection with an invalid dict version value
+ command: 'ansible-galaxy collection install git+file://{{ test_error_repo_path }}/.git#dict_version_collection -vvvvvv'
+ ignore_errors: yes
+ register: invalid_version_result
+
+ - assert:
+ that:
+ - invalid_version_result is failed
+ - msg in invalid_version_result.stderr
+ vars:
+ req: "error_test.dict_version_collection:{'broken': 'version'}"
+ msg: "Invalid version found for the collection '{{ req }}'. A SemVer-compliant version or '*' is required."
+
+ always:
+ - include_tasks: ./empty_installed_collections.yml
+ when: cleanup
diff --git a/test/integration/targets/ansible-galaxy-collection-scm/vars/main.yml b/test/integration/targets/ansible-galaxy-collection-scm/vars/main.yml
index c8f50afd..a82f25dc 100644
--- a/test/integration/targets/ansible-galaxy-collection-scm/vars/main.yml
+++ b/test/integration/targets/ansible-galaxy-collection-scm/vars/main.yml
@@ -2,3 +2,4 @@ install_path: "{{ galaxy_dir }}/collections/ansible_collections"
alt_install_path: "{{ galaxy_dir }}/other_collections/ansible_collections"
scm_path: "{{ galaxy_dir }}/development"
test_repo_path: "{{ galaxy_dir }}/development/ansible_test"
+test_error_repo_path: "{{ galaxy_dir }}/development/error_test"
diff --git a/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py b/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py
index 6f1a17f9..adefba05 100644
--- a/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py
+++ b/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py
@@ -78,6 +78,8 @@ RETURN = '''
'''
import os
+import subprocess
+import tarfile
import tempfile
import yaml
@@ -141,6 +143,21 @@ def publish_collection(module, collection):
'stderr': stderr,
}
+ if module.params['signature_dir'] is not None:
+ # To test user-provided signatures, we need to sign the MANIFEST.json before publishing
+
+ # Extract the tarfile to sign the MANIFEST.json
+ with tarfile.open(collection_path, mode='r') as collection_tar:
+ collection_tar.extractall(path=os.path.join(collection_dir, '%s-%s-%s' % (namespace, name, version)))
+
+ manifest_path = os.path.join(collection_dir, '%s-%s-%s' % (namespace, name, version), 'MANIFEST.json')
+ signature_path = os.path.join(module.params['signature_dir'], '%s-%s-%s-MANIFEST.json.asc' % (namespace, name, version))
+ sign_manifest(signature_path, manifest_path, module, result)
+
+ # Create the tarfile containing the signed MANIFEST.json
+ with tarfile.open(collection_path, "w:gz") as tar:
+ tar.add(os.path.join(collection_dir, '%s-%s-%s' % (namespace, name, version)), arcname=os.path.sep)
+
publish_args = ['ansible-galaxy', 'collection', 'publish', collection_path, '--server', module.params['server']]
if module.params['token']:
publish_args.extend(['--token', module.params['token']])
@@ -155,6 +172,49 @@ def publish_collection(module, collection):
return result
+def sign_manifest(signature_path, manifest_path, module, collection_setup_result):
+ collection_setup_result['gpg_detach_sign'] = {'signature_path': signature_path}
+
+ status_fd_read, status_fd_write = os.pipe()
+ gpg_cmd = [
+ "gpg",
+ "--batch",
+ "--pinentry-mode",
+ "loopback",
+ "--yes",
+ "--passphrase",
+ "SECRET",
+ "--homedir",
+ module.params['signature_dir'],
+ "--detach-sign",
+ "--armor",
+ "--output",
+ signature_path,
+ manifest_path,
+ ]
+ try:
+ p = subprocess.Popen(
+ gpg_cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ pass_fds=(status_fd_write,),
+ encoding='utf8',
+ )
+ except (FileNotFoundError, subprocess.SubprocessError) as err:
+ collection_setup_result['gpg_detach_sign']['error'] = "Failed during GnuPG verification with command '{gpg_cmd}': {err}".format(
+ gpg_cmd=gpg_cmd, err=err
+ )
+ else:
+ stdout, stderr = p.communicate()
+ collection_setup_result['gpg_detach_sign']['stdout'] = stdout
+ if stderr:
+ error = "Failed during GnuPG verification with command '{gpg_cmd}':\n{stderr}".format(gpg_cmd=gpg_cmd, stderr=stderr)
+ collection_setup_result['gpg_detach_sign']['error'] = error
+ finally:
+ os.close(status_fd_write)
+
+
def run_module():
module_args = dict(
server=dict(type='str', required=True),
@@ -171,6 +231,7 @@ def run_module():
use_symlink=dict(type='bool', default=False),
),
),
+ signature_dir=dict(type='path', default=None),
)
module = AnsibleModule(
diff --git a/test/integration/targets/ansible-galaxy-collection/meta/main.yml b/test/integration/targets/ansible-galaxy-collection/meta/main.yml
index e3dd5fb1..ca46bead 100644
--- a/test/integration/targets/ansible-galaxy-collection/meta/main.yml
+++ b/test/integration/targets/ansible-galaxy-collection/meta/main.yml
@@ -1,3 +1,4 @@
---
dependencies:
- setup_remote_tmp_dir
+- setup_pexpect
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
index ad10bff8..d345031b 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
@@ -352,6 +352,19 @@
- skip.me
dest: '{{ galaxy_dir }}/ansible_collections/requirements-with-role.yml'
+- name: install roles from requirements file with collection-only keyring option
+ command: ansible-galaxy role install -r {{ req_file }} -s {{ test_name }} --keyring {{ keyring }}
+ vars:
+ req_file: '{{ galaxy_dir }}/ansible_collections/requirements-with-role.yml'
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ ignore_errors: yes
+ register: invalid_opt
+
+- assert:
+ that:
+ - invalid_opt is failed
+ - "'unrecognized arguments: --keyring' in invalid_opt.stderr"
+
# Need to run with -vvv to validate the roles will be skipped msg
- name: install collections only with requirements-with-role.yml - {{ test_name }}
command: ansible-galaxy collection install -r '{{ galaxy_dir }}/ansible_collections/requirements-with-role.yml' -s '{{ test_name }}' -vvv
@@ -410,6 +423,241 @@
- (install_req_actual.results[0].content | b64decode | from_json).collection_info.version == '1.0.0'
- (install_req_actual.results[1].content | b64decode | from_json).collection_info.version == '1.0.0'
+- name: uninstall collections for next requirements file test
+ file:
+ path: '{{ galaxy_dir }}/ansible_collections/{{ collection }}/name'
+ state: absent
+ loop_control:
+ loop_var: collection
+ loop:
+ - namespace7
+ - namespace8
+ - namespace9
+
+- name: rewrite requirements file with collections and signatures
+ copy:
+ content: |
+ collections:
+ - name: namespace7.name
+ version: "1.0.0"
+ signatures:
+ - "{{ not_mine }}"
+ - "{{ also_not_mine }}"
+ - "file://{{ gpg_homedir }}/namespace7-name-1.0.0-MANIFEST.json.asc"
+ - namespace8.name
+ - name: namespace9.name
+ signatures:
+ - "file://{{ gpg_homedir }}/namespace9-name-1.0.0-MANIFEST.json.asc"
+ dest: '{{ galaxy_dir }}/ansible_collections/requirements.yaml'
+ vars:
+ not_mine: "file://{{ gpg_homedir }}/namespace1-name1-1.0.0-MANIFEST.json.asc"
+ also_not_mine: "file://{{ gpg_homedir }}/namespace1-name1-1.0.9-MANIFEST.json.asc"
+
+- name: install collection with mutually exclusive options
+ command: ansible-galaxy collection install -r {{ req_file }} -s {{ test_name }} {{ cli_signature }}
+ vars:
+ req_file: "{{ galaxy_dir }}/ansible_collections/requirements.yaml"
+ # --signature is an ansible-galaxy collection install subcommand, but mutually exclusive with -r
+ cli_signature: "--signature file://{{ gpg_homedir }}/namespace7-name-1.0.0-MANIFEST.json.asc"
+ ignore_errors: yes
+ register: mutually_exclusive_opts
+
+- assert:
+ that:
+ - mutually_exclusive_opts is failed
+ - expected_error in actual_error
+ vars:
+ expected_error: >-
+ The --signatures option and --requirements-file are mutually exclusive.
+ Use the --signatures with positional collection_name args or provide a
+ 'signatures' key for requirements in the --requirements-file.
+ actual_error: "{{ mutually_exclusive_opts.stderr }}"
+
+- name: install a collection with user-supplied signatures for verification but no keyring
+ command: ansible-galaxy collection install namespace1.name1:1.0.0 {{ cli_signature }}
+ vars:
+ cli_signature: "--signature file://{{ gpg_homedir }}/namespace1-name1-1.0.0-MANIFEST.json.asc"
+ ignore_errors: yes
+ register: required_together
+
+- assert:
+ that:
+ - required_together is failed
+ - '"ERROR! Signatures were provided to verify namespace1.name1 but no keyring was configured." in required_together.stderr'
+
+- name: install collections with ansible-galaxy install -r with invalid signatures - {{ test_name }}
+ # Note that --keyring is a valid option for 'ansible-galaxy install -r ...', not just 'ansible-galaxy collection ...'
+ command: ansible-galaxy install -r {{ req_file }} -s {{ test_name }} --keyring {{ keyring }} {{ galaxy_verbosity }}
+ register: install_req
+ ignore_errors: yes
+ vars:
+ req_file: "{{ galaxy_dir }}/ansible_collections/requirements.yaml"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: all
+
+- name: assert invalid signature is fatal with ansible-galaxy install - {{ test_name }}
+ assert:
+ that:
+ - install_req is failed
+ - '"Installing ''namespace7.name:1.0.0'' to" in install_req.stdout'
+ - '"Not installing namespace7.name because GnuPG signature verification failed" in install_req.stderr'
+ # The other collections shouldn't be installed because they're listed
+ # after the failing collection and --ignore-errors was not provided
+ - '"Installing ''namespace8.name:1.0.0'' to" not in install_req.stdout'
+ - '"Installing ''namespace9.name:1.0.0'' to" not in install_req.stdout'
+
+# This command is hardcoded with -vvvv purposefully to evaluate extra verbosity messages
+- name: install collections with ansible-galaxy install and --ignore-errors - {{ test_name }}
+ command: ansible-galaxy install -r {{ req_file }} {{ cli_opts }} -vvvv
+ register: install_req
+ vars:
+ req_file: "{{ galaxy_dir }}/ansible_collections/requirements.yaml"
+ cli_opts: "-s {{ test_name }} --keyring {{ keyring }} --ignore-errors"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: all
+
+- name: get result of install collections with ansible-galaxy install - {{ test_name }}
+ slurp:
+ path: '{{ galaxy_dir }}/ansible_collections/{{ collection }}/name/MANIFEST.json'
+ register: install_req_actual
+ loop_control:
+ loop_var: collection
+ loop:
+ - namespace8
+ - namespace9
+
+# SIVEL
+- name: assert invalid signature is not fatal with ansible-galaxy install --ignore-errors - {{ test_name }}
+ assert:
+ that:
+ - install_req is success
+ - '"Installing ''namespace7.name:1.0.0'' to" in install_req.stdout'
+ - '"Signature verification failed for ''namespace7.name'' (return code 1)" in install_req.stdout'
+ - '"Not installing namespace7.name because GnuPG signature verification failed." in install_stderr'
+ - '"Failed to install collection namespace7.name:1.0.0 but skipping due to --ignore-errors being set." in install_stderr'
+ - '"Installing ''namespace8.name:1.0.0'' to" in install_req.stdout'
+ - '"Installing ''namespace9.name:1.0.0'' to" in install_req.stdout'
+ - (install_req_actual.results[0].content | b64decode | from_json).collection_info.version == '1.0.0'
+ - (install_req_actual.results[1].content | b64decode | from_json).collection_info.version == '1.0.0'
+ vars:
+ install_stderr: "{{ install_req.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+- name: clean up collections from last test
+ file:
+ path: '{{ galaxy_dir }}/ansible_collections/{{ collection }}/name'
+ state: absent
+ loop_control:
+ loop_var: collection
+ loop:
+ - namespace8
+ - namespace9
+
+- name: install collections with only one valid signature using ansible-galaxy install - {{ test_name }}
+ command: ansible-galaxy install -r {{ req_file }} {{ cli_opts }} {{ galaxy_verbosity }}
+ register: install_req
+ vars:
+ req_file: "{{ galaxy_dir }}/ansible_collections/requirements.yaml"
+ cli_opts: "-s {{ test_name }} --keyring {{ keyring }}"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+
+- name: get result of install collections with ansible-galaxy install - {{ test_name }}
+ slurp:
+ path: '{{ galaxy_dir }}/ansible_collections/{{ collection }}/name/MANIFEST.json'
+ register: install_req_actual
+ loop_control:
+ loop_var: collection
+ loop:
+ - namespace7
+ - namespace8
+ - namespace9
+
+- name: assert just one valid signature is not fatal with ansible-galaxy install - {{ test_name }}
+ assert:
+ that:
+ - install_req is success
+ - '"Installing ''namespace7.name:1.0.0'' to" in install_req.stdout'
+ - '"Signature verification failed for ''namespace7.name'' (return code 1)" not in install_req.stdout'
+ - '"Not installing namespace7.name because GnuPG signature verification failed." not in install_stderr'
+ - '"Installing ''namespace8.name:1.0.0'' to" in install_req.stdout'
+ - '"Installing ''namespace9.name:1.0.0'' to" in install_req.stdout'
+ - (install_req_actual.results[0].content | b64decode | from_json).collection_info.version == '1.0.0'
+ - (install_req_actual.results[1].content | b64decode | from_json).collection_info.version == '1.0.0'
+ - (install_req_actual.results[2].content | b64decode | from_json).collection_info.version == '1.0.0'
+ vars:
+ install_stderr: "{{ install_req.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+- name: clean up collections from last test
+ file:
+ path: '{{ galaxy_dir }}/ansible_collections/{{ collection }}/name'
+ state: absent
+ loop_control:
+ loop_var: collection
+ loop:
+ - namespace7
+ - namespace8
+ - namespace9
+
+- name: install collections with only one valid signature by ignoring the other errors
+ command: ansible-galaxy install -r {{ req_file }} {{ cli_opts }} {{ galaxy_verbosity }} --ignore-signature-status-code FAILURE
+ register: install_req
+ vars:
+ req_file: "{{ galaxy_dir }}/ansible_collections/requirements.yaml"
+ cli_opts: "-s {{ test_name }} --keyring {{ keyring }}"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: all
+ ANSIBLE_GALAXY_IGNORE_SIGNATURE_STATUS_CODES: BADSIG # cli option is appended and both status codes are ignored
+
+- name: get result of install collections with ansible-galaxy install - {{ test_name }}
+ slurp:
+ path: '{{ galaxy_dir }}/ansible_collections/{{ collection }}/name/MANIFEST.json'
+ register: install_req_actual
+ loop_control:
+ loop_var: collection
+ loop:
+ - namespace7
+ - namespace8
+ - namespace9
+
+- name: assert invalid signature is not fatal with ansible-galaxy install - {{ test_name }}
+ assert:
+ that:
+ - install_req is success
+ - '"Installing ''namespace7.name:1.0.0'' to" in install_req.stdout'
+ - '"Signature verification failed for ''namespace7.name'' (return code 1)" not in install_req.stdout'
+ - '"Not installing namespace7.name because GnuPG signature verification failed." not in install_stderr'
+ - '"Installing ''namespace8.name:1.0.0'' to" in install_req.stdout'
+ - '"Installing ''namespace9.name:1.0.0'' to" in install_req.stdout'
+ - (install_req_actual.results[0].content | b64decode | from_json).collection_info.version == '1.0.0'
+ - (install_req_actual.results[1].content | b64decode | from_json).collection_info.version == '1.0.0'
+ - (install_req_actual.results[2].content | b64decode | from_json).collection_info.version == '1.0.0'
+ vars:
+ install_stderr: "{{ install_req.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+- name: clean up collections from last test
+ file:
+ path: '{{ galaxy_dir }}/ansible_collections/{{ collection }}/name'
+ state: absent
+ loop_control:
+ loop_var: collection
+ loop:
+ - namespace7
+ - namespace8
+ - namespace9
+
# Uncomment once pulp container is at pulp>=0.5.0
#- name: install cache.cache at the current latest version
# command: ansible-galaxy collection install cache.cache -s '{{ test_name }}' -vvv
@@ -528,6 +776,193 @@
path: '{{ galaxy_dir }}/ansible_collections'
state: absent
+# This command is hardcoded with -vvvv purposefully to evaluate extra verbosity messages
+- name: install collection with signature with invalid keyring
+ command: ansible-galaxy collection install namespace1.name1 -vvvv {{ signature_option }} {{ keyring_option }}
+ environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ vars:
+ signature_option: "--signature file://{{ gpg_homedir }}/namespace1-name1-1.0.9-MANIFEST.json.asc"
+ keyring_option: '--keyring {{ gpg_homedir }}/i_do_not_exist.kbx'
+ ignore_errors: yes
+ register: keyring_error
+
+- assert:
+ that:
+ - keyring_error is failed
+ - expected_errors[0] in actual_error
+ - expected_errors[1] in actual_error
+ - expected_errors[2] in actual_error
+ - unexpected_warning not in actual_warning
+ vars:
+ keyring: "{{ gpg_homedir }}/i_do_not_exist.kbx"
+ expected_errors:
+ - "Signature verification failed for 'namespace1.name1' (return code 2):"
+ - "* The public key is not available."
+ - >-
+ * It was not possible to check the signature. This may be caused
+ by a missing public key or an unsupported algorithm. A RC of 4
+ indicates unknown algorithm, a 9 indicates a missing public key.
+ unexpected_warning: >-
+ The GnuPG keyring used for collection signature
+ verification was not configured but signatures were
+ provided by the Galaxy server to verify authenticity.
+ Configure a keyring for ansible-galaxy to use
+ or disable signature verification.
+ Skipping signature verification.
+ actual_warning: "{{ keyring_error.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ stdout_no_color: "{{ keyring_error.stdout | regex_replace(reset_color) | regex_replace(color) }}"
+ # Remove formatting from the reason so it's one line
+ actual_error: "{{ stdout_no_color | regex_replace('\"') | regex_replace('\\n') | regex_replace(' ', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+# TODO: Uncomment once signatures are provided by pulp-galaxy-ng
+#- name: install collection with signature provided by Galaxy server (no keyring)
+# command: ansible-galaxy collection install namespace1.name1 {{ galaxy_verbosity }}
+# environment:
+# ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+# ignore_errors: yes
+# register: keyring_warning
+#
+#- name: assert a warning was given but signature verification did not occur without configuring the keyring
+# assert:
+# that:
+# - keyring_warning is not failed
#      - '"Installing ''namespace1.name1:1.0.9'' to" in keyring_warning.stdout'
+# # TODO: Don't just check the stdout, make sure the collection was installed.
+# - expected_warning in actual_warning
+# vars:
+# expected_warning: >-
+# The GnuPG keyring used for collection signature
+# verification was not configured but signatures were
+# provided by the Galaxy server to verify authenticity.
+# Configure a keyring for ansible-galaxy to use
+# or disable signature verification.
+# Skipping signature verification.
+# actual_warning: "{{ keyring_warning.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+# reset_color: '\x1b\[0m'
+# color: '\x1b\[[0-9];[0-9]{2}m'
+
+- name: install simple collection from first accessible server with valid detached signature
+ command: ansible-galaxy collection install namespace1.name1 {{ galaxy_verbosity }} {{ signature_options }}
+ environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ vars:
+ signature_options: "--signature {{ signature }} --keyring {{ keyring }}"
+ signature: "file://{{ gpg_homedir }}/namespace1-name1-1.0.9-MANIFEST.json.asc"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ register: from_first_good_server
+
+- name: get installed files of install simple collection from first good server
+ find:
+ path: '{{ galaxy_dir }}/ansible_collections/namespace1/name1'
+ file_type: file
+ register: install_normal_files
+
+- name: get the manifest of install simple collection from first good server
+ slurp:
+ path: '{{ galaxy_dir }}/ansible_collections/namespace1/name1/MANIFEST.json'
+ register: install_normal_manifest
+
+- name: assert install simple collection from first good server
+ assert:
+ that:
+ - '"Installing ''namespace1.name1:1.0.9'' to" in from_first_good_server.stdout'
+ - install_normal_files.files | length == 3
+ - install_normal_files.files[0].path | basename in ['MANIFEST.json', 'FILES.json', 'README.md']
+ - install_normal_files.files[1].path | basename in ['MANIFEST.json', 'FILES.json', 'README.md']
+ - install_normal_files.files[2].path | basename in ['MANIFEST.json', 'FILES.json', 'README.md']
+ - (install_normal_manifest.content | b64decode | from_json).collection_info.version == '1.0.9'
+
+- name: Remove the collection
+ file:
+ path: '{{ galaxy_dir }}/ansible_collections/namespace1'
+ state: absent
+
+# This command is hardcoded with -vvvv purposefully to evaluate extra verbosity messages
+- name: install simple collection with invalid detached signature
+ command: ansible-galaxy collection install namespace1.name1 -vvvv {{ signature_options }}
+ environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ vars:
+ signature_options: "--signature {{ signature }} --keyring {{ keyring }}"
+ signature: "file://{{ gpg_homedir }}/namespace2-name-1.0.0-MANIFEST.json.asc"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ ignore_errors: yes
+ register: invalid_signature
+
+- assert:
+ that:
+ - invalid_signature is failed
+ - "'Not installing namespace1.name1 because GnuPG signature verification failed.' in invalid_signature.stderr"
+ - expected_errors[0] in install_stdout
+ - expected_errors[1] in install_stdout
+ vars:
+ expected_errors:
+ - "* This is the counterpart to SUCCESS and used to indicate a program failure."
+ - "* The signature with the keyid has not been verified okay."
+ stdout_no_color: "{{ invalid_signature.stdout | regex_replace(reset_color) | regex_replace(color) }}"
+ # Remove formatting from the reason so it's one line
+ install_stdout: "{{ stdout_no_color | regex_replace('\"') | regex_replace('\\n') | regex_replace(' ', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+- name: validate collection directory was not created
+ file:
+ path: '{{ galaxy_dir }}/ansible_collections/namespace1/name1'
+ state: absent
+ register: collection_dir
+ check_mode: yes
+ failed_when: collection_dir is changed
+
+- name: disable signature verification and install simple collection with invalid detached signature
+ command: ansible-galaxy collection install namespace1.name1 {{ galaxy_verbosity }} {{ signature_options }}
+ environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
+ vars:
+ signature_options: "--signature {{ signature }} --keyring {{ keyring }} --disable-gpg-verify"
+ signature: "file://{{ gpg_homedir }}/namespace2-name-1.0.0-MANIFEST.json.asc"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ ignore_errors: yes
+ register: ignore_invalid_signature
+
+- assert:
+ that:
+ - ignore_invalid_signature is success
+ - '"Installing ''namespace1.name1:1.0.9'' to" in ignore_invalid_signature.stdout'
+
+- name: use lenient signature verification (default) without providing signatures
+ command: ansible-galaxy collection install namespace1.name1:1.0.0 -vvvv --keyring {{ gpg_homedir }}/pubring.kbx --force
+ environment:
+ ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: "all"
+ register: missing_signature
+
+- assert:
+ that:
+ - missing_signature is success
+ - missing_signature.rc == 0
+ - '"namespace1.name1:1.0.0 was installed successfully" in missing_signature.stdout'
+ - '"Signature verification failed for ''namespace1.name1'': no successful signatures" not in missing_signature.stdout'
+
+- name: use strict signature verification without providing signatures
+ command: ansible-galaxy collection install namespace1.name1:1.0.0 -vvvv --keyring {{ gpg_homedir }}/pubring.kbx --force
+ environment:
+ ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: "+1"
+ ignore_errors: yes
+ register: missing_signature
+
+- assert:
+ that:
+ - missing_signature is failed
+ - missing_signature.rc == 1
+ - '"Signature verification failed for ''namespace1.name1'': no successful signatures" in missing_signature.stdout'
+ - '"Not installing namespace1.name1 because GnuPG signature verification failed" in missing_signature.stderr'
+
+- name: Remove the collection
+ file:
+ path: '{{ galaxy_dir }}/ansible_collections/namespace1'
+ state: absent
- name: download collections with pre-release dep - {{ test_name }}
command: ansible-galaxy collection download dep_with_beta.parent namespace1.name1:1.1.0-beta.1 -p '{{ galaxy_dir }}/scratch'
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/main.yml b/test/integration/targets/ansible-galaxy-collection/tasks/main.yml
index 0f6af191..598784d3 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/main.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/main.yml
@@ -70,6 +70,8 @@
server: '{{ galaxy_ng_server }}'
v3: true
+- include_tasks: setup_gpg.yml
+
# We use a module for this so we can speed up the test time.
# For pulp interactions, we only upload to galaxy_ng which shares
# the same repo and distribution with pulp_ansible
@@ -79,6 +81,7 @@
setup_collections:
server: galaxy_ng
collections: '{{ collection_list }}'
+ signature_dir: '{{ gpg_homedir }}'
environment:
ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
@@ -174,6 +177,7 @@
args:
apply:
environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
vars:
test_api_fallback: 'pulp_v2'
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/revoke_gpg_key.yml b/test/integration/targets/ansible-galaxy-collection/tasks/revoke_gpg_key.yml
new file mode 100644
index 00000000..a766d8ea
--- /dev/null
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/revoke_gpg_key.yml
@@ -0,0 +1,14 @@
+- name: generate revocation certificate
+ expect:
+ command: "gpg --homedir {{ gpg_homedir }} --output {{ gpg_homedir }}/revoke.asc --gen-revoke {{ fingerprint }}"
+ responses:
+ "Create a revocation certificate for this key": "y"
+ "Please select the reason for the revocation": "0"
+ "Enter an optional description": ""
+ "Is this okay": "y"
+
+- name: revoke key
+ command: "gpg --no-tty --homedir {{ gpg_homedir }} --import {{ gpg_homedir }}/revoke.asc"
+
+- name: list keys for debugging
+ command: "gpg --no-tty --homedir {{ gpg_homedir }} --list-keys {{ gpg_user }}"
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/setup_gpg.yml b/test/integration/targets/ansible-galaxy-collection/tasks/setup_gpg.yml
new file mode 100644
index 00000000..93d532f6
--- /dev/null
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/setup_gpg.yml
@@ -0,0 +1,24 @@
+- name: create empty gpg homedir
+ file:
+ state: "{{ item }}"
+ path: "{{ gpg_homedir }}"
+ mode: 0700
+ loop:
+ - absent
+ - directory
+
+- name: get username for generating key
+ command: whoami
+ register: user
+
+- name: generate key for user with gpg
+ command: "gpg --no-tty --homedir {{ gpg_homedir }} --passphrase SECRET --pinentry-mode loopback --quick-gen-key {{ user.stdout }} default default"
+
+- name: list gpg keys for user
+ command: "gpg --no-tty --homedir {{ gpg_homedir }} --list-keys {{ user.stdout }}"
+ register: gpg_list_keys
+
+- name: save gpg user and fingerprint of new key
+ set_fact:
+ gpg_user: "{{ user.stdout }}"
+ fingerprint: "{{ gpg_list_keys.stdout_lines[1] | trim }}"
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml b/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml
index 815a67f5..abe6fcc9 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml
@@ -26,13 +26,10 @@
- name: install the collection from the server
command: ansible-galaxy collection install ansible_test.verify:1.0.0 -s {{ test_api_fallback }} {{ galaxy_verbosity }}
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
+# This command is hardcoded with -vvvv purposefully to evaluate extra verbosity messages
- name: verify the collection against the first valid server
- command: ansible-galaxy collection verify ansible_test.verify:1.0.0 -vvv {{ galaxy_verbosity }}
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
+ command: ansible-galaxy collection verify ansible_test.verify:1.0.0 -vvvv {{ galaxy_verbosity }}
register: verify
- assert:
@@ -43,8 +40,6 @@
- name: verify the installed collection against the server
command: ansible-galaxy collection verify ansible_test.verify:1.0.0 -s {{ test_name }} {{ galaxy_verbosity }}
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
register: verify
- assert:
@@ -54,11 +49,11 @@
- name: verify the installed collection against the server, with unspecified version in CLI
command: ansible-galaxy collection verify ansible_test.verify -s {{ test_name }} {{ galaxy_verbosity }}
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
- name: verify a collection that doesn't appear to be installed
command: ansible-galaxy collection verify ansible_test.verify:1.0.0 -s {{ test_name }} {{ galaxy_verbosity }}
+ environment:
+ ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/nonexistent_dir'
register: verify
failed_when: verify.rc == 0
@@ -95,8 +90,6 @@
- name: verify a version of a collection that isn't installed
command: ansible-galaxy collection verify ansible_test.verify:2.0.0 -s {{ test_name }} {{ galaxy_verbosity }}
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
register: verify
failed_when: verify.rc == 0
@@ -107,13 +100,9 @@
- name: install the new version from the server
command: ansible-galaxy collection install ansible_test.verify:2.0.0 --force -s {{ test_name }} {{ galaxy_verbosity }}
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
- name: verify the installed collection against the server
command: ansible-galaxy collection verify ansible_test.verify:2.0.0 -s {{ test_name }} {{ galaxy_verbosity }}
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
register: verify
- assert:
@@ -159,8 +148,6 @@
 - name: test verifying checksums of the modified collection
command: ansible-galaxy collection verify ansible_test.verify:2.0.0 -s {{ test_name }} {{ galaxy_verbosity }}
register: verify
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
failed_when: verify.rc == 0
- assert:
@@ -179,8 +166,6 @@
- name: ensure a modified FILES.json is validated
command: ansible-galaxy collection verify ansible_test.verify:2.0.0 -s {{ test_name }} {{ galaxy_verbosity }}
register: verify
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
failed_when: verify.rc == 0
- assert:
@@ -203,8 +188,6 @@
- name: ensure the MANIFEST.json is validated against the uncorrupted file from the server
command: ansible-galaxy collection verify ansible_test.verify:2.0.0 -s {{ test_name }} {{ galaxy_verbosity }}
register: verify
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
failed_when: verify.rc == 0
- assert:
@@ -233,8 +216,6 @@
- name: test we only verify collections containing a MANIFEST.json with the version on the server
command: ansible-galaxy collection verify ansible_test.verify:2.0.0 -s {{ test_name }} {{ galaxy_verbosity }}
register: verify
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
failed_when: verify.rc == 0
- assert:
@@ -255,13 +236,9 @@
- name: force-install from local artifact
command: ansible-galaxy collection install '{{ galaxy_dir }}/ansible_test-verify-3.0.0.tar.gz' --force
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
- name: verify locally only, no download or server manifest hash check
command: ansible-galaxy collection verify --offline ansible_test.verify
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
register: verify
- assert:
@@ -288,8 +265,6 @@
- name: verify modified collection locally-only (should fail)
command: ansible-galaxy collection verify --offline ansible_test.verify
register: verify
- environment:
- ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
failed_when: verify.rc == 0
- assert:
@@ -299,3 +274,204 @@
- "'plugins/modules/test_module.py' in verify.stdout"
- "'plugins/modules/test_new_file.py' in verify.stdout"
- "'plugins/modules/test_new_dir' in verify.stdout"
+
+# TODO: add a test for offline Galaxy signature metadata
+
+- name: install a collection that was signed by setup_collections
+ command: ansible-galaxy collection install namespace1.name1:1.0.0
+
+- name: verify the installed collection with a detached signature
+ command: ansible-galaxy collection verify namespace1.name1:1.0.0 {{ galaxy_verbosity }} {{ signature_options }}
+ vars:
+ signature_options: "--signature {{ signature }} --keyring {{ keyring }}"
+ signature: "file://{{ gpg_homedir }}/namespace1-name1-1.0.0-MANIFEST.json.asc"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ register: verify
+
+- assert:
+ that:
+ - verify.rc == 0
+
+# This command is hardcoded with -vvvv purposefully to evaluate extra verbosity messages
+- name: verify the installed collection with invalid detached signature
+ command: ansible-galaxy collection verify namespace1.name1:1.0.0 -vvvv {{ signature_options }}
+ vars:
+ signature_options: "--signature {{ signature }} --keyring {{ keyring }}"
+ signature: "file://{{ gpg_homedir }}/namespace1-name1-1.0.9-MANIFEST.json.asc"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ register: verify
+ ignore_errors: yes
+
+- assert:
+ that:
+ - verify.rc != 0
+ - '"Signature verification failed for ''namespace1.name1'' (return code 1)" in verify.stdout'
+ - expected_errors[0] in verify_stdout
+ - expected_errors[1] in verify_stdout
+ vars:
+ expected_errors:
+ - "* This is the counterpart to SUCCESS and used to indicate a program failure."
+ - "* The signature with the keyid has not been verified okay."
+ stdout_no_color: "{{ verify.stdout | regex_replace(reset_color) | regex_replace(color) }}"
+ # Remove formatting from the reason so it's one line
+ verify_stdout: "{{ stdout_no_color | regex_replace('\"') | regex_replace('\\n') | regex_replace(' ', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+# This command is hardcoded with -vvvv purposefully to evaluate extra verbosity messages
+- name: verify the installed collection with invalid detached signature offline
+ command: ansible-galaxy collection verify namespace1.name1:1.0.0 -vvvv {{ signature_options }} --offline
+ vars:
+ signature_options: "--signature {{ signature }} --keyring {{ keyring }}"
+ signature: "file://{{ gpg_homedir }}/namespace1-name1-1.0.9-MANIFEST.json.asc"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ register: verify
+ ignore_errors: yes
+
+- assert:
+ that:
+ - verify.rc != 0
+ - '"Signature verification failed for ''namespace1.name1'' (return code 1)" in verify.stdout'
+ - expected_errors[0] in verify_stdout
+ - expected_errors[1] in verify_stdout
+ vars:
+ expected_errors:
+ - "* This is the counterpart to SUCCESS and used to indicate a program failure."
+ - "* The signature with the keyid has not been verified okay."
+ stdout_no_color: "{{ verify.stdout | regex_replace(reset_color) | regex_replace(color) }}"
+ # Remove formatting from the reason so it's one line
+ verify_stdout: "{{ stdout_no_color | regex_replace('\"') | regex_replace('\\n') | regex_replace(' ', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+- include_tasks: revoke_gpg_key.yml
+
+# This command is hardcoded with -vvvv purposefully to evaluate extra verbosity messages
+- name: verify the installed collection with a revoked detached signature
+ command: ansible-galaxy collection verify namespace1.name1:1.0.0 -vvvv {{ signature_options }}
+ vars:
+ signature_options: "--signature {{ signature }} --keyring {{ keyring }}"
+ signature: "file://{{ gpg_homedir }}/namespace1-name1-1.0.0-MANIFEST.json.asc"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ register: verify
+ ignore_errors: yes
+
+- assert:
+ that:
+ - verify.rc != 0
+ - '"Signature verification failed for ''namespace1.name1'' (return code 0)" in verify.stdout'
+ - expected_errors[0] in verify_stdout
+ - expected_errors[1] in verify_stdout
+ vars:
+ expected_errors:
+ - "* The used key has been revoked by its owner."
+ - "* The signature with the keyid is good, but the signature was made by a revoked key."
+ stdout_no_color: "{{ verify.stdout | regex_replace(reset_color) | regex_replace(color) }}"
+ # Remove formatting from the reason so it's one line
+ verify_stdout: "{{ stdout_no_color | regex_replace('\"') | regex_replace('\\n') | regex_replace(' ', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+# This command is hardcoded with no verbosity purposefully to evaluate overall gpg failure
+- name: verify that ignoring the signature error and no successful signatures is not successful verification
+ command: ansible-galaxy collection verify namespace1.name1:1.0.0 {{ signature_options }}
+ vars:
+ signature_options: "--signature {{ signature }} --keyring {{ keyring }}"
+ signature: "file://{{ gpg_homedir }}/namespace1-name1-1.0.0-MANIFEST.json.asc"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ register: verify
+ ignore_errors: yes
+ environment:
+ ANSIBLE_GALAXY_IGNORE_SIGNATURE_STATUS_CODES: REVKEYSIG,KEYREVOKED
+
+- assert:
+ that:
+ - verify.rc != 0
+ - '"Signature verification failed for ''namespace1.name1'': fewer successful signatures than required" in verify.stdout'
+ - ignored_errors[0] not in verify_stdout
+ - ignored_errors[1] not in verify_stdout
+ vars:
+ ignored_errors:
+ - "* The used key has been revoked by its owner."
+ - "* The signature with the keyid is good, but the signature was made by a revoked key."
+ stdout_no_color: "{{ verify.stdout | regex_replace(reset_color) | regex_replace(color) }}"
+ # Remove formatting from the reason so it's one line
+ verify_stdout: "{{ stdout_no_color | regex_replace('\"') | regex_replace('\\n') | regex_replace(' ', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+# This command is hardcoded with -vvvv purposefully to evaluate extra verbosity messages
+- name: verify that ignoring the signature error and no successful signatures and required signature count all is successful verification
+ command: ansible-galaxy collection verify namespace1.name1:1.0.0 -vvvv {{ signature_options }}
+ vars:
+ signature_options: "--signature {{ signature }} --keyring {{ keyring }}"
+ signature: "file://{{ gpg_homedir }}/namespace1-name1-1.0.0-MANIFEST.json.asc"
+ keyring: "{{ gpg_homedir }}/pubring.kbx"
+ register: verify
+ ignore_errors: yes
+ environment:
+ ANSIBLE_GALAXY_IGNORE_SIGNATURE_STATUS_CODES: REVKEYSIG,KEYREVOKED
+ ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: all
+
+- assert:
+ that:
+ - verify is success
+ - verify.rc == 0
+ - '"Signature verification failed for ''namespace1.name1'': fewer successful signatures than required" not in verify.stdout'
+ - success_messages[0] in verify_stdout
+ - success_messages[1] in verify_stdout
+ - ignored_errors[0] not in verify_stdout
+ - ignored_errors[1] not in verify_stdout
+ vars:
+ success_messages:
+ - "GnuPG signature verification succeeded, verifying contents of namespace1.name1:1.0.0"
+ - "Successfully verified that checksums for 'namespace1.name1:1.0.0' match the remote collection."
+ ignored_errors:
+ - "* The used key has been revoked by its owner."
+ - "* The signature with the keyid is good, but the signature was made by a revoked key."
+ stdout_no_color: "{{ verify.stdout | regex_replace(reset_color) | regex_replace(color) }}"
+ # Remove formatting from the reason so it's one line
+ verify_stdout: "{{ stdout_no_color | regex_replace('\"') | regex_replace('\\n') | regex_replace(' ', ' ') }}"
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+
+- name: use lenient signature verification (default) without providing signatures
+ command: ansible-galaxy collection verify namespace1.name1:1.0.0 -vvvv --keyring {{ gpg_homedir }}/pubring.kbx
+ environment:
+ ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: "1"
+ register: verify
+ ignore_errors: yes
+
+- assert:
+ that:
+ - verify is success
+ - verify.rc == 0
+ - error_message not in verify.stdout
+ - success_messages[0] in verify.stdout
+ - success_messages[1] in verify.stdout
+ vars:
+ error_message: "Signature verification failed for 'namespace1.name1': fewer successful signatures than required"
+ success_messages:
+ - "GnuPG signature verification succeeded, verifying contents of namespace1.name1:1.0.0"
+ - "Successfully verified that checksums for 'namespace1.name1:1.0.0' match the remote collection."
+
+- name: use strict signature verification without providing signatures
+ command: ansible-galaxy collection verify namespace1.name1:1.0.0 -vvvv --keyring {{ gpg_homedir }}/pubring.kbx
+ environment:
+ ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT: "+1"
+ register: verify
+ ignore_errors: yes
+
+- assert:
+ that:
+ - verify is failed
+ - verify.rc == 1
+ - '"Signature verification failed for ''namespace1.name1'': no successful signatures" in verify.stdout'
+
+- name: empty installed collections
+ file:
+ path: "{{ galaxy_dir }}/ansible_collections"
+ state: "{{ item }}"
+ loop:
+ - absent
+ - directory
diff --git a/test/integration/targets/ansible-galaxy-collection/vars/main.yml b/test/integration/targets/ansible-galaxy-collection/vars/main.yml
index e8ee9ca0..604ff1ab 100644
--- a/test/integration/targets/ansible-galaxy-collection/vars/main.yml
+++ b/test/integration/targets/ansible-galaxy-collection/vars/main.yml
@@ -1,5 +1,7 @@
galaxy_verbosity: "{{ '' if not ansible_verbosity else '-' ~ ('v' * ansible_verbosity) }}"
+gpg_homedir: "{{ galaxy_dir }}/gpg"
+
pulp_repositories:
- published
- secondary
diff --git a/test/integration/targets/ansible-galaxy/cleanup-default.yml b/test/integration/targets/ansible-galaxy/cleanup-default.yml
index f2265c09..80600792 100644
--- a/test/integration/targets/ansible-galaxy/cleanup-default.yml
+++ b/test/integration/targets/ansible-galaxy/cleanup-default.yml
@@ -1,5 +1,13 @@
-- name: remove unwanted packages
+- name: remove git package
package:
name: git
state: absent
when: git_install.changed
+- name: remove openssl package
+ package:
+ name: openssl
+ state: absent
+ when: ansible_distribution not in ["MacOSX", "Alpine"] and openssl_install.changed
+- name: remove openssl package
+ command: apk del openssl
+ when: ansible_distribution == "Alpine" and openssl_install.changed
diff --git a/test/integration/targets/ansible-galaxy/cleanup-freebsd.yml b/test/integration/targets/ansible-galaxy/cleanup-freebsd.yml
index fa224d83..87b987d1 100644
--- a/test/integration/targets/ansible-galaxy/cleanup-freebsd.yml
+++ b/test/integration/targets/ansible-galaxy/cleanup-freebsd.yml
@@ -1,6 +1,12 @@
-- name: remove auto-installed packages from FreeBSD
+- name: remove git from FreeBSD
pkgng:
name: git
state: absent
autoremove: yes
when: git_install.changed
+- name: remove openssl from FreeBSD
+ pkgng:
+ name: openssl
+ state: absent
+ autoremove: yes
+ when: openssl_install.changed
diff --git a/test/integration/targets/ansible-galaxy/cleanup.yml b/test/integration/targets/ansible-galaxy/cleanup.yml
index 57442631..e80eeefb 100644
--- a/test/integration/targets/ansible-galaxy/cleanup.yml
+++ b/test/integration/targets/ansible-galaxy/cleanup.yml
@@ -1,6 +1,8 @@
- hosts: localhost
vars:
git_install: '{{ lookup("file", lookup("env", "OUTPUT_DIR") + "/git_install.json") | from_json }}'
+ openssl_install: '{{ lookup("file", lookup("env", "OUTPUT_DIR") + "/openssl_install.json") | from_json }}'
+ ws_dir: '{{ lookup("file", lookup("env", "OUTPUT_DIR") + "/ws_dir.json") | from_json }}'
tasks:
- name: cleanup
include_tasks: "{{ cleanup_filename }}"
@@ -17,3 +19,8 @@
loop:
- "~/.ansible/collections/ansible_collections"
- /usr/share/ansible/collections/ansible_collections
+
+ - name: Remove webserver directory
+ file:
+ path: "{{ ws_dir }}"
+ state: absent
diff --git a/test/integration/targets/ansible-galaxy/files/testserver.py b/test/integration/targets/ansible-galaxy/files/testserver.py
new file mode 100644
index 00000000..13598507
--- /dev/null
+++ b/test/integration/targets/ansible-galaxy/files/testserver.py
@@ -0,0 +1,20 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+import ssl
+
+if __name__ == '__main__':
+ if sys.version_info[0] >= 3:
+ import http.server
+ import socketserver
+ Handler = http.server.SimpleHTTPRequestHandler
+ httpd = socketserver.TCPServer(("", 4443), Handler)
+ else:
+ import BaseHTTPServer
+ import SimpleHTTPServer
+ Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
+ httpd = BaseHTTPServer.HTTPServer(("", 4443), Handler)
+
+ httpd.socket = ssl.wrap_socket(httpd.socket, certfile='./cert.pem', keyfile='./key.pem', server_side=True)
+ httpd.serve_forever()
diff --git a/test/integration/targets/ansible-galaxy/runme.sh b/test/integration/targets/ansible-galaxy/runme.sh
index 1f2d56b3..4bb45958 100755
--- a/test/integration/targets/ansible-galaxy/runme.sh
+++ b/test/integration/targets/ansible-galaxy/runme.sh
@@ -2,6 +2,20 @@
set -eux -o pipefail
+galaxy_testdir="${OUTPUT_DIR}/galaxy-test-dir"
+role_testdir="${OUTPUT_DIR}/role-test-dir"
+# Prep the local git repos with role and make a tar archive so we can test
+# different things
+galaxy_local_test_role="test-role"
+galaxy_local_test_role_dir="${OUTPUT_DIR}/galaxy-role-test-root"
+galaxy_local_test_role_git_repo="${galaxy_local_test_role_dir}/${galaxy_local_test_role}"
+galaxy_local_test_role_tar="${galaxy_local_test_role_dir}/${galaxy_local_test_role}.tar"
+galaxy_webserver_root="${OUTPUT_DIR}/ansible-galaxy-webserver"
+
+mkdir -p "${galaxy_local_test_role_dir}"
+mkdir -p "${role_testdir}"
+mkdir -p "${galaxy_webserver_root}"
+
ansible-playbook setup.yml "$@"
trap 'ansible-playbook ${ANSIBLE_PLAYBOOK_DIR}/cleanup.yml' EXIT
@@ -51,22 +65,26 @@ f_ansible_galaxy_create_role_repo_post()
--format=tar \
--prefix="${repo_name}/" \
master > "${repo_tar}"
+ # Configure basic (insecure) HTTPS-accessible repository
+ galaxy_local_test_role_http_repo="${galaxy_webserver_root}/${galaxy_local_test_role}.git"
+ if [[ ! -d "${galaxy_local_test_role_http_repo}" ]]; then
+ git clone --bare "${galaxy_local_test_role_git_repo}" "${galaxy_local_test_role_http_repo}"
+ pushd "${galaxy_local_test_role_http_repo}"
+ touch "git-daemon-export-ok"
+ git --bare update-server-info
+ mv "hooks/post-update.sample" "hooks/post-update"
+ popd # ${galaxy_local_test_role_http_repo}
+ fi
popd # "${repo_name}"
popd # "${repo_dir}"
}
-# Prep the local git repos with role and make a tar archive so we can test
-# different things
-galaxy_local_test_role="test-role"
-galaxy_local_test_role_dir=$(mktemp -d)
-galaxy_local_test_role_git_repo="${galaxy_local_test_role_dir}/${galaxy_local_test_role}"
-galaxy_local_test_role_tar="${galaxy_local_test_role_dir}/${galaxy_local_test_role}.tar"
-
f_ansible_galaxy_create_role_repo_pre "${galaxy_local_test_role}" "${galaxy_local_test_role_dir}"
f_ansible_galaxy_create_role_repo_post "${galaxy_local_test_role}" "${galaxy_local_test_role_tar}"
galaxy_local_parent_role="parent-role"
-galaxy_local_parent_role_dir=$(mktemp -d)
+galaxy_local_parent_role_dir="${OUTPUT_DIR}/parent-role"
+mkdir -p "${galaxy_local_parent_role_dir}"
galaxy_local_parent_role_git_repo="${galaxy_local_parent_role_dir}/${galaxy_local_parent_role}"
galaxy_local_parent_role_tar="${galaxy_local_parent_role_dir}/${galaxy_local_parent_role}.tar"
@@ -82,7 +100,7 @@ f_ansible_galaxy_create_role_repo_post "${galaxy_local_parent_role}" "${galaxy_l
#
# Install local git repo
f_ansible_galaxy_status "install of local git repo"
-galaxy_testdir=$(mktemp -d)
+mkdir -p "${galaxy_testdir}"
pushd "${galaxy_testdir}"
ansible-galaxy install git+file:///"${galaxy_local_test_role_git_repo}" "$@"
@@ -97,7 +115,7 @@ rm -fr "${HOME}/.ansible/roles/${galaxy_local_test_role}"
#
# Install local git repo and ensure that if a role_path is passed, it is in fact used
f_ansible_galaxy_status "install of local git repo with -p \$role_path"
-galaxy_testdir=$(mktemp -d)
+mkdir -p "${galaxy_testdir}"
pushd "${galaxy_testdir}"
mkdir -p "${galaxy_relative_rolespath}"
@@ -108,11 +126,40 @@ pushd "${galaxy_testdir}"
popd # ${galaxy_testdir}
rm -fr "${galaxy_testdir}"
+# Galaxy install test case - skipping cert verification
+#
+# Install from remote git repo and ensure that cert validation is skipped
+#
+# Protect against regression (GitHub Issue #41077)
+# https://github.com/ansible/ansible/issues/41077
+f_ansible_galaxy_status "install of role from untrusted repository"
+mkdir -p "${galaxy_testdir}"
+pushd "${galaxy_testdir}"
+ mkdir -p "${galaxy_relative_rolespath}"
+
+ # Without --ignore-certs, installing a role from an untrusted repository should fail
+ set +e
+ ansible-galaxy install --verbose git+https://localhost:4443/"${galaxy_local_test_role}.git" -p "${galaxy_relative_rolespath}" "$@" 2>&1 | tee out.txt
+ ansible_exit_code="$?"
+ set -e
+ cat out.txt
+
+ if [[ "$ansible_exit_code" -ne 1 ]]; then echo "Exit code ($ansible_exit_code) is expected to be 1" && exit "$ansible_exit_code"; fi
+ [[ $(grep -c 'ERROR' out.txt) -eq 1 ]]
+ [[ ! -d "${galaxy_relative_rolespath}/${galaxy_local_test_role}" ]]
+
+ ansible-galaxy install --verbose --ignore-certs git+https://localhost:4443/"${galaxy_local_test_role}.git" -p "${galaxy_relative_rolespath}" "$@"
+
+ # Test that the role was installed to the expected directory
+ [[ -d "${galaxy_relative_rolespath}/${galaxy_local_test_role}" ]]
+popd # ${galaxy_testdir}
+rm -fr "${galaxy_testdir}"
+
# Galaxy install test case
#
# Install local git repo with a meta/requirements.yml
f_ansible_galaxy_status "install of local git repo with meta/requirements.yml"
-galaxy_testdir=$(mktemp -d)
+mkdir -p "${galaxy_testdir}"
pushd "${galaxy_testdir}"
ansible-galaxy install git+file:///"${galaxy_local_parent_role_git_repo}" "$@"
@@ -132,7 +179,7 @@ rm -fr "${HOME}/.ansible/roles/${galaxy_local_test_role}"
#
# Install local git repo with a meta/requirements.yml + --no-deps argument
f_ansible_galaxy_status "install of local git repo with meta/requirements.yml + --no-deps argument"
-galaxy_testdir=$(mktemp -d)
+mkdir -p "${galaxy_testdir}"
pushd "${galaxy_testdir}"
ansible-galaxy install git+file:///"${galaxy_local_parent_role_git_repo}" --no-deps "$@"
@@ -158,7 +205,7 @@ rm -fr "${HOME}/.ansible/roles/${galaxy_local_test_role}"
f_ansible_galaxy_status \
"install of local git repo and local tarball with -p \$role_path and -r \$role_file" \
"Protect against regression (Issue #35217)"
-galaxy_testdir=$(mktemp -d)
+mkdir -p "${galaxy_testdir}"
pushd "${galaxy_testdir}"
git clone "${galaxy_local_test_role_git_repo}" "${galaxy_local_test_role}"
@@ -189,7 +236,7 @@ rm -fr "${galaxy_testdir}"
# Basic tests to ensure listing roles works
f_ansible_galaxy_status "role list"
-galaxy_testdir=$(mktemp -d)
+mkdir -p "${galaxy_testdir}"
pushd "${galaxy_testdir}"
ansible-galaxy install git+file:///"${galaxy_local_test_role_git_repo}" "$@"
@@ -207,7 +254,7 @@ popd # ${galaxy_testdir}
f_ansible_galaxy_status \
"list specific role not in the first path in ANSIBLE_ROLES_PATH"
-role_testdir=$(mktemp -d)
+mkdir -p "${role_testdir}"
pushd "${role_testdir}"
mkdir testroles
@@ -229,7 +276,7 @@ rm -fr "${role_testdir}"
# Get info about role that is not installed
f_ansible_galaxy_status "role info"
-galaxy_testdir=$(mktemp -d)
+mkdir -p "${galaxy_testdir}"
pushd "${galaxy_testdir}"
ansible-galaxy role info samdoran.fish | tee out.txt
@@ -241,7 +288,7 @@ popd # ${galaxy_testdir}
f_ansible_galaxy_status \
    "role info non-existent role"
-role_testdir=$(mktemp -d)
+mkdir -p "${role_testdir}"
pushd "${role_testdir}"
ansible-galaxy role info notaroll | tee out.txt
@@ -293,7 +340,7 @@ rm -fr "${role_testdir}"
f_ansible_galaxy_status \
"list roles where the role name is the same or a subset of the role path (#67365)"
-role_testdir=$(mktemp -d)
+mkdir -p "${role_testdir}"
pushd "${role_testdir}"
mkdir parrot
@@ -312,7 +359,7 @@ rm -rf "${role_testdir}"
f_ansible_galaxy_status \
"Test role with non-ascii characters"
-role_testdir=$(mktemp -d)
+mkdir -p "${role_testdir}"
pushd "${role_testdir}"
mkdir nonascii
@@ -342,7 +389,7 @@ rm -rf "${role_testdir}"
#################################
# TODO: Move these to ansible-galaxy-collection
-galaxy_testdir=$(mktemp -d)
+mkdir -p "${galaxy_testdir}"
pushd "${galaxy_testdir}"
## ansible-galaxy collection list tests
diff --git a/test/integration/targets/ansible-galaxy/setup.yml b/test/integration/targets/ansible-galaxy/setup.yml
index ebd5a1c0..b4fb6d3b 100644
--- a/test/integration/targets/ansible-galaxy/setup.yml
+++ b/test/integration/targets/ansible-galaxy/setup.yml
@@ -1,11 +1,57 @@
- hosts: localhost
+ vars:
+ ws_dir: '{{ lookup("env", "OUTPUT_DIR") }}/ansible-galaxy-webserver'
tasks:
- - name: install git
+ - name: install git & OpenSSL
package:
name: git
when: ansible_distribution not in ["MacOSX", "Alpine"]
register: git_install
- - name: save install result
+
+ - name: install OpenSSL
+ package:
+ name: openssl
+ when: ansible_distribution not in ["MacOSX", "Alpine"]
+ register: openssl_install
+
+ - name: install OpenSSL
+ command: apk add openssl
+ when: ansible_distribution == "Alpine"
+ register: openssl_install
+
+ - name: setup webserver dir
+ file:
+ state: directory
+ path: "{{ ws_dir }}"
+
+ - name: copy webserver
+ copy:
+ src: testserver.py
+ dest: "{{ ws_dir }}"
+
+ - name: Create rand file
+ command: dd if=/dev/urandom of="{{ ws_dir }}/.rnd" bs=256 count=1
+
+ - name: Create self-signed cert
+ shell: RANDFILE={{ ws_dir }}/.rnd openssl req -x509 -newkey rsa:2048 \
+ -nodes -days 365 -keyout "{{ ws_dir }}/key.pem" -out "{{ ws_dir }}/cert.pem" \
+ -subj "/C=GB/O=Red Hat/OU=Ansible/CN=ansible-test-cert"
+
+ - name: start SimpleHTTPServer
+ shell: cd {{ ws_dir }} && {{ ansible_python.executable }} {{ ws_dir }}/testserver.py
+ async: 120 # this test set can take ~1m to run on FreeBSD (via Shippable)
+ poll: 0
+
+ - wait_for: port=4443
+
+ - name: save results
copy:
- content: '{{ git_install }}'
- dest: '{{ lookup("env", "OUTPUT_DIR") }}/git_install.json'
+ content: "{{ item.content }}"
+ dest: '{{ lookup("env", "OUTPUT_DIR") }}/{{ item.key }}.json'
+ loop:
+ - key: git_install
+ content: "{{ git_install }}"
+ - key: openssl_install
+ content: "{{ openssl_install }}"
+ - key: ws_dir
+ content: "{{ ws_dir | to_json }}"
diff --git a/test/integration/targets/ansible-test-cloud-aws/aliases b/test/integration/targets/ansible-test-cloud-aws/aliases
new file mode 100644
index 00000000..9442e888
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-aws/aliases
@@ -0,0 +1,3 @@
+cloud/aws
+shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-aws/tasks/main.yml b/test/integration/targets/ansible-test-cloud-aws/tasks/main.yml
new file mode 100644
index 00000000..4f7c4c4d
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-aws/tasks/main.yml
@@ -0,0 +1,17 @@
+- name: Verify variables are set
+ assert:
+ that:
+ - aws_access_key
+ - aws_region
+ - aws_secret_key
+ - resource_prefix
+ - security_token
+ - tiny_prefix
+- name: Show variables
+ debug:
+ msg: "{{ lookup('vars', item) }}"
+ with_items:
+ - aws_access_key
+ - aws_region
+ - resource_prefix
+ - tiny_prefix
diff --git a/test/integration/targets/ansible-test-cloud-azure/aliases b/test/integration/targets/ansible-test-cloud-azure/aliases
new file mode 100644
index 00000000..a5526fe5
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-azure/aliases
@@ -0,0 +1,3 @@
+cloud/azure
+shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-azure/tasks/main.yml b/test/integration/targets/ansible-test-cloud-azure/tasks/main.yml
new file mode 100644
index 00000000..c9201ba6
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-azure/tasks/main.yml
@@ -0,0 +1,18 @@
+- name: Verify variables are set
+ assert:
+ that:
+ - azure_client_id
+ - azure_secret
+ - azure_subscription_id
+ - azure_tenant
+ - resource_group
+ - resource_group_secondary
+- name: Show variables
+ debug:
+ msg: "{{ lookup('vars', item) }}"
+ with_items:
+ - azure_client_id
+ - azure_subscription_id
+ - azure_tenant
+ - resource_group
+ - resource_group_secondary
diff --git a/test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/module_utils/PSUtil.psm1 b/test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/module_utils/PSUtil.psm1
index d37e681a..f23b99e7 100644
--- a/test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/module_utils/PSUtil.psm1
+++ b/test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/module_utils/PSUtil.psm1
@@ -8,7 +8,7 @@ Function Get-PSUtilSpec {
#>
@{
options = @{
- option1 = @{ type = 'str'; required = $true; aliases = 'alias1' }
+ option1 = @{ type = 'str'; required = $true; aliases = 'alias1' }
}
}
}
diff --git a/test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/win_util_args.ps1 b/test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/win_util_args.ps1
index 9dab99da..69922cd6 100644
--- a/test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/win_util_args.ps1
+++ b/test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/win_util_args.ps1
@@ -8,7 +8,7 @@
$spec = @{
options = @{
- my_opt = @{ type = "str"; required = $true }
+ my_opt = @{ type = "str"; required = $true }
}
}
diff --git a/test/integration/targets/ansible-test/collection-tests/unsupported-directory.sh b/test/integration/targets/ansible-test/collection-tests/unsupported-directory.sh
index 713bd5d6..b1b9508a 100755
--- a/test/integration/targets/ansible-test/collection-tests/unsupported-directory.sh
+++ b/test/integration/targets/ansible-test/collection-tests/unsupported-directory.sh
@@ -4,7 +4,14 @@ set -eux -o pipefail
cd "${WORK_DIR}"
-if ansible-test --help 1>stdout 2>stderr; then
+# some options should succeed even in an unsupported directory
+ansible-test --help
+ansible-test --version
+
+# the --help option should show the current working directory when it is unsupported
+ansible-test --help 2>&1 | grep '^Current working directory: '
+
+if ansible-test sanity 1>stdout 2>stderr; then
echo "ansible-test did not fail"
exit 1
fi
diff --git a/test/integration/targets/ansible-test/collection-tests/venv-pythons.py b/test/integration/targets/ansible-test/collection-tests/venv-pythons.py
index ad41b1f5..b380f147 100755
--- a/test/integration/targets/ansible-test/collection-tests/venv-pythons.py
+++ b/test/integration/targets/ansible-test/collection-tests/venv-pythons.py
@@ -23,11 +23,6 @@ def main():
executable = shutil.which(f'python{python_version}')
if executable:
- if python_version == '2.6':
- # skip Python 2.6 since it requires docker to provide a PyPI proxy container
- print(f'{executable} - skip', file=sys.stderr)
- continue
-
if python_version.startswith('2.'):
cmd = [executable, '-m', 'virtualenv', '--version']
else:
diff --git a/test/integration/targets/ansible-vault/runme.sh b/test/integration/targets/ansible-vault/runme.sh
index e3b21d7f..bb893433 100755
--- a/test/integration/targets/ansible-vault/runme.sh
+++ b/test/integration/targets/ansible-vault/runme.sh
@@ -344,6 +344,8 @@ ansible-vault encrypt_string "$@" --vault-password-file "${NEW_VAULT_PASSWORD}"
# write to file
ansible-vault encrypt_string "$@" --vault-password-file "${NEW_VAULT_PASSWORD}" --name "blippy" "a test string names blippy" --output "${MYTMPDIR}/enc_string_test_file"
+[ -f "${MYTMPDIR}/enc_string_test_file" ];
+
# test ansible-vault edit with a faux editor
ansible-vault encrypt "$@" --vault-password-file vault-password "${TEST_FILE_EDIT}"
@@ -521,4 +523,29 @@ ansible-playbook -i ../../inventory -v "$@" --vault-password-file vault-password
# Ensure we don't leave unencrypted temp files dangling
ansible-playbook -v "$@" --vault-password-file vault-password test_dangling_temp.yml
-ansible-playbook "$@" --vault-password-file vault-password single_vault_as_string.yml \ No newline at end of file
+ansible-playbook "$@" --vault-password-file vault-password single_vault_as_string.yml
+
+# Test that only one accessible vault password is required
+export ANSIBLE_VAULT_IDENTITY_LIST="id1@./nonexistent, id2@${MYTMPDIR}/unreadable, id3@./vault-password"
+
+touch "${MYTMPDIR}/unreadable"
+sudo chmod 000 "${MYTMPDIR}/unreadable"
+
+ansible-vault encrypt_string content
+ansible-vault encrypt_string content --encrypt-vault-id id3
+
+set +e
+
+# Try to use a missing vault password file
+ansible-vault encrypt_string content --encrypt-vault-id id1 2>&1 | tee out.txt
+test $? -ne 0
+grep out.txt -e '[WARNING]: Error getting vault password file (id1)'
+grep out.txt -e "ERROR! Did not find a match for --encrypt-vault-id=id2 in the known vault-ids ['id3']"
+
+# Try to use an inaccessible vault password file
+ansible-vault encrypt_string content --encrypt-vault-id id2 2>&1 | tee out.txt
+test $? -ne 0
+grep out.txt -e "[WARNING]: Error in vault password file loading (id2)"
+grep out.txt -e "ERROR! Did not find a match for --encrypt-vault-id=id2 in the known vault-ids ['id3']"
+
+set -e
diff --git a/test/integration/targets/ansible-vault/single_vault_as_string.yml b/test/integration/targets/ansible-vault/single_vault_as_string.yml
index ca147b0b..2d523a0b 100644
--- a/test/integration/targets/ansible-vault/single_vault_as_string.yml
+++ b/test/integration/targets/ansible-vault/single_vault_as_string.yml
@@ -81,8 +81,6 @@
- assert:
that:
- vaulted_value|map('upper')|list == ['F', 'O', 'O', ' ', 'B', 'A', 'R']
- when: lookup('pipe', ansible_python.executable ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.7', '>=')
-
- assert:
that:
@@ -90,23 +88,19 @@
- vaulted_value|select('equalto', 'o')|list == ['o', 'o']
- vaulted_value|title == 'Foo Bar'
- vaulted_value is equalto('foo bar')
- when: lookup('pipe', ansible_python.executable ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.8', '>=')
- assert:
that:
- vaulted_value|string|tojson == '"foo bar"'
- vaulted_value|truncate(4) == 'foo bar'
- when: lookup('pipe', ansible_python.executable ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.9', '>=')
- assert:
that:
- vaulted_value|wordwrap(4) == 'foo\nbar'
- when: lookup('pipe', ansible_python.executable ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.11', '>=')
- assert:
that:
- vaulted_value|wordcount == 2
- when: lookup('pipe', ansible_python.executable ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.11.2', '>=')
- ping:
data: !vault |
diff --git a/test/integration/targets/any_errors_fatal/50897.yml b/test/integration/targets/any_errors_fatal/50897.yml
new file mode 100644
index 00000000..1d09eb18
--- /dev/null
+++ b/test/integration/targets/any_errors_fatal/50897.yml
@@ -0,0 +1,19 @@
+- hosts: testhost,testhost2
+ gather_facts: no
+ any_errors_fatal: yes
+ tasks:
+ - name: EXPECTED FAILURE include_role that doesn't exist
+ include_role:
+ name: 'non-existant-role'
+ when:
+ - inventory_hostname == 'testhost2'
+ - test_name == 'test_include_role'
+
+ - name: EXPECTED FAILURE include_tasks that don't exist
+ include_tasks: non-existant.yml
+ when:
+ - inventory_hostname == 'testhost2'
+ - test_name == 'test_include_tasks'
+
+ - debug:
+ msg: 'any_errors_fatal_this_should_never_be_reached'
diff --git a/test/integration/targets/any_errors_fatal/runme.sh b/test/integration/targets/any_errors_fatal/runme.sh
index 02cd499f..c54ea8d5 100755
--- a/test/integration/targets/any_errors_fatal/runme.sh
+++ b/test/integration/targets/any_errors_fatal/runme.sh
@@ -5,14 +5,14 @@ ansible-playbook -i inventory "$@" play_level.yml| tee out.txt | grep 'any_error
res=$?
cat out.txt
if [ "${res}" -eq 0 ] ; then
- exit 1
+ exit 1
fi
ansible-playbook -i inventory "$@" on_includes.yml | tee out.txt | grep 'any_errors_fatal_this_should_never_be_reached'
res=$?
cat out.txt
if [ "${res}" -eq 0 ] ; then
- exit 1
+ exit 1
fi
set -ux
@@ -20,4 +20,18 @@ set -ux
ansible-playbook -i inventory "$@" always_block.yml | tee out.txt | grep 'any_errors_fatal_always_block_start'
res=$?
cat out.txt
-exit $res
+
+if [ "${res}" -ne 0 ] ; then
+ exit 1
+fi
+
+set -ux
+
+for test_name in test_include_role test_include_tasks; do
+ ansible-playbook -i inventory "$@" -e test_name=$test_name 50897.yml | tee out.txt | grep 'any_errors_fatal_this_should_never_be_reached'
+ res=$?
+ cat out.txt
+ if [ "${res}" -eq 0 ] ; then
+ exit 1
+ fi
+done
diff --git a/test/integration/targets/apt/aliases b/test/integration/targets/apt/aliases
index 941bce38..ce9d97d4 100644
--- a/test/integration/targets/apt/aliases
+++ b/test/integration/targets/apt/aliases
@@ -4,4 +4,3 @@ skip/freebsd
skip/osx
skip/macos
skip/rhel
-skip/aix
diff --git a/test/integration/targets/apt/defaults/main.yml b/test/integration/targets/apt/defaults/main.yml
index 05a5780f..7ad2497d 100644
--- a/test/integration/targets/apt/defaults/main.yml
+++ b/test/integration/targets/apt/defaults/main.yml
@@ -1 +1,2 @@
apt_foreign_arch: i386
+hello_old_version: 2.6-1
diff --git a/test/integration/targets/apt/handlers/main.yml b/test/integration/targets/apt/handlers/main.yml
new file mode 100644
index 00000000..0b6a98f7
--- /dev/null
+++ b/test/integration/targets/apt/handlers/main.yml
@@ -0,0 +1,4 @@
+- name: remove package hello
+ apt:
+ name: hello
+ state: absent
diff --git a/test/integration/targets/apt/tasks/apt.yml b/test/integration/targets/apt/tasks/apt.yml
index 1b2d9206..81614118 100644
--- a/test/integration/targets/apt/tasks/apt.yml
+++ b/test/integration/targets/apt/tasks/apt.yml
@@ -205,7 +205,7 @@
# INSTALL WITHOUT REMOVALS
- name: Install hello, that conflicts with hello-traditional
- apt:
+ apt:
pkg: hello
state: present
update_cache: no
@@ -221,7 +221,7 @@
- "dpkg_result.rc == 0"
- name: Try installing hello-traditional, that conflicts with hello
- apt:
+ apt:
pkg: hello-traditional
state: present
fail_on_autoremove: yes
@@ -235,7 +235,7 @@
- '"Packages need to be removed but remove is disabled." in apt_result.msg'
- name: uninstall hello with apt
- apt:
+ apt:
pkg: hello
state: absent
purge: yes
@@ -437,3 +437,73 @@
file:
path: /usr/sbin/policy-rc.d
state: absent
+
+# https://github.com/ansible/ansible/issues/65325
+- name: Download and install old version of hello (to test allow_change_held_packages option)
+ apt: "deb=https://ci-files.testing.ansible.com/test/integration/targets/dpkg_selections/hello_{{ hello_old_version }}_amd64.deb"
+ notify:
+ - remove package hello
+
+- name: Put hello on hold
+ shell: apt-mark hold hello
+
+- name: Get hold list
+ shell: apt-mark showhold
+ register: allow_change_held_packages_hold
+
+- name: Check that the package hello is on the hold list
+ assert:
+ that:
+ - "'hello' in allow_change_held_packages_hold.stdout"
+
+- name: Try updating package to the latest version (allow_change_held_packages=no)
+ apt:
+ name: hello
+ state: latest
+ ignore_errors: True
+ register: allow_change_held_packages_failed_update
+
+- name: Get the version of the package
+ shell: dpkg -s hello | grep Version | awk '{print $2}'
+ register: allow_change_held_packages_hello_version
+
+- name: Verify that the package was not updated (apt returns with an error)
+ assert:
+ that:
+ - "allow_change_held_packages_failed_update is failed"
+ - "'--allow-change-held-packages' in allow_change_held_packages_failed_update.stderr"
+ - "allow_change_held_packages_hello_version.stdout == hello_old_version"
+
+- name: Try updating package to the latest version (allow_change_held_packages=yes)
+ apt:
+ name: hello
+ state: latest
+ allow_change_held_packages: yes
+ register: allow_change_held_packages_successful_update
+
+- name: Get the version of the package
+ shell: dpkg -s hello | grep Version | awk '{print $2}'
+ register: allow_change_held_packages_hello_version
+
+- name: Verify that the package was updated
+ assert:
+ that:
+ - "allow_change_held_packages_successful_update is changed"
+ - "allow_change_held_packages_hello_version.stdout != hello_old_version"
+
+- name: Try updating package to the latest version again
+ apt:
+ name: hello
+ state: latest
+ allow_change_held_packages: yes
+ register: allow_change_held_packages_no_update
+
+- name: Get the version of the package
+ shell: dpkg -s hello | grep Version | awk '{print $2}'
+ register: allow_change_held_packages_hello_version_again
+
+- name: Verify that the package was not updated
+ assert:
+ that:
+ - "allow_change_held_packages_no_update is not changed"
+ - "allow_change_held_packages_hello_version.stdout == allow_change_held_packages_hello_version_again.stdout"
diff --git a/test/integration/targets/apt/tasks/repo.yml b/test/integration/targets/apt/tasks/repo.yml
index 8269452a..1705cb3e 100644
--- a/test/integration/targets/apt/tasks/repo.yml
+++ b/test/integration/targets/apt/tasks/repo.yml
@@ -86,6 +86,47 @@
state: absent
allow_unauthenticated: yes
+- block:
+ - name: Install foo=1.0.0
+ apt:
+ name: foo=1.0.0
+
+ - name: Run version test matrix
+ apt:
+ name: foo{{ item.0 }}
+ default_release: '{{ item.1 }}'
+ state: '{{ item.2 | ternary("latest","present") }}'
+ check_mode: true
+ register: apt_result
+ loop:
+ # [filter, release, state_latest, expected]
+ - ["", null, false, null]
+ - ["", null, true, "1.0.1"]
+ - ["=1.0.0", null, false, null]
+ - ["=1.0.0", null, true, null]
+ - ["=1.0.1", null, false, "1.0.1"]
+ #- ["=1.0.*", null, false, null] # legacy behavior. should not upgrade without state=latest
+ - ["=1.0.*", null, true, "1.0.1"]
+ - [">=1.0.0", null, false, null]
+ - [">=1.0.0", null, true, "1.0.1"]
+ - [">=1.0.1", null, false, "1.0.1"]
+ - ["", "testing", false, null]
+ - ["", "testing", true, "2.0.1"]
+ - ["=2.0.0", null, false, "2.0.0"]
+ - [">=2.0.0", "testing", false, "2.0.1"]
+
+ - name: Validate version test matrix
+ assert:
+ that:
+ - (item.item.3 is not none) == (item.stdout is defined)
+ - item.item.3 is none or "Inst foo [1.0.0] (" + item.item.3 + " localhost [all])" in item.stdout_lines
+ loop: '{{ apt_result.results }}'
+
+ always:
+ - name: Uninstall foo
+ apt:
+ name: foo
+ state: absent
# https://github.com/ansible/ansible/issues/35900
- block:
@@ -274,9 +315,6 @@
diff: yes
register: apt_result
- - debug:
- var: apt_result
-
- name: Check the content of diff.prepared
assert:
that:
@@ -288,3 +326,27 @@
name: foo
state: absent
allow_unauthenticated: yes
+
+- block:
+ - name: Install foo package version 1.0.0 with force=yes, implies allow_unauthenticated=yes
+ apt:
+ name: foo=1.0.0
+ force: yes
+ register: apt_result
+
+ - name: Check install with dpkg
+ shell: dpkg-query -l foo
+ register: dpkg_result
+
+ - name: Check if install was successful
+ assert:
+ that:
+ - "apt_result is success"
+ - "dpkg_result is success"
+ - "'1.0.0' in dpkg_result.stdout"
+ always:
+ - name: Clean up
+ apt:
+ name: foo
+ state: absent
+ allow_unauthenticated: yes
diff --git a/test/integration/targets/apt/tasks/url-with-deps.yml b/test/integration/targets/apt/tasks/url-with-deps.yml
index ed2f7073..7c70eb90 100644
--- a/test/integration/targets/apt/tasks/url-with-deps.yml
+++ b/test/integration/targets/apt/tasks/url-with-deps.yml
@@ -13,10 +13,10 @@
# on vim-tiny. Really any .deb will work here so long as it has
# dependencies that exist in a repo and get brought in.
# The source and files for building this .deb can be found here:
- # https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/apt/echo-hello-source.tar.gz
+ # https://ci-files.testing.ansible.com/test/integration/targets/apt/echo-hello-source.tar.gz
- name: Install deb file with dependencies from URL (check_mode)
apt:
- deb: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/apt/echo-hello_1.0_all.deb
+ deb: https://ci-files.testing.ansible.com/test/integration/targets/apt/echo-hello_1.0_all.deb
check_mode: true
register: apt_url_deps_check_mode
@@ -33,7 +33,7 @@
- name: Install deb file with dependencies from URL (for real this time)
apt:
- deb: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/apt/echo-hello_1.0_all.deb
+ deb: https://ci-files.testing.ansible.com/test/integration/targets/apt/echo-hello_1.0_all.deb
register: apt_url_deps
- name: check to make sure we installed the package
diff --git a/test/integration/targets/apt_key/aliases b/test/integration/targets/apt_key/aliases
index f46fd701..a820ec90 100644
--- a/test/integration/targets/apt_key/aliases
+++ b/test/integration/targets/apt_key/aliases
@@ -3,4 +3,3 @@ skip/freebsd
skip/osx
skip/macos
skip/rhel
-skip/aix
diff --git a/test/integration/targets/apt_key/tasks/apt_key_binary.yml b/test/integration/targets/apt_key/tasks/apt_key_binary.yml
index 4a351446..b120bd57 100644
--- a/test/integration/targets/apt_key/tasks/apt_key_binary.yml
+++ b/test/integration/targets/apt_key/tasks/apt_key_binary.yml
@@ -2,7 +2,7 @@
- name: Ensure import of binary key downloaded using URLs works
apt_key:
- url: https://ansible-ci-files.s3.us-east-1.amazonaws.com/test/integration/targets/apt_key/apt-key-example-binary.gpg
+ url: https://ci-files.testing.ansible.com/test/integration/targets/apt_key/apt-key-example-binary.gpg
register: apt_key_binary_test
- name: Validate the results
diff --git a/test/integration/targets/apt_key/tasks/apt_key_inline_data.yml b/test/integration/targets/apt_key/tasks/apt_key_inline_data.yml
index 13174e48..916fa5ae 100644
--- a/test/integration/targets/apt_key/tasks/apt_key_inline_data.yml
+++ b/test/integration/targets/apt_key/tasks/apt_key_inline_data.yml
@@ -1,5 +1,5 @@
- name: "Ensure import of a deliberately corrupted downloaded GnuPG binary key results in an 'inline data' occurrence in the message"
apt_key:
- url: https://ansible-ci-files.s3.us-east-1.amazonaws.com/test/integration/targets/apt_key/apt-key-corrupt-zeros-2k.gpg
+ url: https://ci-files.testing.ansible.com/test/integration/targets/apt_key/apt-key-corrupt-zeros-2k.gpg
register: gpg_inline_result
failed_when: "not ('inline data' in gpg_inline_result.msg)"
diff --git a/test/integration/targets/apt_key/tasks/file.yml b/test/integration/targets/apt_key/tasks/file.yml
index 16b62736..c22f3a40 100644
--- a/test/integration/targets/apt_key/tasks/file.yml
+++ b/test/integration/targets/apt_key/tasks/file.yml
@@ -1,6 +1,6 @@
- name: Get Fedora GPG Key
get_url:
- url: https://ansible-ci-files.s3.us-east-1.amazonaws.com/test/integration/targets/apt_key/fedora.gpg
+ url: https://ci-files.testing.ansible.com/test/integration/targets/apt_key/fedora.gpg
dest: /tmp/fedora.gpg
- name: Ensure clean slate
@@ -42,7 +42,7 @@
- name: add key from url
apt_key:
- url: https://ansible-ci-files.s3.us-east-1.amazonaws.com/test/integration/targets/apt_key/fedora.gpg
+ url: https://ci-files.testing.ansible.com/test/integration/targets/apt_key/fedora.gpg
register: apt_key_url
- name: verify key from url
diff --git a/test/integration/targets/apt_key/tasks/main.yml b/test/integration/targets/apt_key/tasks/main.yml
index 9ef44e45..ffb89b22 100644
--- a/test/integration/targets/apt_key/tasks/main.yml
+++ b/test/integration/targets/apt_key/tasks/main.yml
@@ -16,14 +16,6 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-- set_fact: output_dir_test={{output_dir}}/apt_key
-
-- name: make sure our testing sub-directory does not exist
- file: path="{{ output_dir_test }}" state=absent
-
-- name: create our testing sub-directory
- file: path="{{ output_dir_test }}" state=directory
-
- import_tasks: 'apt_key.yml'
when: ansible_distribution in ('Ubuntu', 'Debian')
diff --git a/test/integration/targets/apt_repository/aliases b/test/integration/targets/apt_repository/aliases
index 7e462190..34e2b540 100644
--- a/test/integration/targets/apt_repository/aliases
+++ b/test/integration/targets/apt_repository/aliases
@@ -4,4 +4,3 @@ skip/freebsd
skip/osx
skip/macos
skip/rhel
-skip/aix
diff --git a/test/integration/targets/assemble/meta/main.yml b/test/integration/targets/assemble/meta/main.yml
index a9d0b468..d057311e 100644
--- a/test/integration/targets/assemble/meta/main.yml
+++ b/test/integration/targets/assemble/meta/main.yml
@@ -18,3 +18,4 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/assemble/tasks/main.yml b/test/integration/targets/assemble/tasks/main.yml
index 5e779cfb..14eea3f3 100644
--- a/test/integration/targets/assemble/tasks/main.yml
+++ b/test/integration/targets/assemble/tasks/main.yml
@@ -16,21 +16,12 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-- name: create a new directory for file source
- file: dest="{{output_dir}}/src" state=directory
- register: result
-
-- name: assert the directory was created
- assert:
- that:
- - "result.state == 'directory'"
-
- name: copy the files to a new directory
- copy: src="./" dest="{{output_dir}}/src"
+ copy: src="./" dest="{{remote_tmp_dir}}/src"
register: result
- name: create unicode file for test
- shell: echo "π" > {{ output_dir }}/src/ßΩ.txt
+ shell: echo "π" > {{ remote_tmp_dir }}/src/ßΩ.txt
register: result
- name: assert that the new file was created
@@ -39,7 +30,7 @@
- "result.changed == true"
- name: test assemble with all fragments
- assemble: src="{{output_dir}}/src" dest="{{output_dir}}/assembled1"
+ assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled1"
register: result
- name: assert the fragments were assembled
@@ -50,7 +41,7 @@
- "result.checksum == '74152e9224f774191bc0bedf460d35de86ad90e6'"
- name: test assemble with all fragments
- assemble: src="{{output_dir}}/src" dest="{{output_dir}}/assembled1"
+ assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled1"
register: result
- name: assert that the same assemble made no changes
@@ -61,7 +52,7 @@
- "result.checksum == '74152e9224f774191bc0bedf460d35de86ad90e6'"
- name: test assemble with all fragments and decrypt=True
- assemble: src="{{output_dir}}/src" dest="{{output_dir}}/assembled2" decrypt=yes
+ assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled2" decrypt=yes
register: result
- name: assert the fragments were assembled with decrypt=True
@@ -72,7 +63,7 @@
- "result.checksum == '74152e9224f774191bc0bedf460d35de86ad90e6'"
- name: test assemble with all fragments and decrypt=True
- assemble: src="{{output_dir}}/src" dest="{{output_dir}}/assembled2" decrypt=yes
+ assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled2" decrypt=yes
register: result
- name: assert that the same assemble made no changes with decrypt=True
@@ -83,7 +74,7 @@
- "result.checksum == '74152e9224f774191bc0bedf460d35de86ad90e6'"
- name: test assemble with fragments matching a regex
- assemble: src="{{output_dir}}/src" dest="{{output_dir}}/assembled3" regexp="^fragment[1-3]$"
+ assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled3" regexp="^fragment[1-3]$"
register: result
- name: assert the fragments were assembled with a regex
@@ -93,7 +84,7 @@
- "result.checksum == 'edfe2d7487ef8f5ebc0f1c4dc57ba7b70a7b8e2b'"
- name: test assemble with fragments matching a regex and decrypt=True
- assemble: src="{{output_dir}}/src" dest="{{output_dir}}/assembled4" regexp="^fragment[1-3]$" decrypt=yes
+ assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled4" regexp="^fragment[1-3]$" decrypt=yes
register: result
- name: assert the fragments were assembled with a regex and decrypt=True
@@ -103,7 +94,7 @@
- "result.checksum == 'edfe2d7487ef8f5ebc0f1c4dc57ba7b70a7b8e2b'"
- name: test assemble with a delimiter
- assemble: src="{{output_dir}}/src" dest="{{output_dir}}/assembled5" delimiter="#--- delimiter ---#"
+ assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled5" delimiter="#--- delimiter ---#"
register: result
- name: assert the fragments were assembled with a delimiter
@@ -113,7 +104,7 @@
- "result.checksum == 'd986cefb82e34e4cf14d33a3cda132ff45aa2980'"
- name: test assemble with a delimiter and decrypt=True
- assemble: src="{{output_dir}}/src" dest="{{output_dir}}/assembled6" delimiter="#--- delimiter ---#" decrypt=yes
+ assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled6" delimiter="#--- delimiter ---#" decrypt=yes
register: result
- name: assert the fragments were assembled with a delimiter and decrypt=True
@@ -123,7 +114,7 @@
- "result.checksum == 'd986cefb82e34e4cf14d33a3cda132ff45aa2980'"
- name: test assemble with remote_src=False
- assemble: src="./" dest="{{output_dir}}/assembled7" remote_src=no
+ assemble: src="./" dest="{{remote_tmp_dir}}/assembled7" remote_src=no
register: result
- name: assert the fragments were assembled without remote
@@ -133,7 +124,7 @@
- "result.checksum == '048a1bd1951aa5ccc427eeb4ca19aee45e9c68b3'"
- name: test assemble with remote_src=False and decrypt=True
- assemble: src="./" dest="{{output_dir}}/assembled8" remote_src=no decrypt=yes
+ assemble: src="./" dest="{{remote_tmp_dir}}/assembled8" remote_src=no decrypt=yes
register: result
- name: assert the fragments were assembled without remote and decrypt=True
@@ -143,7 +134,7 @@
- "result.checksum == '048a1bd1951aa5ccc427eeb4ca19aee45e9c68b3'"
- name: test assemble with remote_src=False and a delimiter
- assemble: src="./" dest="{{output_dir}}/assembled9" remote_src=no delimiter="#--- delimiter ---#"
+ assemble: src="./" dest="{{remote_tmp_dir}}/assembled9" remote_src=no delimiter="#--- delimiter ---#"
register: result
- name: assert the fragments were assembled without remote
@@ -153,7 +144,7 @@
- "result.checksum == '505359f48c65b3904127cf62b912991d4da7ed6d'"
- name: test assemble with remote_src=False and a delimiter and decrypt=True
- assemble: src="./" dest="{{output_dir}}/assembled10" remote_src=no delimiter="#--- delimiter ---#" decrypt=yes
+ assemble: src="./" dest="{{remote_tmp_dir}}/assembled10" remote_src=no delimiter="#--- delimiter ---#" decrypt=yes
register: result
- name: assert the fragments were assembled without remote
diff --git a/test/integration/targets/async/aliases b/test/integration/targets/async/aliases
index 4d56e5c7..c989cd70 100644
--- a/test/integration/targets/async/aliases
+++ b/test/integration/targets/async/aliases
@@ -1,4 +1,3 @@
async_status
async_wrapper
shippable/posix/group2
-skip/aix
diff --git a/test/integration/targets/become/aliases b/test/integration/targets/become/aliases
index ad691e7d..db54e68c 100644
--- a/test/integration/targets/become/aliases
+++ b/test/integration/targets/become/aliases
@@ -1,4 +1,3 @@
destructive
shippable/posix/group1
-skip/aix
context/target
diff --git a/test/integration/targets/binary/meta/main.yml b/test/integration/targets/binary/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/binary/meta/main.yml
+++ b/test/integration/targets/binary/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/binary/tasks/main.yml b/test/integration/targets/binary/tasks/main.yml
index 486ee6d6..2d417b56 100644
--- a/test/integration/targets/binary/tasks/main.yml
+++ b/test/integration/targets/binary/tasks/main.yml
@@ -12,37 +12,37 @@
- name: get checksums that we expect later files to have
copy:
src: from_playbook
- dest: "{{ output_dir }}"
+ dest: "{{ remote_tmp_dir }}"
- copy:
src: b64_utf8
- dest: "{{ output_dir }}"
+ dest: "{{ remote_tmp_dir }}"
- copy:
src: b64_latin1
- dest: "{{ output_dir }}"
+ dest: "{{ remote_tmp_dir }}"
- stat:
- path: "{{ output_dir }}/from_playbook"
+ path: "{{ remote_tmp_dir }}/from_playbook"
register: from_playbook
- stat:
- path: "{{ output_dir }}/b64_utf8"
+ path: "{{ remote_tmp_dir }}/b64_utf8"
register: b64_utf8
- stat:
- path: "{{ output_dir }}/b64_latin1"
+ path: "{{ remote_tmp_dir }}/b64_latin1"
register: b64_latin1
# Tests themselves
- name: copy with utf-8 content in a playbook
copy:
content: "{{ simple_accents }}\n"
- dest: "{{ output_dir }}/from_playbook.txt"
+ dest: "{{ remote_tmp_dir }}/from_playbook.txt"
- name: Check that copying utf-8 content matches
stat:
- path: "{{ output_dir }}/from_playbook.txt"
+ path: "{{ remote_tmp_dir }}/from_playbook.txt"
register: results
- assert:
@@ -52,11 +52,11 @@
- name: copy with utf8 in a base64 encoded string
copy:
content: "{{ utf8_simple_accents|b64decode }}\n"
- dest: "{{ output_dir }}/b64_utf8.txt"
+ dest: "{{ remote_tmp_dir }}/b64_utf8.txt"
- name: Check that utf8 in a base64 string matches
stat:
- path: "{{ output_dir }}/b64_utf8.txt"
+ path: "{{ remote_tmp_dir }}/b64_utf8.txt"
register: results
- assert:
@@ -66,11 +66,11 @@
- name: copy with latin1 in a base64 encoded string
copy:
content: "{{ latin1_simple_accents|b64decode }}\n"
- dest: "{{ output_dir }}/b64_latin1.txt"
+ dest: "{{ remote_tmp_dir }}/b64_latin1.txt"
- name: Check that latin1 in a base64 string matches
stat:
- path: "{{ output_dir }}/b64_latin1.txt"
+ path: "{{ remote_tmp_dir }}/b64_latin1.txt"
register: results
- assert:
@@ -83,11 +83,11 @@
- name: Template with a unicode string from the playbook
template:
src: "from_playbook_template.j2"
- dest: "{{ output_dir }}/from_playbook_template.txt"
+ dest: "{{ remote_tmp_dir }}/from_playbook_template.txt"
- name: Check that writing a template from a playbook var matches
stat:
- path: "{{ output_dir }}/from_playbook_template.txt"
+ path: "{{ remote_tmp_dir }}/from_playbook_template.txt"
register: results
- assert:
@@ -97,11 +97,11 @@
- name: Template with utf8 in a base64 encoded string
template:
src: "b64_utf8_template.j2"
- dest: "{{ output_dir }}/b64_utf8_template.txt"
+ dest: "{{ remote_tmp_dir }}/b64_utf8_template.txt"
- name: Check that writing a template from a base64 encoded utf8 string matches
stat:
- path: "{{ output_dir }}/b64_utf8_template.txt"
+ path: "{{ remote_tmp_dir }}/b64_utf8_template.txt"
register: results
- assert:
@@ -111,11 +111,11 @@
- name: Template with latin1 in a base64 encoded string
template:
src: "b64_latin1_template.j2"
- dest: "{{ output_dir }}/b64_latin1_template.txt"
+ dest: "{{ remote_tmp_dir }}/b64_latin1_template.txt"
- name: Check that writing a template from a base64 encoded latin1 string matches
stat:
- path: "{{ output_dir }}/b64_latin1_template.txt"
+ path: "{{ remote_tmp_dir }}/b64_latin1_template.txt"
register: results
- assert:
diff --git a/test/integration/targets/binary_modules/Makefile b/test/integration/targets/binary_modules/Makefile
index c3092e47..398866f6 100644
--- a/test/integration/targets/binary_modules/Makefile
+++ b/test/integration/targets/binary_modules/Makefile
@@ -3,11 +3,10 @@
all:
# Compiled versions of these binary modules are available at the url below.
# This avoids a dependency on go and keeps the binaries out of our git repository.
- # https://ansible-ci-files.s3.amazonaws.com/test/integration/roles/test_binary_modules/
+ # https://ci-files.testing.ansible.com/test/integration/roles/test_binary_modules/
cd library; \
GOOS=linux GOARCH=amd64 go build -o helloworld_linux_x86_64 helloworld.go; \
GOOS=linux GOARCH=ppc64le go build -o helloworld_linux_ppc64le helloworld.go; \
- GOOS=aix GOARCH=ppc64 go build -o helloworld_aix_chrp helloworld.go; \
GOOS=windows GOARCH=amd64 go build -o helloworld_win32nt_64-bit.exe helloworld.go; \
GOOS=darwin GOARCH=amd64 go build -o helloworld_darwin_x86_64 helloworld.go; \
GOOS=freebsd GOARCH=amd64 go build -o helloworld_freebsd_amd64 helloworld.go
diff --git a/test/integration/targets/binary_modules/download_binary_modules.yml b/test/integration/targets/binary_modules/download_binary_modules.yml
index e8f51b1a..80b91453 100644
--- a/test/integration/targets/binary_modules/download_binary_modules.yml
+++ b/test/integration/targets/binary_modules/download_binary_modules.yml
@@ -3,7 +3,7 @@
- name: download binary module
tags: test_binary_modules
get_url:
- url: "https://ansible-ci-files.s3.amazonaws.com/test/integration/roles/test_binary_modules/{{ filename }}"
+ url: "https://ci-files.testing.ansible.com/test/integration/roles/test_binary_modules/{{ filename }}"
dest: "{{ playbook_dir }}/library/{{ filename }}"
mode: 0755
delegate_to: localhost
diff --git a/test/integration/targets/callback_default/callback_default.out.result_format_yaml.stderr b/test/integration/targets/callback_default/callback_default.out.result_format_yaml.stderr
new file mode 100644
index 00000000..d3e07d47
--- /dev/null
+++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml.stderr
@@ -0,0 +1,2 @@
++ ansible-playbook -i inventory test.yml
+++ set +x
diff --git a/test/integration/targets/callback_default/callback_default.out.result_format_yaml.stdout b/test/integration/targets/callback_default/callback_default.out.result_format_yaml.stdout
new file mode 100644
index 00000000..5e93b976
--- /dev/null
+++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml.stdout
@@ -0,0 +1,97 @@
+
+PLAY [testhost] ****************************************************************
+
+TASK [Changed task] ************************************************************
+changed: [testhost]
+
+TASK [Ok task] *****************************************************************
+ok: [testhost]
+
+TASK [Failed task] *************************************************************
+fatal: [testhost]: FAILED! =>
+ changed: false
+ msg: no reason
+...ignoring
+
+TASK [Skipped task] ************************************************************
+skipping: [testhost]
+
+TASK [Task with var in name (foo bar)] *****************************************
+changed: [testhost]
+
+TASK [Loop task] ***************************************************************
+changed: [testhost] => (item=foo-1)
+changed: [testhost] => (item=foo-2)
+changed: [testhost] => (item=foo-3)
+
+TASK [debug loop] **************************************************************
+changed: [testhost] => (item=debug-1) =>
+ msg: debug-1
+failed: [testhost] (item=debug-2) =>
+ msg: debug-2
+ok: [testhost] => (item=debug-3) =>
+ msg: debug-3
+skipping: [testhost] => (item=debug-4)
+fatal: [testhost]: FAILED! =>
+ msg: One or more items failed
+...ignoring
+
+TASK [EXPECTED FAILURE Failed task to be rescued] ******************************
+fatal: [testhost]: FAILED! =>
+ changed: false
+ msg: Failed as requested from task
+
+TASK [Rescue task] *************************************************************
+changed: [testhost]
+
+TASK [include_tasks] ***********************************************************
+included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
+
+TASK [debug] *******************************************************************
+ok: [testhost] =>
+ item: 1
+
+TASK [copy] ********************************************************************
+changed: [testhost]
+
+TASK [replace] *****************************************************************
+--- before: .../test_diff.txt
++++ after: .../test_diff.txt
+@@ -1 +1 @@
+-foo
+\ No newline at end of file
++bar
+\ No newline at end of file
+
+changed: [testhost]
+
+TASK [replace] *****************************************************************
+ok: [testhost]
+
+RUNNING HANDLER [Test handler 1] ***********************************************
+changed: [testhost]
+
+RUNNING HANDLER [Test handler 2] ***********************************************
+ok: [testhost]
+
+RUNNING HANDLER [Test handler 3] ***********************************************
+changed: [testhost]
+
+PLAY [testhost] ****************************************************************
+
+TASK [First free task] *********************************************************
+changed: [testhost]
+
+TASK [Second free task] ********************************************************
+changed: [testhost]
+
+TASK [Include some tasks] ******************************************************
+included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
+
+TASK [debug] *******************************************************************
+ok: [testhost] =>
+ item: 1
+
+PLAY RECAP *********************************************************************
+testhost : ok=19 changed=11 unreachable=0 failed=0 skipped=1 rescued=1 ignored=2
+
diff --git a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stderr b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stderr
new file mode 100644
index 00000000..4884dfe0
--- /dev/null
+++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stderr
@@ -0,0 +1,2 @@
++ ansible-playbook -i inventory test.yml -v
+++ set +x
diff --git a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout
new file mode 100644
index 00000000..806841b0
--- /dev/null
+++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout
@@ -0,0 +1,282 @@
+
+
+PLAY [testhost] ****************************************************************
+
+TASK [Changed task] ************************************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+
+TASK [Ok task] *****************************************************************
+ok: [testhost] =>
+ changed: false
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+
+TASK [Failed task] *************************************************************
+fatal: [testhost]: FAILED! =>
+ changed: false
+ msg: no reason
+...ignoring
+
+TASK [Skipped task] ************************************************************
+skipping: [testhost] =>
+ changed: false
+ skip_reason: Conditional result was False
+
+TASK [Task with var in name (foo bar)] *****************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+
+TASK [Loop task] ***************************************************************
+changed: [testhost] => (item=foo-1) =>
+ ansible_loop_var: item
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ item: 1
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+changed: [testhost] => (item=foo-2) =>
+ ansible_loop_var: item
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ item: 2
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+changed: [testhost] => (item=foo-3) =>
+ ansible_loop_var: item
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ item: 3
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+
+TASK [debug loop] **************************************************************
+changed: [testhost] => (item=debug-1) =>
+ msg: debug-1
+failed: [testhost] (item=debug-2) =>
+ msg: debug-2
+ok: [testhost] => (item=debug-3) =>
+ msg: debug-3
+skipping: [testhost] => (item=debug-4) =>
+ ansible_loop_var: item
+ item: 4
+fatal: [testhost]: FAILED! =>
+ msg: One or more items failed
+...ignoring
+
+TASK [EXPECTED FAILURE Failed task to be rescued] ******************************
+fatal: [testhost]: FAILED! =>
+ changed: false
+ msg: Failed as requested from task
+
+TASK [Rescue task] *************************************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - rescued
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: rescued
+ stdout_lines: <omitted>
+
+TASK [include_tasks] ***********************************************************
+included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
+
+TASK [debug] *******************************************************************
+ok: [testhost] =>
+ item: 1
+
+TASK [copy] ********************************************************************
+changed: [testhost] =>
+ changed: true
+ checksum: 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33
+ dest: .../test_diff.txt
+ gid: 0
+ group: root
+ md5sum: acbd18db4cc2f85cedef654fccc4a4d8
+ mode: '0644'
+ owner: root
+ size: 3
+ src: .../source
+ state: file
+ uid: 0
+
+TASK [replace] *****************************************************************
+--- before: .../test_diff.txt
++++ after: .../test_diff.txt
+@@ -1 +1 @@
+-foo
+\ No newline at end of file
++bar
+\ No newline at end of file
+
+changed: [testhost] =>
+ changed: true
+ msg: 1 replacements made
+ rc: 0
+
+TASK [replace] *****************************************************************
+ok: [testhost] =>
+ changed: false
+ msg: 1 replacements made
+ rc: 0
+
+RUNNING HANDLER [Test handler 1] ***********************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+
+RUNNING HANDLER [Test handler 2] ***********************************************
+ok: [testhost] =>
+ changed: false
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+
+RUNNING HANDLER [Test handler 3] ***********************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+
+PLAY [testhost] ****************************************************************
+
+TASK [First free task] *********************************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+
+TASK [Second free task] ********************************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: <omitted>
+ stdout: foo
+ stdout_lines: <omitted>
+
+TASK [Include some tasks] ******************************************************
+included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
+
+TASK [debug] *******************************************************************
+ok: [testhost] =>
+ item: 1
+
+PLAY RECAP *********************************************************************
+testhost : ok=19 changed=11 unreachable=0 failed=0 skipped=1 rescued=1 ignored=2
+
diff --git a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stderr b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stderr
new file mode 100644
index 00000000..4884dfe0
--- /dev/null
+++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stderr
@@ -0,0 +1,2 @@
++ ansible-playbook -i inventory test.yml -v
+++ set +x
diff --git a/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout
new file mode 100644
index 00000000..b5017d7a
--- /dev/null
+++ b/test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout
@@ -0,0 +1,294 @@
+
+
+PLAY [testhost] ****************************************************************
+
+TASK [Changed task] ************************************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+
+TASK [Ok task] *****************************************************************
+ok: [testhost] =>
+ changed: false
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+
+TASK [Failed task] *************************************************************
+fatal: [testhost]: FAILED! =>
+ changed: false
+ msg: no reason
+...ignoring
+
+TASK [Skipped task] ************************************************************
+skipping: [testhost] =>
+ changed: false
+ skip_reason: Conditional result was False
+
+TASK [Task with var in name (foo bar)] *****************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+
+TASK [Loop task] ***************************************************************
+changed: [testhost] => (item=foo-1) =>
+ ansible_loop_var: item
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ item: 1
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+changed: [testhost] => (item=foo-2) =>
+ ansible_loop_var: item
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ item: 2
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+changed: [testhost] => (item=foo-3) =>
+ ansible_loop_var: item
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ item: 3
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+
+TASK [debug loop] **************************************************************
+changed: [testhost] => (item=debug-1) =>
+ msg: debug-1
+failed: [testhost] (item=debug-2) =>
+ msg: debug-2
+ok: [testhost] => (item=debug-3) =>
+ msg: debug-3
+skipping: [testhost] => (item=debug-4) =>
+ ansible_loop_var: item
+ item: 4
+fatal: [testhost]: FAILED! =>
+ msg: One or more items failed
+...ignoring
+
+TASK [EXPECTED FAILURE Failed task to be rescued] ******************************
+fatal: [testhost]: FAILED! =>
+ changed: false
+ msg: Failed as requested from task
+
+TASK [Rescue task] *************************************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - rescued
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: rescued
+ stdout_lines:
+ - rescued
+
+TASK [include_tasks] ***********************************************************
+included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
+
+TASK [debug] *******************************************************************
+ok: [testhost] =>
+ item: 1
+
+TASK [copy] ********************************************************************
+changed: [testhost] =>
+ changed: true
+ checksum: 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33
+ dest: .../test_diff.txt
+ gid: 0
+ group: root
+ md5sum: acbd18db4cc2f85cedef654fccc4a4d8
+ mode: '0644'
+ owner: root
+ size: 3
+ src: .../source
+ state: file
+ uid: 0
+
+TASK [replace] *****************************************************************
+--- before: .../test_diff.txt
++++ after: .../test_diff.txt
+@@ -1 +1 @@
+-foo
+\ No newline at end of file
++bar
+\ No newline at end of file
+
+changed: [testhost] =>
+ changed: true
+ msg: 1 replacements made
+ rc: 0
+
+TASK [replace] *****************************************************************
+ok: [testhost] =>
+ changed: false
+ msg: 1 replacements made
+ rc: 0
+
+RUNNING HANDLER [Test handler 1] ***********************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+
+RUNNING HANDLER [Test handler 2] ***********************************************
+ok: [testhost] =>
+ changed: false
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+
+RUNNING HANDLER [Test handler 3] ***********************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+
+PLAY [testhost] ****************************************************************
+
+TASK [First free task] *********************************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+
+TASK [Second free task] ********************************************************
+changed: [testhost] =>
+ changed: true
+ cmd:
+ - echo
+ - foo
+ delta: '0:00:00.000000'
+ end: '0000-00-00 00:00:00.000000'
+ msg: ''
+ rc: 0
+ start: '0000-00-00 00:00:00.000000'
+ stderr: ''
+ stderr_lines: []
+ stdout: foo
+ stdout_lines:
+ - foo
+
+TASK [Include some tasks] ******************************************************
+included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
+
+TASK [debug] *******************************************************************
+ok: [testhost] =>
+ item: 1
+
+PLAY RECAP *********************************************************************
+testhost : ok=19 changed=11 unreachable=0 failed=0 skipped=1 rescued=1 ignored=2
+
diff --git a/test/integration/targets/callback_default/callback_default.out.yaml_result_format_yaml_verbose.stderr b/test/integration/targets/callback_default/callback_default.out.yaml_result_format_yaml_verbose.stderr
new file mode 100644
index 00000000..6d767d29
--- /dev/null
+++ b/test/integration/targets/callback_default/callback_default.out.yaml_result_format_yaml_verbose.stderr
@@ -0,0 +1,2 @@
++ ansible-playbook -i inventory test_yaml.yml -v
+++ set +x
diff --git a/test/integration/targets/callback_default/callback_default.out.yaml_result_format_yaml_verbose.stdout b/test/integration/targets/callback_default/callback_default.out.yaml_result_format_yaml_verbose.stdout
new file mode 100644
index 00000000..36437e59
--- /dev/null
+++ b/test/integration/targets/callback_default/callback_default.out.yaml_result_format_yaml_verbose.stdout
@@ -0,0 +1,29 @@
+
+
+PLAY [testhost] ****************************************************************
+
+TASK [Sample task name] ********************************************************
+ok: [testhost] =>
+ msg: sample debug msg
+
+TASK [Umlaut output] ***********************************************************
+ok: [testhost] =>
+ msg: |-
+ äöü
+ éêè
+ ßï☺
+
+TASK [Test to_yaml] ************************************************************
+ok: [testhost] =>
+ msg: |-
+ 'line 1
+
+ line 2
+
+ line 3
+
+ '
+
+PLAY RECAP *********************************************************************
+testhost : ok=3 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
+
diff --git a/test/integration/targets/callback_default/runme.sh b/test/integration/targets/callback_default/runme.sh
index 50b4cf70..0ee4259d 100755
--- a/test/integration/targets/callback_default/runme.sh
+++ b/test/integration/targets/callback_default/runme.sh
@@ -15,21 +15,28 @@ set -eux
run_test() {
local testname=$1
+ local playbook=$2
# output was recorded w/o cowsay, ensure we reproduce the same
export ANSIBLE_NOCOWS=1
# The shenanigans with redirection and 'tee' are to capture STDOUT and
# STDERR separately while still displaying both to the console
- { ansible-playbook -i inventory test.yml \
+ { ansible-playbook -i inventory "$playbook" "${@:3}" \
> >(set +x; tee "${OUTFILE}.${testname}.stdout"); } \
2> >(set +x; tee "${OUTFILE}.${testname}.stderr" >&2)
# Scrub deprication warning that shows up in Python 2.6 on CentOS 6
sed -i -e '/RandomPool_DeprecationWarning/d' "${OUTFILE}.${testname}.stderr"
- sed -i -e 's/included: .*\/test\/integration/included: ...\/test\/integration/g' "${OUTFILE}.${testname}.stdout"
- sed -i -e 's/@@ -1,1 +1,1 @@/@@ -1 +1 @@/g' "${OUTFILE}.${testname}.stdout"
- sed -i -e 's/: .*\/test_diff\.txt/: ...\/test_diff.txt/g' "${OUTFILE}.${testname}.stdout"
- sed -i -e "s#${ANSIBLE_PLAYBOOK_DIR}#TEST_PATH#g" "${OUTFILE}.${testname}.stdout"
+ sed -i -e 's/included: .*\/test\/integration/included: ...\/test\/integration/g' "${OUTFILE}.${testname}.stdout"
+ sed -i -e 's/@@ -1,1 +1,1 @@/@@ -1 +1 @@/g' "${OUTFILE}.${testname}.stdout"
+ sed -i -e 's/: .*\/test_diff\.txt/: ...\/test_diff.txt/g' "${OUTFILE}.${testname}.stdout"
+ sed -i -e "s#${ANSIBLE_PLAYBOOK_DIR}#TEST_PATH#g" "${OUTFILE}.${testname}.stdout"
+ sed -i -e 's/^Using .*//g' "${OUTFILE}.${testname}.stdout"
+ sed -i -e 's/[0-9]:[0-9]\{2\}:[0-9]\{2\}\.[0-9]\{6\}/0:00:00.000000/g' "${OUTFILE}.${testname}.stdout"
+ sed -i -e 's/[0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\} [0-9]\{2\}:[0-9]\{2\}:[0-9]\{2\}\.[0-9]\{6\}/0000-00-00 00:00:00.000000/g' "${OUTFILE}.${testname}.stdout"
+ sed -i -e 's#: .*/source$#: .../source#g' "${OUTFILE}.${testname}.stdout"
+ sed -i -e '/secontext:/d' "${OUTFILE}.${testname}.stdout"
+ sed -i -e 's/group: wheel/group: root/g' "${OUTFILE}.${testname}.stdout"
diff -u "${ORIGFILE}.${testname}.stdout" "${OUTFILE}.${testname}.stdout" || diff_failure
diff -u "${ORIGFILE}.${testname}.stderr" "${OUTFILE}.${testname}.stderr" || diff_failure
@@ -123,7 +130,7 @@ export ANSIBLE_DISPLAY_OK_HOSTS=1
export ANSIBLE_DISPLAY_FAILED_STDERR=0
export ANSIBLE_CHECK_MODE_MARKERS=0
-run_test default
+run_test default test.yml
# Check for async output
# NOTE: regex to match 1 or more digits works for both BSD and GNU grep
@@ -135,32 +142,32 @@ rm -f async_test.out
# Hide skipped
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=0
-run_test hide_skipped
+run_test hide_skipped test.yml
# Hide skipped/ok
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=0
export ANSIBLE_DISPLAY_OK_HOSTS=0
-run_test hide_skipped_ok
+run_test hide_skipped_ok test.yml
# Hide ok
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=1
export ANSIBLE_DISPLAY_OK_HOSTS=0
-run_test hide_ok
+run_test hide_ok test.yml
# Failed to stderr
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=1
export ANSIBLE_DISPLAY_OK_HOSTS=1
export ANSIBLE_DISPLAY_FAILED_STDERR=1
-run_test failed_to_stderr
+run_test failed_to_stderr test.yml
export ANSIBLE_DISPLAY_FAILED_STDERR=0
# Test displaying task path on failure
export ANSIBLE_SHOW_TASK_PATH_ON_FAILURE=1
-run_test display_path_on_failure
+run_test display_path_on_failure test.yml
export ANSIBLE_SHOW_TASK_PATH_ON_FAILURE=0
@@ -178,6 +185,25 @@ if test "$(grep -c 'UNREACHABLE' "${BASEFILE}.unreachable.stderr")" -ne 1; then
echo "Test failed"
exit 1
fi
+export ANSIBLE_DISPLAY_FAILED_STDERR=0
+
+export ANSIBLE_CALLBACK_RESULT_FORMAT=yaml
+run_test result_format_yaml test.yml
+export ANSIBLE_CALLBACK_RESULT_FORMAT=json
+
+export ANSIBLE_CALLBACK_RESULT_FORMAT=yaml
+export ANSIBLE_CALLBACK_FORMAT_PRETTY=1
+run_test result_format_yaml_lossy_verbose test.yml -v
+run_test yaml_result_format_yaml_verbose test_yaml.yml -v
+export ANSIBLE_CALLBACK_RESULT_FORMAT=json
+unset ANSIBLE_CALLBACK_FORMAT_PRETTY
+
+export ANSIBLE_CALLBACK_RESULT_FORMAT=yaml
+export ANSIBLE_CALLBACK_FORMAT_PRETTY=0
+run_test result_format_yaml_verbose test.yml -v
+export ANSIBLE_CALLBACK_RESULT_FORMAT=json
+unset ANSIBLE_CALLBACK_FORMAT_PRETTY
+
## DRY RUN tests
#
diff --git a/test/integration/targets/callback_default/test_yaml.yml b/test/integration/targets/callback_default/test_yaml.yml
new file mode 100644
index 00000000..64059064
--- /dev/null
+++ b/test/integration/targets/callback_default/test_yaml.yml
@@ -0,0 +1,19 @@
+- hosts: testhost
+ gather_facts: false
+ tasks:
+ - name: Sample task name
+ debug:
+ msg: sample debug msg
+
+ - name: Umlaut output
+ debug:
+ msg: "äöü\néêè\nßï☺"
+
+ - name: Test to_yaml
+ debug:
+ msg: "{{ data | to_yaml }}"
+ vars:
+ data: |
+ line 1
+ line 2
+ line 3
diff --git a/test/integration/targets/changed_when/tasks/main.yml b/test/integration/targets/changed_when/tasks/main.yml
index 4f0a8747..bc8da712 100644
--- a/test/integration/targets/changed_when/tasks/main.yml
+++ b/test/integration/targets/changed_when/tasks/main.yml
@@ -71,3 +71,41 @@
- invalid_conditional is failed
- invalid_conditional.stdout is defined
- invalid_conditional.changed_when_result is contains('boomboomboom')
+
+- add_host:
+ name: 'host_{{item}}'
+ loop:
+ - 1
+ - 2
+ changed_when: item == 2
+ register: add_host_loop_res
+
+- assert:
+ that:
+ - add_host_loop_res.results[0] is not changed
+ - add_host_loop_res.results[1] is changed
+ - add_host_loop_res is changed
+
+- group_by:
+ key: "test_{{ item }}"
+ loop:
+ - 1
+ - 2
+ changed_when: item == 2
+ register: group_by_loop_res
+
+- assert:
+ that:
+ - group_by_loop_res.results[0] is not changed
+ - group_by_loop_res.results[1] is changed
+ - group_by_loop_res is changed
+
+- name: use changed in changed_when
+ add_host:
+ name: 'host_3'
+ changed_when: add_host_loop_res is changed
+ register: add_host_loop_res
+
+- assert:
+ that:
+ - add_host_loop_res is changed
diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/win_selfcontained.ps1 b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/win_selfcontained.ps1
index 661bc0f6..986d5157 100644
--- a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/win_selfcontained.ps1
+++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/win_selfcontained.ps1
@@ -1,9 +1,9 @@
#!powershell
$res = @{
- changed = $false
- source = "user"
- msg = "hi from selfcontained.ps1"
+ changed = $false
+ source = "user"
+ msg = "hi from selfcontained.ps1"
}
ConvertTo-Json $res \ No newline at end of file
diff --git a/test/integration/targets/collections/test_bypass_host_loop.yml b/test/integration/targets/collections/test_bypass_host_loop.yml
index e95262b8..71f48d5e 100644
--- a/test/integration/targets/collections/test_bypass_host_loop.yml
+++ b/test/integration/targets/collections/test_bypass_host_loop.yml
@@ -5,9 +5,6 @@
collections:
- testns.testcoll
tasks:
- - meta: end_host
- when: lookup('pipe', ansible_playbook_python ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.7', '<')
-
- bypass_host_loop:
register: bypass
diff --git a/test/integration/targets/collections/test_collection_meta.yml b/test/integration/targets/collections/test_collection_meta.yml
index b682d220..8e611500 100644
--- a/test/integration/targets/collections/test_collection_meta.yml
+++ b/test/integration/targets/collections/test_collection_meta.yml
@@ -31,7 +31,7 @@
- assert:
that:
- redirect_failure is failed
- - '"no filter named ''testns.testredirect.dead_end''" in (redirect_failure.msg | lower)'
+ - '"No filter named ''testns.testredirect.dead_end''" in redirect_failure.msg'
# recursive filter redirect
- debug: msg="{{ 'data' | testns.testredirect.recursive_redirect }}"
ignore_errors: yes
diff --git a/test/integration/targets/command_shell/aliases b/test/integration/targets/command_shell/aliases
index 8dd7b884..a1bd9947 100644
--- a/test/integration/targets/command_shell/aliases
+++ b/test/integration/targets/command_shell/aliases
@@ -1,4 +1,3 @@
command
shippable/posix/group2
shell
-skip/aix
diff --git a/test/integration/targets/command_shell/meta/main.yml b/test/integration/targets/command_shell/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/command_shell/meta/main.yml
+++ b/test/integration/targets/command_shell/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/command_shell/tasks/main.yml b/test/integration/targets/command_shell/tasks/main.yml
index aad63c0d..031c5f4b 100644
--- a/test/integration/targets/command_shell/tasks/main.yml
+++ b/test/integration/targets/command_shell/tasks/main.yml
@@ -60,34 +60,34 @@
- argv_and_string_command.rc == 256
- set_fact:
- output_dir_test: "{{ output_dir }}/test_command_shell"
+ remote_tmp_dir_test: "{{ remote_tmp_dir }}/test_command_shell"
- name: make sure our testing sub-directory does not exist
file:
- path: "{{ output_dir_test }}"
+ path: "{{ remote_tmp_dir_test }}"
state: absent
- name: create our testing sub-directory
file:
- path: "{{ output_dir_test }}"
+ path: "{{ remote_tmp_dir_test }}"
state: directory
- name: prep our test script
copy:
src: test.sh
- dest: "{{ output_dir_test }}"
+ dest: "{{ remote_tmp_dir_test }}"
mode: '0755'
- name: prep our test script
copy:
src: create_afile.sh
- dest: "{{ output_dir_test }}"
+ dest: "{{ remote_tmp_dir_test }}"
mode: '0755'
- name: prep our test script
copy:
src: remove_afile.sh
- dest: "{{ output_dir_test }}"
+ dest: "{{ remote_tmp_dir_test }}"
mode: '0755'
- name: locate bash
@@ -99,7 +99,7 @@
##
- name: execute the test.sh script via command
- command: "{{ output_dir_test }}/test.sh"
+ command: "{{ remote_tmp_dir_test }}/test.sh"
register: command_result0
- name: assert that the script executed correctly
@@ -114,7 +114,7 @@
# FIXME doesn't have the expected stdout.
#- name: execute the test.sh script with executable via command
-# command: "{{output_dir_test }}/test.sh executable={{ bash.stdout }}"
+# command: "{{remote_tmp_dir_test }}/test.sh executable={{ bash.stdout }}"
# register: command_result1
#
#- name: assert that the script executed correctly with command
@@ -129,13 +129,13 @@
- name: execute the test.sh script with chdir via command
command: ./test.sh
args:
- chdir: "{{ output_dir_test }}"
+ chdir: "{{ remote_tmp_dir_test }}"
register: command_result2
- name: Check invalid chdir
command: echo
args:
- chdir: "{{ output_dir }}/nope"
+ chdir: "{{ remote_tmp_dir }}/nope"
ignore_errors: yes
register: chdir_invalid
@@ -152,23 +152,53 @@
- name: verify that afile.txt is absent
file:
- path: "{{ output_dir_test }}/afile.txt"
+ path: "{{ remote_tmp_dir_test }}/afile.txt"
state: absent
+- name: create afile.txt with create_afile.sh via command (check mode)
+ command: "{{ remote_tmp_dir_test }}/create_afile.sh {{remote_tmp_dir_test }}/afile.txt"
+ args:
+ creates: "{{ remote_tmp_dir_test }}/afile.txt"
+ register: check_mode_result
+ check_mode: yes
+
+- assert:
+ that:
+ - check_mode_result.changed
+ - "'skipped' not in check_mode_result"
+
+- name: verify that afile.txt still does not exist
+ stat:
+ path: "{{remote_tmp_dir_test}}/afile.txt"
+ register: stat_result
+ failed_when: stat_result.stat.exists
+
- name: create afile.txt with create_afile.sh via command
- command: "{{ output_dir_test }}/create_afile.sh {{output_dir_test }}/afile.txt"
+ command: "{{ remote_tmp_dir_test }}/create_afile.sh {{remote_tmp_dir_test }}/afile.txt"
args:
- creates: "{{ output_dir_test }}/afile.txt"
+ creates: "{{ remote_tmp_dir_test }}/afile.txt"
- name: verify that afile.txt is present
file:
- path: "{{ output_dir_test }}/afile.txt"
+ path: "{{ remote_tmp_dir_test }}/afile.txt"
state: file
+- name: re-run previous command using creates with globbing (check mode)
+ command: "{{ remote_tmp_dir_test }}/create_afile.sh {{ remote_tmp_dir_test }}/afile.txt"
+ args:
+ creates: "{{ remote_tmp_dir_test }}/afile.*"
+ register: check_mode_result
+ check_mode: yes
+
+- assert:
+ that:
+ - not check_mode_result.changed
+ - "'skipped' not in check_mode_result"
+
- name: re-run previous command using creates with globbing
- command: "{{ output_dir_test }}/create_afile.sh {{ output_dir_test }}/afile.txt"
+ command: "{{ remote_tmp_dir_test }}/create_afile.sh {{ remote_tmp_dir_test }}/afile.txt"
args:
- creates: "{{ output_dir_test }}/afile.*"
+ creates: "{{ remote_tmp_dir_test }}/afile.*"
register: command_result3
- name: assert that creates with globbing is working
@@ -178,18 +208,48 @@
# removes
+- name: remove afile.txt with remote_afile.sh via command (check mode)
+ command: "{{ remote_tmp_dir_test }}/remove_afile.sh {{ remote_tmp_dir_test }}/afile.txt"
+ args:
+ removes: "{{ remote_tmp_dir_test }}/afile.txt"
+ register: check_mode_result
+ check_mode: yes
+
+- assert:
+ that:
+ - check_mode_result.changed
+ - "'skipped' not in check_mode_result"
+
+- name: verify that afile.txt still exists
+ stat:
+ path: "{{remote_tmp_dir_test}}/afile.txt"
+ register: stat_result
+ failed_when: not stat_result.stat.exists
+
- name: remove afile.txt with remote_afile.sh via command
- command: "{{ output_dir_test }}/remove_afile.sh {{ output_dir_test }}/afile.txt"
+ command: "{{ remote_tmp_dir_test }}/remove_afile.sh {{ remote_tmp_dir_test }}/afile.txt"
args:
- removes: "{{ output_dir_test }}/afile.txt"
+ removes: "{{ remote_tmp_dir_test }}/afile.txt"
- name: verify that afile.txt is absent
- file: path={{output_dir_test}}/afile.txt state=absent
+ file: path={{remote_tmp_dir_test}}/afile.txt state=absent
+
+- name: re-run previous command using removes with globbing (check mode)
+ command: "{{ remote_tmp_dir_test }}/remove_afile.sh {{ remote_tmp_dir_test }}/afile.txt"
+ args:
+ removes: "{{ remote_tmp_dir_test }}/afile.*"
+ register: check_mode_result
+ check_mode: yes
+
+- assert:
+ that:
+ - not check_mode_result.changed
+ - "'skipped' not in check_mode_result"
- name: re-run previous command using removes with globbing
- command: "{{ output_dir_test }}/remove_afile.sh {{ output_dir_test }}/afile.txt"
+ command: "{{ remote_tmp_dir_test }}/remove_afile.sh {{ remote_tmp_dir_test }}/afile.txt"
args:
- removes: "{{ output_dir_test }}/afile.*"
+ removes: "{{ remote_tmp_dir_test }}/afile.*"
register: command_result4
- name: assert that removes with globbing is working
@@ -229,14 +289,14 @@
##
- name: Execute the test.sh script
- shell: "{{ output_dir_test }}/test.sh"
+ shell: "{{ remote_tmp_dir_test }}/test.sh"
register: shell_result0
- name: Assert that the script executed correctly
assert:
that:
- shell_result0 is changed
- - shell_result0.cmd == '{{ output_dir_test }}/test.sh'
+ - shell_result0.cmd == '{{ remote_tmp_dir_test }}/test.sh'
- shell_result0.rc == 0
- shell_result0.stderr == ''
- shell_result0.stdout == 'win'
@@ -246,7 +306,7 @@
# FIXME doesn't pass the expected stdout
#- name: execute the test.sh script
-# shell: "{{output_dir_test }}/test.sh executable={{ bash.stdout }}"
+# shell: "{{remote_tmp_dir_test }}/test.sh executable={{ bash.stdout }}"
# register: shell_result1
#
#- name: assert that the shell executed correctly
@@ -261,7 +321,7 @@
- name: Execute the test.sh script with chdir
shell: ./test.sh
args:
- chdir: "{{ output_dir_test }}"
+ chdir: "{{ remote_tmp_dir_test }}"
register: shell_result2
- name: Assert that the shell executed correctly with chdir
@@ -277,25 +337,25 @@
- name: Verify that afile.txt is absent
file:
- path: "{{ output_dir_test }}/afile.txt"
+ path: "{{ remote_tmp_dir_test }}/afile.txt"
state: absent
- name: Execute the test.sh script with chdir
- shell: "{{ output_dir_test }}/test.sh > {{ output_dir_test }}/afile.txt"
+ shell: "{{ remote_tmp_dir_test }}/test.sh > {{ remote_tmp_dir_test }}/afile.txt"
args:
- chdir: "{{ output_dir_test }}"
- creates: "{{ output_dir_test }}/afile.txt"
+ chdir: "{{ remote_tmp_dir_test }}"
+ creates: "{{ remote_tmp_dir_test }}/afile.txt"
- name: Verify that afile.txt is present
file:
- path: "{{ output_dir_test }}/afile.txt"
+ path: "{{ remote_tmp_dir_test }}/afile.txt"
state: file
# multiline
- name: Remove test file previously created
file:
- path: "{{ output_dir_test }}/afile.txt"
+ path: "{{ remote_tmp_dir_test }}/afile.txt"
state: absent
- name: Execute a shell command using a literal multiline block
@@ -321,7 +381,7 @@
- name: Execute a shell command using a literal multiline block with arguments in it
shell: |
executable="{{ bash.stdout }}"
- creates={{ output_dir_test }}/afile.txt
+ creates={{ remote_tmp_dir_test }}/afile.txt
echo "test"
register: shell_result6
@@ -351,14 +411,14 @@
- shell_result7.stdout == 'One\n Two\n Three'
- name: execute a shell command with no trailing newline to stdin
- shell: cat > {{output_dir_test }}/afile.txt
+ shell: cat > {{remote_tmp_dir_test }}/afile.txt
args:
stdin: test
stdin_add_newline: no
- name: make sure content matches expected
copy:
- dest: "{{output_dir_test }}/afile.txt"
+ dest: "{{remote_tmp_dir_test }}/afile.txt"
content: test
register: shell_result7
failed_when:
@@ -366,14 +426,14 @@
shell_result7 is changed
- name: execute a shell command with trailing newline to stdin
- shell: cat > {{output_dir_test }}/afile.txt
+ shell: cat > {{remote_tmp_dir_test }}/afile.txt
args:
stdin: test
stdin_add_newline: yes
- name: make sure content matches expected
copy:
- dest: "{{output_dir_test }}/afile.txt"
+ dest: "{{remote_tmp_dir_test }}/afile.txt"
content: |
test
register: shell_result8
@@ -382,13 +442,13 @@
shell_result8 is changed
- name: execute a shell command with trailing newline to stdin, default
- shell: cat > {{output_dir_test }}/afile.txt
+ shell: cat > {{remote_tmp_dir_test }}/afile.txt
args:
stdin: test
- name: make sure content matches expected
copy:
- dest: "{{output_dir_test }}/afile.txt"
+ dest: "{{remote_tmp_dir_test }}/afile.txt"
content: |
test
register: shell_result9
@@ -398,7 +458,7 @@
- name: remove the previously created file
file:
- path: "{{ output_dir_test }}/afile.txt"
+ path: "{{ remote_tmp_dir_test }}/afile.txt"
state: absent
- name: test warning with command
@@ -450,6 +510,8 @@
assert:
that:
- "'Command would have run if not in check mode' in result.msg"
+ - result.skipped
+ - not result.changed
- name: test check mode creates/removes message
command:
@@ -462,36 +524,39 @@
assert:
that:
- "'Command would have run if not in check mode' in result.msg"
+ - "'skipped' not in result"
+ - result.changed
- name: command symlink handling
block:
- name: Create target folders
file:
- path: '{{output_dir}}/www_root/site'
+ path: '{{remote_tmp_dir}}/www_root/site'
state: directory
- name: Create symlink
file:
- path: '{{output_dir}}/www'
+ path: '{{remote_tmp_dir}}/www'
state: link
- src: '{{output_dir}}/www_root'
+ src: '{{remote_tmp_dir}}/www_root'
- name: check parent using chdir
shell: dirname "$PWD"
args:
- chdir: '{{output_dir}}/www/site'
+ chdir: '{{remote_tmp_dir}}/www/site'
register: parent_dir_chdir
- name: check parent using cd
- shell: cd "{{output_dir}}/www/site" && dirname "$PWD"
+ shell: cd "{{remote_tmp_dir}}/www/site" && dirname "$PWD"
register: parent_dir_cd
- name: check expected outputs
assert:
that:
- parent_dir_chdir.stdout != parent_dir_cd.stdout
- - 'parent_dir_cd.stdout == "{{output_dir}}/www"'
- - 'parent_dir_chdir.stdout == "{{output_dir}}/www_root"'
+ # These tests use endswith to get around /private/tmp on macOS
+ - 'parent_dir_cd.stdout.endswith(remote_tmp_dir ~ "/www")'
+ - 'parent_dir_chdir.stdout.endswith(remote_tmp_dir ~ "/www_root")'
- name: Set print error command for Python 2
set_fact:
diff --git a/test/integration/targets/connection_local/aliases b/test/integration/targets/connection_local/aliases
index b5983214..0ca054fa 100644
--- a/test/integration/targets/connection_local/aliases
+++ b/test/integration/targets/connection_local/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+needs/target/connection
diff --git a/test/integration/targets/connection_local/runme.sh b/test/integration/targets/connection_local/runme.sh
index 70aa5dbd..a2c32adf 120000..100755
--- a/test/integration/targets/connection_local/runme.sh
+++ b/test/integration/targets/connection_local/runme.sh
@@ -1 +1,14 @@
-../connection_posix/test.sh \ No newline at end of file
+#!/usr/bin/env bash
+
+set -eux
+
+group=local
+
+cd ../connection
+
+INVENTORY="../connection_${group}/test_connection.inventory" ./test.sh \
+ -e target_hosts="${group}" \
+ -e action_prefix= \
+ -e local_tmp=/tmp/ansible-local \
+ -e remote_tmp=/tmp/ansible-remote \
+ "$@"
diff --git a/test/integration/targets/connection_paramiko_ssh/aliases b/test/integration/targets/connection_paramiko_ssh/aliases
index fd5b08a4..aa7fd949 100644
--- a/test/integration/targets/connection_paramiko_ssh/aliases
+++ b/test/integration/targets/connection_paramiko_ssh/aliases
@@ -1,4 +1,5 @@
needs/ssh
shippable/posix/group3
needs/target/setup_paramiko
+needs/target/connection
destructive # potentially installs/uninstalls OS packages via setup_paramiko
diff --git a/test/integration/targets/connection_paramiko_ssh/test.sh b/test/integration/targets/connection_paramiko_ssh/test.sh
index 70aa5dbd..de1ae673 120000..100755
--- a/test/integration/targets/connection_paramiko_ssh/test.sh
+++ b/test/integration/targets/connection_paramiko_ssh/test.sh
@@ -1 +1,14 @@
-../connection_posix/test.sh \ No newline at end of file
+#!/usr/bin/env bash
+
+set -eux
+
+group=paramiko_ssh
+
+cd ../connection
+
+INVENTORY="../connection_${group}/test_connection.inventory" ./test.sh \
+ -e target_hosts="${group}" \
+ -e action_prefix= \
+ -e local_tmp=/tmp/ansible-local \
+ -e remote_tmp=/tmp/ansible-remote \
+ "$@"
diff --git a/test/integration/targets/connection_posix/aliases b/test/integration/targets/connection_posix/aliases
deleted file mode 100644
index f5e09799..00000000
--- a/test/integration/targets/connection_posix/aliases
+++ /dev/null
@@ -1,2 +0,0 @@
-needs/target/connection
-hidden
diff --git a/test/integration/targets/connection_posix/test.sh b/test/integration/targets/connection_posix/test.sh
deleted file mode 100755
index d3976ff3..00000000
--- a/test/integration/targets/connection_posix/test.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env bash
-
-set -eux
-
-# Connection tests for POSIX platforms use this script by linking to it from the appropriate 'connection_' target dir.
-# The name of the inventory group to test is extracted from the directory name following the 'connection_' prefix.
-
-group=$(python -c \
- "from os import path; print(path.basename(path.abspath(path.dirname('$0'))).replace('connection_', ''))")
-
-cd ../connection
-
-INVENTORY="../connection_${group}/test_connection.inventory" ./test.sh \
- -e target_hosts="${group}" \
- -e action_prefix= \
- -e local_tmp=/tmp/ansible-local \
- -e remote_tmp=/tmp/ansible-remote \
- "$@"
diff --git a/test/integration/targets/connection_ssh/aliases b/test/integration/targets/connection_ssh/aliases
index 50fb8eb8..baa04acb 100644
--- a/test/integration/targets/connection_ssh/aliases
+++ b/test/integration/targets/connection_ssh/aliases
@@ -1,2 +1,3 @@
needs/ssh
shippable/posix/group1
+needs/target/connection
diff --git a/test/integration/targets/connection_ssh/posix.sh b/test/integration/targets/connection_ssh/posix.sh
index 70aa5dbd..8f036fbb 120000..100755
--- a/test/integration/targets/connection_ssh/posix.sh
+++ b/test/integration/targets/connection_ssh/posix.sh
@@ -1 +1,14 @@
-../connection_posix/test.sh \ No newline at end of file
+#!/usr/bin/env bash
+
+set -eux
+
+group=ssh
+
+cd ../connection
+
+INVENTORY="../connection_${group}/test_connection.inventory" ./test.sh \
+ -e target_hosts="${group}" \
+ -e action_prefix= \
+ -e local_tmp=/tmp/ansible-local \
+ -e remote_tmp=/tmp/ansible-remote \
+ "$@"
diff --git a/test/integration/targets/connection_windows_ssh/aliases b/test/integration/targets/connection_windows_ssh/aliases
index 45a48f09..af3f193f 100644
--- a/test/integration/targets/connection_windows_ssh/aliases
+++ b/test/integration/targets/connection_windows_ssh/aliases
@@ -1,6 +1,5 @@
windows
shippable/windows/group1
shippable/windows/smoketest
-skip/windows/2008 # Windows Server 2008 does not support Win32-OpenSSH
needs/target/connection
needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/connection_winrm/aliases b/test/integration/targets/connection_winrm/aliases
index b3e9b8bc..af3f193f 100644
--- a/test/integration/targets/connection_winrm/aliases
+++ b/test/integration/targets/connection_winrm/aliases
@@ -2,3 +2,4 @@ windows
shippable/windows/group1
shippable/windows/smoketest
needs/target/connection
+needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/connection_winrm/runme.sh b/test/integration/targets/connection_winrm/runme.sh
index e6772415..36a7aa85 100755
--- a/test/integration/targets/connection_winrm/runme.sh
+++ b/test/integration/targets/connection_winrm/runme.sh
@@ -16,3 +16,8 @@ INVENTORY="${OUTPUT_DIR}/test_connection.inventory" ./test.sh \
-e local_tmp=/tmp/ansible-local \
-e remote_tmp=c:/windows/temp/ansible-remote \
"$@"
+
+cd ../connection_winrm
+
+ansible-playbook -i "${OUTPUT_DIR}/test_connection.inventory" tests.yml \
+ "$@"
diff --git a/test/integration/targets/connection_winrm/tests.yml b/test/integration/targets/connection_winrm/tests.yml
new file mode 100644
index 00000000..78f92a49
--- /dev/null
+++ b/test/integration/targets/connection_winrm/tests.yml
@@ -0,0 +1,28 @@
+---
+- name: test out Windows WinRM specific tests
+ hosts: windows
+ force_handlers: yes
+ serial: 1
+ gather_facts: no
+
+ tasks:
+ - name: setup remote tmp dir
+ import_role:
+ name: ../../setup_remote_tmp_dir
+
+ - name: copy across empty file
+ win_copy:
+ content: ''
+ dest: '{{ remote_tmp_dir }}\empty.txt'
+ register: winrm_copy_empty
+
+ - name: get result of copy across empty file
+ win_stat:
+ path: '{{ remote_tmp_dir }}\empty.txt'
+ register: winrm_copy_empty_actual
+
+ - name: assert copy across empty file
+ assert:
+ that:
+ - winrm_copy_empty is changed
+ - winrm_copy_empty_actual.stat.size == 0
diff --git a/test/integration/targets/copy/aliases b/test/integration/targets/copy/aliases
index db9bbd8c..961b2051 100644
--- a/test/integration/targets/copy/aliases
+++ b/test/integration/targets/copy/aliases
@@ -1,4 +1,3 @@
needs/root
shippable/posix/group2
destructive
-skip/aix
diff --git a/test/integration/targets/incidental_win_copy/files-different/vault/folder/nested-vault-file b/test/integration/targets/copy/files-different/vault/folder/nested-vault-file
index d8d15498..d8d15498 100644
--- a/test/integration/targets/incidental_win_copy/files-different/vault/folder/nested-vault-file
+++ b/test/integration/targets/copy/files-different/vault/folder/nested-vault-file
diff --git a/test/integration/targets/incidental_win_copy/files-different/vault/readme.txt b/test/integration/targets/copy/files-different/vault/readme.txt
index dae883b5..0a30d8e0 100644
--- a/test/integration/targets/incidental_win_copy/files-different/vault/readme.txt
+++ b/test/integration/targets/copy/files-different/vault/readme.txt
@@ -1,5 +1,5 @@
This directory contains some files that have been encrypted with ansible-vault.
-This is to test out the decrypt parameter in win_copy.
+This is to test out the decrypt parameter in copy.
The password is: password
diff --git a/test/integration/targets/incidental_win_copy/files-different/vault/vault-file b/test/integration/targets/copy/files-different/vault/vault-file
index 2fff7619..2fff7619 100644
--- a/test/integration/targets/incidental_win_copy/files-different/vault/vault-file
+++ b/test/integration/targets/copy/files-different/vault/vault-file
diff --git a/test/integration/targets/copy/files/subdir/subdir1/bar.txt b/test/integration/targets/copy/files/subdir/subdir1/bar.txt
deleted file mode 120000
index 315e865d..00000000
--- a/test/integration/targets/copy/files/subdir/subdir1/bar.txt
+++ /dev/null
@@ -1 +0,0 @@
-../bar.txt \ No newline at end of file
diff --git a/test/integration/targets/incidental_win_copy/files/empty.txt b/test/integration/targets/copy/files/subdir/subdir1/empty.txt
index e69de29b..e69de29b 100644
--- a/test/integration/targets/incidental_win_copy/files/empty.txt
+++ b/test/integration/targets/copy/files/subdir/subdir1/empty.txt
diff --git a/test/integration/targets/copy/meta/main.yml b/test/integration/targets/copy/meta/main.yml
index 06d4fd29..e655a4f1 100644
--- a/test/integration/targets/copy/meta/main.yml
+++ b/test/integration/targets/copy/meta/main.yml
@@ -1,3 +1,4 @@
dependencies:
- prepare_tests
- setup_nobody
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/copy/tasks/main.yml b/test/integration/targets/copy/tasks/main.yml
index bef182b8..a5211de1 100644
--- a/test/integration/targets/copy/tasks/main.yml
+++ b/test/integration/targets/copy/tasks/main.yml
@@ -7,7 +7,7 @@
- set_fact:
local_temp_dir: '{{ tempfile_result.stdout }}'
- remote_dir: '{{ output_dir }}'
+ remote_dir: '{{ remote_tmp_dir }}/copy'
symlinks:
ansible-test-abs-link: /tmp/ansible-test-abs-link
ansible-test-abs-link-dir: /tmp/ansible-test-abs-link-dir
@@ -16,6 +16,7 @@
invalid2: ../invalid
out_of_tree_circle: /tmp/ansible-test-link-dir/out_of_tree_circle
subdir3: ../subdir2/subdir3
+ bar.txt: ../bar.txt
- file: path={{local_temp_dir}} state=directory
name: ensure temp dir exists
diff --git a/test/integration/targets/copy/tasks/tests.yml b/test/integration/targets/copy/tasks/tests.yml
index fa4254c7..2c419690 100644
--- a/test/integration/targets/copy/tasks/tests.yml
+++ b/test/integration/targets/copy/tasks/tests.yml
@@ -2259,3 +2259,29 @@
- "dest_dir_readwrite_stat.stat.mode == '0644'"
- "dest_dir_executable_stat.stat.mode == '0755'"
- "dest_dir_readonly_stat.stat.mode == '0444'"
+
+- name: fail to copy an encrypted file without the password set
+ copy:
+ src: '{{role_path}}/files-different/vault/vault-file'
+ dest: '{{remote_tmp_dir}}/copy/file'
+ register: fail_copy_encrypted_file
+ ignore_errors: yes # NOTE: failed_when does not work in this case
+
+- name: assert failure message when copying an encrypted file without the password set
+ assert:
+ that:
+ - fail_copy_encrypted_file is failed
+ - fail_copy_encrypted_file.msg == 'A vault password or secret must be specified to decrypt {{role_path}}/files-different/vault/vault-file'
+
+- name: fail to copy a directory with an encrypted file without the password
+ copy:
+ src: '{{role_path}}/files-different/vault'
+ dest: '{{remote_tmp_dir}}/copy'
+ register: fail_copy_directory_with_enc_file
+ ignore_errors: yes
+
+- name: assert failure message when copying a directory that contains an encrypted file without the password set
+ assert:
+ that:
+ - fail_copy_directory_with_enc_file is failed
+ - fail_copy_directory_with_enc_file.msg == 'A vault password or secret must be specified to decrypt {{role_path}}/files-different/vault/vault-file'
diff --git a/test/integration/targets/cron/aliases b/test/integration/targets/cron/aliases
index b2033afd..e1a9ab48 100644
--- a/test/integration/targets/cron/aliases
+++ b/test/integration/targets/cron/aliases
@@ -1,5 +1,4 @@
destructive
shippable/posix/group4
-skip/aix
skip/osx
skip/macos
diff --git a/test/integration/targets/debconf/aliases b/test/integration/targets/debconf/aliases
index f8e28c7e..a6dafcf8 100644
--- a/test/integration/targets/debconf/aliases
+++ b/test/integration/targets/debconf/aliases
@@ -1,2 +1 @@
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/debugger/aliases b/test/integration/targets/debugger/aliases
new file mode 100644
index 00000000..7bf2c121
--- /dev/null
+++ b/test/integration/targets/debugger/aliases
@@ -0,0 +1,3 @@
+shippable/posix/group1
+context/controller
+setup/always/setup_pexpect
diff --git a/test/integration/targets/debugger/inventory b/test/integration/targets/debugger/inventory
new file mode 100644
index 00000000..81502d55
--- /dev/null
+++ b/test/integration/targets/debugger/inventory
@@ -0,0 +1,2 @@
+testhost ansible_connection=local
+testhost2 ansible_connection=local
diff --git a/test/integration/targets/debugger/runme.sh b/test/integration/targets/debugger/runme.sh
new file mode 100755
index 00000000..6a51d23d
--- /dev/null
+++ b/test/integration/targets/debugger/runme.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+set -eux
+
+./test_run_once.py -i inventory "$@"
diff --git a/test/integration/targets/debugger/test_run_once.py b/test/integration/targets/debugger/test_run_once.py
new file mode 100755
index 00000000..237f9c2d
--- /dev/null
+++ b/test/integration/targets/debugger/test_run_once.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+import io
+import os
+import sys
+
+import pexpect
+
+
+env_vars = {
+ 'ANSIBLE_NOCOLOR': 'True',
+ 'ANSIBLE_RETRY_FILES_ENABLED': 'False',
+}
+
+env = os.environ.copy()
+env.update(env_vars)
+
+with io.BytesIO() as logfile:
+ debugger_test_test = pexpect.spawn(
+ 'ansible-playbook',
+ args=['test_run_once_playbook.yml'] + sys.argv[1:],
+ timeout=10,
+ env=env
+ )
+
+ debugger_test_test.logfile = logfile
+
+ debugger_test_test.expect_exact('TASK: Task 1 (debug)> ')
+ debugger_test_test.send('task.args["that"] = "true"\r')
+ debugger_test_test.expect_exact('TASK: Task 1 (debug)> ')
+ debugger_test_test.send('r\r')
+ debugger_test_test.expect(pexpect.EOF)
+ debugger_test_test.close()
+
+ assert str(logfile.getvalue()).count('Task 2 executed') == 2
diff --git a/test/integration/targets/debugger/test_run_once_playbook.yml b/test/integration/targets/debugger/test_run_once_playbook.yml
new file mode 100644
index 00000000..ede3a537
--- /dev/null
+++ b/test/integration/targets/debugger/test_run_once_playbook.yml
@@ -0,0 +1,12 @@
+- hosts: testhost, testhost2
+ gather_facts: false
+ debugger: on_failed
+ tasks:
+ - name: Task 1
+ assert:
+ that: 'false'
+ run_once: yes
+
+ - name: Task 2
+ debug:
+ msg: "Task 2 executed"
diff --git a/test/integration/targets/delegate_to/delegate_facts_loop.yml b/test/integration/targets/delegate_to/delegate_facts_loop.yml
index 28a1488d..b05c4064 100644
--- a/test/integration/targets/delegate_to/delegate_facts_loop.yml
+++ b/test/integration/targets/delegate_to/delegate_facts_loop.yml
@@ -17,7 +17,7 @@
that:
- "'test' in hostvars[item]"
- hostvars[item]['test'] == 123
- loop: "{{ groups['all'] | difference(['localhost']) }}"
+ loop: "{{ groups['all'] | difference(['localhost'])}}"
- name: test that we don't polute whole group with one value
diff --git a/test/integration/targets/dnf/aliases b/test/integration/targets/dnf/aliases
index 4d1afd64..e555959e 100644
--- a/test/integration/targets/dnf/aliases
+++ b/test/integration/targets/dnf/aliases
@@ -1,6 +1,5 @@
destructive
shippable/posix/group4
-skip/aix
skip/power/centos
skip/freebsd
skip/osx
diff --git a/test/integration/targets/dnf/tasks/dnf.yml b/test/integration/targets/dnf/tasks/dnf.yml
index bf1ea848..22e2fb15 100644
--- a/test/integration/targets/dnf/tasks/dnf.yml
+++ b/test/integration/targets/dnf/tasks/dnf.yml
@@ -488,7 +488,7 @@
- name: try to install from non existing url
dnf:
- name: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/dnf/non-existing-1.0.0.fc26.noarch.rpm
+ name: https://ci-files.testing.ansible.com/test/integration/targets/dnf/non-existing-1.0.0.fc26.noarch.rpm
state: present
register: dnf_result
ignore_errors: yes
@@ -583,7 +583,7 @@
- name: try to install not compatible arch rpm, should fail
dnf:
- name: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/dnf/banner-1.3.4-3.el7.ppc64le.rpm
+ name: https://ci-files.testing.ansible.com/test/integration/targets/dnf/banner-1.3.4-3.el7.ppc64le.rpm
state: present
register: dnf_result
ignore_errors: True
diff --git a/test/integration/targets/dnf/tasks/filters.yml b/test/integration/targets/dnf/tasks/filters.yml
index 2bff25bc..1ce9b668 100644
--- a/test/integration/targets/dnf/tasks/filters.yml
+++ b/test/integration/targets/dnf/tasks/filters.yml
@@ -2,7 +2,7 @@
# from its repomd.xml.
- block:
- set_fact:
- updateinfo_repo: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/setup_rpm_repo/repo-with-updateinfo
+ updateinfo_repo: https://ci-files.testing.ansible.com/test/integration/targets/setup_rpm_repo/repo-with-updateinfo
- name: Install the test repo
yum_repository:
diff --git a/test/integration/targets/dnf/tasks/filters_check_mode.yml b/test/integration/targets/dnf/tasks/filters_check_mode.yml
index 024ac066..c931c072 100644
--- a/test/integration/targets/dnf/tasks/filters_check_mode.yml
+++ b/test/integration/targets/dnf/tasks/filters_check_mode.yml
@@ -2,7 +2,7 @@
# from its repomd.xml.
- block:
- set_fact:
- updateinfo_repo: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/setup_rpm_repo/repo-with-updateinfo
+ updateinfo_repo: https://ci-files.testing.ansible.com/test/integration/targets/setup_rpm_repo/repo-with-updateinfo
- name: Install the test repo
yum_repository:
diff --git a/test/integration/targets/dnf/tasks/gpg.yml b/test/integration/targets/dnf/tasks/gpg.yml
index 184270a0..72bdee02 100644
--- a/test/integration/targets/dnf/tasks/gpg.yml
+++ b/test/integration/targets/dnf/tasks/gpg.yml
@@ -10,10 +10,11 @@
- '{{ pkg_name }}'
state: absent
- - name: Install rpm-sign
+ - name: Install rpm-sign and attr
dnf:
name:
- rpm-sign
+ - attr
state: present
- name: Create directory to use as local repo
@@ -31,6 +32,14 @@
- name: Unsign the RPM
shell: rpmsign --delsign {{ remote_tmp_dir }}/unsigned/{{ pkg_name }}*
+ # In RHEL 8.5 dnf uses libdnf to do checksum verification, which caches the checksum on an xattr of the file
+ # itself, so we need to clear that cache
+ - name: Clear libdnf checksum cache
+ shell: setfattr -x user.Librepo.checksum.sha256 {{ remote_tmp_dir }}/unsigned/{{ pkg_name }}*
+ when: ansible_distribution in ['RedHat', 'CentOS'] and
+ ansible_distribution_version is version('8.5', '>=') and
+ ansible_distribution_version is version('9', '<')
+
- name: createrepo
command: createrepo .
args:
@@ -57,12 +66,14 @@
that:
- res is failed
- "'Failed to validate GPG signature' in res.msg"
+ - "'is not signed' in res.msg"
always:
- - name: Remove rpm-sign (and test package if it got installed)
+ - name: Remove rpm-sign and attr (and test package if it got installed)
dnf:
name:
- rpm-sign
+ - attr
- "{{ pkg_name }}"
state: absent
diff --git a/test/integration/targets/dnf/tasks/main.yml b/test/integration/targets/dnf/tasks/main.yml
index d66a0653..591dc33a 100644
--- a/test/integration/targets/dnf/tasks/main.yml
+++ b/test/integration/targets/dnf/tasks/main.yml
@@ -56,6 +56,8 @@
- include_tasks: modularity.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('29', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
+ tags:
+ - dnf_modularity
- include_tasks: logging.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('31', '>=')) or
@@ -66,10 +68,12 @@
#
# This fails due to conflicts on Fedora 34, but we can nuke this entirely once
# #74224 lands, because it covers nobest cases.
+# Skipped in RHEL9 by changing the version test to == instead of >=
+# due to missing RHEL9 docker-ce packages currently
- include_tasks: nobest.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('24', '>=') and
ansible_distribution_major_version is version('34', '!=')) or
- (ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
+ (ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '=='))
- include_tasks: cacheonly.yml
diff --git a/test/integration/targets/dnf/vars/Fedora-35.yml b/test/integration/targets/dnf/vars/Fedora-35.yml
new file mode 100644
index 00000000..f99228fb
--- /dev/null
+++ b/test/integration/targets/dnf/vars/Fedora-35.yml
@@ -0,0 +1,2 @@
+astream_name: '@varnish:6.0/default'
+astream_name_no_stream: '@varnish/default'
diff --git a/test/integration/targets/dnf/vars/RedHat-9.yml b/test/integration/targets/dnf/vars/RedHat-9.yml
new file mode 100644
index 00000000..e700a9b9
--- /dev/null
+++ b/test/integration/targets/dnf/vars/RedHat-9.yml
@@ -0,0 +1,2 @@
+astream_name: '@container-tools:latest/common'
+astream_name_no_stream: '@container-tools/common'
diff --git a/test/integration/targets/dpkg_selections/aliases b/test/integration/targets/dpkg_selections/aliases
index 55da8c88..c0d5684b 100644
--- a/test/integration/targets/dpkg_selections/aliases
+++ b/test/integration/targets/dpkg_selections/aliases
@@ -1,6 +1,5 @@
shippable/posix/group1
destructive
-skip/aix
skip/freebsd
skip/osx
skip/macos
diff --git a/test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml b/test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml
index cd50f436..080db262 100644
--- a/test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml
+++ b/test/integration/targets/dpkg_selections/tasks/dpkg_selections.yaml
@@ -1,5 +1,5 @@
- name: download and install old version of hello
- apt: "deb=https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/dpkg_selections/hello_{{ hello_old_version }}_amd64.deb"
+ apt: "deb=https://ci-files.testing.ansible.com/test/integration/targets/dpkg_selections/hello_{{ hello_old_version }}_amd64.deb"
- name: freeze version for hello
dpkg_selections:
diff --git a/test/integration/targets/entry_points/aliases b/test/integration/targets/entry_points/aliases
new file mode 100644
index 00000000..45c1d4ee
--- /dev/null
+++ b/test/integration/targets/entry_points/aliases
@@ -0,0 +1,2 @@
+context/controller
+shippable/posix/group5
diff --git a/test/integration/targets/entry_points/runme.sh b/test/integration/targets/entry_points/runme.sh
new file mode 100755
index 00000000..bb213694
--- /dev/null
+++ b/test/integration/targets/entry_points/runme.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+
+set -eu
+source virtualenv.sh
+set +x
+unset PYTHONPATH
+export SETUPTOOLS_USE_DISTUTILS=stdlib
+
+base_dir="$(dirname "$(dirname "$(dirname "$(dirname "${OUTPUT_DIR}")")")")"
+bin_dir="$(dirname "$(command -v pip)")"
+
+# deps are already installed, using --no-deps to avoid re-installing them
+pip install "${base_dir}" --disable-pip-version-check --no-deps
+# --use-feature=in-tree-build not available on all platforms
+
+for bin in "${bin_dir}/ansible"*; do
+ name="$(basename "${bin}")"
+
+ entry_point="${name//ansible-/}"
+ entry_point="${entry_point//ansible/adhoc}"
+
+ echo "=== ${name} (${entry_point})=${bin} ==="
+
+ if [ "${name}" == "ansible-test" ]; then
+ "${bin}" --help | tee /dev/stderr | grep -Eo "^usage:\ ansible-test\ .*"
+ python -m ansible "${entry_point}" --help | tee /dev/stderr | grep -Eo "^usage:\ ansible-test\ .*"
+ else
+ "${bin}" --version | tee /dev/stderr | grep -Eo "(^${name}\ \[core\ .*|executable location = ${bin}$)"
+ python -m ansible "${entry_point}" --version | tee /dev/stderr | grep -Eo "(^${name}\ \[core\ .*|executable location = ${bin}$)"
+ fi
+done
diff --git a/test/integration/targets/incidental_mongodb_parameter/meta/main.yml b/test/integration/targets/expect/meta/main.yml
index 10fc3936..1810d4be 100644
--- a/test/integration/targets/incidental_mongodb_parameter/meta/main.yml
+++ b/test/integration/targets/expect/meta/main.yml
@@ -1,3 +1,2 @@
dependencies:
- - incidental_setup_mongodb
- setup_remote_tmp_dir
diff --git a/test/integration/targets/expect/tasks/main.yml b/test/integration/targets/expect/tasks/main.yml
index 168663c9..7316b3e7 100644
--- a/test/integration/targets/expect/tasks/main.yml
+++ b/test/integration/targets/expect/tasks/main.yml
@@ -20,13 +20,13 @@
name: setup_pexpect
- name: record the test_command file
- set_fact: test_command_file={{output_dir | expanduser}}/test_command.py
+ set_fact: test_command_file={{remote_tmp_dir | expanduser}}/test_command.py
- name: copy script into output directory
copy: src=test_command.py dest={{test_command_file}} mode=0444
- name: record the output file
- set_fact: output_file={{output_dir}}/foo.txt
+ set_fact: output_file={{remote_tmp_dir}}/foo.txt
- copy:
content: "foo"
@@ -104,7 +104,7 @@
- name: test chdir
expect:
command: "/bin/sh -c 'pwd && sleep 1'"
- chdir: "{{output_dir}}"
+ chdir: "{{remote_tmp_dir}}"
responses:
foo: bar
register: chdir_result
@@ -112,7 +112,7 @@
- name: assert chdir works
assert:
that:
- - "'{{chdir_result.stdout |expanduser | realpath }}' == '{{output_dir | expanduser | realpath}}'"
+ - "'{{chdir_result.stdout |expanduser | realpath }}' == '{{remote_tmp_dir | expanduser | realpath}}'"
- name: test timeout option
expect:
diff --git a/test/integration/targets/facts_d/aliases b/test/integration/targets/facts_d/aliases
index 90ea9e12..6452e6d4 100644
--- a/test/integration/targets/facts_d/aliases
+++ b/test/integration/targets/facts_d/aliases
@@ -1,2 +1,2 @@
shippable/posix/group2
-context/controller
+context/target
diff --git a/test/integration/targets/facts_d/meta/main.yml b/test/integration/targets/facts_d/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/facts_d/meta/main.yml
+++ b/test/integration/targets/facts_d/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/facts_d/tasks/main.yml b/test/integration/targets/facts_d/tasks/main.yml
index aadef4c6..7f7b5f96 100644
--- a/test/integration/targets/facts_d/tasks/main.yml
+++ b/test/integration/targets/facts_d/tasks/main.yml
@@ -4,7 +4,7 @@
- name: prep for local facts tests
block:
- name: set factdir var
- set_fact: fact_dir={{output_dir}}/facts.d
+ set_fact: fact_dir={{remote_tmp_dir}}/facts.d
- name: create fact dir
file: path={{ fact_dir }} state=directory
diff --git a/test/integration/targets/file/aliases b/test/integration/targets/file/aliases
index 4a2ce27c..6bd893d4 100644
--- a/test/integration/targets/file/aliases
+++ b/test/integration/targets/file/aliases
@@ -1,3 +1,2 @@
shippable/posix/group2
needs/root
-skip/aix
diff --git a/test/integration/targets/file/meta/main.yml b/test/integration/targets/file/meta/main.yml
index 06d4fd29..e655a4f1 100644
--- a/test/integration/targets/file/meta/main.yml
+++ b/test/integration/targets/file/meta/main.yml
@@ -1,3 +1,4 @@
dependencies:
- prepare_tests
- setup_nobody
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/file/tasks/directory_as_dest.yml b/test/integration/targets/file/tasks/directory_as_dest.yml
index 85451e43..161a12a4 100644
--- a/test/integration/targets/file/tasks/directory_as_dest.yml
+++ b/test/integration/targets/file/tasks/directory_as_dest.yml
@@ -15,12 +15,12 @@
- name: create a test sub-directory
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: directory
- name: create a file for linking to
copy:
- dest: '{{output_dir}}/file_to_link'
+ dest: '{{remote_tmp_dir_test}}/file_to_link'
content: 'Hello World'
#
@@ -30,7 +30,7 @@
# file raises an error
- name: Try to create a file with directory as dest
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: file
force: False
ignore_errors: True
@@ -38,7 +38,7 @@
- name: Get stat info to show the directory has not been changed to a file
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file1_dir_stat
@@ -51,8 +51,8 @@
# link raises an error
- name: Try to create a symlink with directory as dest
file:
- src: '{{ output_dir }}/file_to_link'
- dest: '{{output_dir}}/sub1'
+ src: '{{ remote_tmp_dir_test }}/file_to_link'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: link
force: False
ignore_errors: True
@@ -60,7 +60,7 @@
- name: Get stat info to show the directory has not been changed to a file
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file2_dir_stat
@@ -76,12 +76,12 @@
- copy:
content: 'test'
- dest: '{{ output_dir }}/sub1/passwd'
+ dest: '{{ remote_tmp_dir_test }}/sub1/passwd'
# file raises an error
- name: Try to create a file with directory as dest
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: file
force: True
ignore_errors: True
@@ -89,7 +89,7 @@
- name: Get stat info to show the directory has not been changed to a file
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file3_dir_stat
@@ -102,8 +102,8 @@
# link raises an error
- name: Try to create a symlink with directory as dest
file:
- src: '{{ output_dir }}/file_to_link'
- dest: '{{output_dir}}/sub1'
+ src: '{{ remote_tmp_dir_test }}/file_to_link'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: link
force: True
ignore_errors: True
@@ -111,7 +111,7 @@
- name: Get stat info to show the directory has not been changed to a file
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file4_dir_stat
@@ -125,7 +125,7 @@
- name: Cleanup the file that made the directory nonempty
file:
state: 'absent'
- dest: '{{ output_dir }}/sub1/passwd'
+ dest: '{{ remote_tmp_dir_test }}/sub1/passwd'
#
# Error condition: file cannot even overwrite an empty directory with force=True
@@ -134,7 +134,7 @@
# file raises an error
- name: Try to create a file with directory as dest
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: file
force: True
ignore_errors: True
@@ -142,7 +142,7 @@
- name: Get stat info to show the directory has not been changed to a file
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file5_dir_stat
@@ -159,15 +159,15 @@
# link can overwrite an empty directory with force=True
- name: Try to create a symlink with directory as dest
file:
- src: '{{ output_dir }}/file_to_link'
- dest: '{{output_dir}}/sub1'
+ src: '{{ remote_tmp_dir_test }}/file_to_link'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: link
force: True
register: file6_result
- name: Get stat info to show the directory has been overwritten
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file6_dir_stat
@@ -184,12 +184,12 @@
- name: Cleanup the test subdirectory
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: 'absent'
- name: Re-create the test sub-directory
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: 'directory'
#
@@ -198,8 +198,8 @@
- name: Try to create a hardlink with directory as dest
file:
- src: '{{ output_dir }}/file_to_link'
- dest: '{{ output_dir }}/sub1'
+ src: '{{ remote_tmp_dir_test }}/file_to_link'
+ dest: '{{ remote_tmp_dir_test }}/sub1'
state: hard
force: False
ignore_errors: True
@@ -207,13 +207,13 @@
- name: Get stat info to show the directory has not been changed to a file
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file7_dir_stat
- name: Get stat info to show the link has been created
stat:
- path: '{{ output_dir }}/sub1/file_to_link'
+ path: '{{ remote_tmp_dir_test }}/sub1/file_to_link'
follow: False
register: file7_link_stat
@@ -238,7 +238,7 @@
- name: Get initial stat info to compare with later
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file8_initial_dir_stat
@@ -248,14 +248,14 @@
- name: Use touch with directory as dest
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: touch
force: False
register: file8_result
- name: Get stat info to show the directory has not been changed to a file
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file8_dir_stat
@@ -268,13 +268,13 @@
- name: Get initial stat info to compare with later
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file11_initial_dir_stat
- name: Use touch with directory as dest and keep mtime and atime
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: touch
force: False
modification_time: preserve
@@ -283,7 +283,7 @@
- name: Get stat info to show the directory has not been changed
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file11_dir_stat
@@ -300,20 +300,20 @@
#
- name: Get initial stat info to compare with later
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file9_initial_dir_stat
- name: Use directory with directory as dest
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: directory
force: False
register: file9_result
- name: Get stat info to show the directory has not been changed
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file9_dir_stat
@@ -326,14 +326,14 @@
- name: Use directory with directory as dest and force=True
file:
- dest: '{{output_dir}}/sub1'
+ dest: '{{remote_tmp_dir_test}}/sub1'
state: directory
force: True
register: file10_result
- name: Get stat info to show the directory has not been changed
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file10_dir_stat
diff --git a/test/integration/targets/file/tasks/initialize.yml b/test/integration/targets/file/tasks/initialize.yml
index dd7d1274..ad9f6649 100644
--- a/test/integration/targets/file/tasks/initialize.yml
+++ b/test/integration/targets/file/tasks/initialize.yml
@@ -3,12 +3,12 @@
#
- name: Cleanup the output directory
file:
- dest: '{{ output_dir }}'
+ dest: '{{ remote_tmp_dir_test }}'
state: 'absent'
- name: Recreate the toplevel output dir
file:
- dest: '{{ output_dir }}'
+ dest: '{{ remote_tmp_dir_test }}'
state: 'directory'
- name: prep with a basic file to operate on
diff --git a/test/integration/targets/file/tasks/main.yml b/test/integration/targets/file/tasks/main.yml
index c96beba3..a74cbc28 100644
--- a/test/integration/targets/file/tasks/main.yml
+++ b/test/integration/targets/file/tasks/main.yml
@@ -16,7 +16,11 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-- set_fact: output_file={{output_dir}}/foo.txt
+- set_fact:
+ remote_tmp_dir_test: '{{ remote_tmp_dir }}/file'
+
+- set_fact:
+ output_file: '{{remote_tmp_dir_test}}/foo.txt'
# same as expanduser & expandvars called on managed host
- command: 'echo {{ output_file }}'
@@ -48,6 +52,8 @@
- name: Test _diff_peek
import_tasks: diff_peek.yml
+- name: Test modification time
+ import_tasks: modification_time.yml
# These tests need to be organized by state parameter into separate files later
@@ -81,7 +87,7 @@
- "'cannot continue' in ghost_file_result.msg"
- name: verify that we are checking an absent file
- file: path={{output_dir}}/bar.txt state=absent
+ file: path={{remote_tmp_dir_test}}/bar.txt state=absent
register: file2_result
- name: verify that the file was marked as changed
@@ -92,7 +98,7 @@
- name: verify we can touch a file
file:
- path: "{{output_dir}}/baz.txt"
+ path: "{{remote_tmp_dir_test}}/baz.txt"
state: touch
mode: '0644'
register: file3_result
@@ -105,7 +111,7 @@
- "file3_result.mode == '0644'"
- name: change file mode
- file: path={{output_dir}}/baz.txt mode=0600
+ file: path={{remote_tmp_dir_test}}/baz.txt mode=0600
register: file4_result
- name: verify that the file was marked as changed
@@ -116,7 +122,7 @@
- name: define file to verify chattr/lsattr with
set_fact:
- attributes_file: "{{ output_dir }}/attributes.txt"
+ attributes_file: "{{ remote_tmp_dir_test }}/attributes.txt"
attributes_supported: no
- name: create file to verify chattr/lsattr with
@@ -150,13 +156,13 @@
- "'A' not in attribute_A_unset.stdout_lines[0].split()[0]"
- name: explicitly set file attribute "A"
- file: path={{output_dir}}/baz.txt attributes=A
+ file: path={{remote_tmp_dir_test}}/baz.txt attributes=A
register: file_attributes_result
ignore_errors: True
when: attributes_supported
- name: add file attribute "A"
- file: path={{output_dir}}/baz.txt attributes=+A
+ file: path={{remote_tmp_dir_test}}/baz.txt attributes=+A
register: file_attributes_result_2
when: file_attributes_result is changed
@@ -167,12 +173,12 @@
when: file_attributes_result is changed
- name: remove file attribute "A"
- file: path={{output_dir}}/baz.txt attributes=-A
+ file: path={{remote_tmp_dir_test}}/baz.txt attributes=-A
register: file_attributes_result_3
ignore_errors: True
- name: explicitly remove file attributes
- file: path={{output_dir}}/baz.txt attributes=""
+ file: path={{remote_tmp_dir_test}}/baz.txt attributes=""
register: file_attributes_result_4
when: file_attributes_result_3 is changed
@@ -195,18 +201,18 @@
notify: remove groups
- name: change ownership and group
- file: path={{output_dir}}/baz.txt owner=1234 group=1234
+ file: path={{remote_tmp_dir_test}}/baz.txt owner=1234 group=1234
- name: Get stat info to check atime later
- stat: path={{output_dir}}/baz.txt
+ stat: path={{remote_tmp_dir_test}}/baz.txt
register: file_attributes_result_5_before
- name: updates access time
- file: path={{output_dir}}/baz.txt access_time=now
+ file: path={{remote_tmp_dir_test}}/baz.txt access_time=now
register: file_attributes_result_5
- name: Get stat info to check atime later
- stat: path={{output_dir}}/baz.txt
+ stat: path={{remote_tmp_dir_test}}/baz.txt
register: file_attributes_result_5_after
- name: verify that the file was marked as changed and atime changed
@@ -239,7 +245,7 @@
file: path=/tmp/worldwritable state=absent
- name: create hard link to file
- file: src={{output_file}} dest={{output_dir}}/hard.txt state=hard
+ file: src={{output_file}} dest={{remote_tmp_dir_test}}/hard.txt state=hard
register: file6_result
- name: verify that the file was marked as changed
@@ -249,7 +255,7 @@
- name: touch a hard link
file:
- dest: '{{ output_dir }}/hard.txt'
+ dest: '{{ remote_tmp_dir_test }}/hard.txt'
state: 'touch'
register: file6_touch_result
@@ -263,7 +269,7 @@
register: hlstat1
- name: stat2
- stat: path={{output_dir}}/hard.txt
+ stat: path={{remote_tmp_dir_test}}/hard.txt
register: hlstat2
- name: verify that hard link is still the same after timestamp updated
@@ -272,7 +278,7 @@
- "hlstat1.stat.inode == hlstat2.stat.inode"
- name: create hard link to file 2
- file: src={{output_file}} dest={{output_dir}}/hard.txt state=hard
+ file: src={{output_file}} dest={{remote_tmp_dir_test}}/hard.txt state=hard
register: hlink_result
- name: verify that hard link creation is idempotent
@@ -281,7 +287,7 @@
- "hlink_result.changed == False"
- name: Change mode on a hard link
- file: src={{output_file}} dest={{output_dir}}/hard.txt mode=0701
+ file: src={{output_file}} dest={{remote_tmp_dir_test}}/hard.txt mode=0701
register: file6_mode_change
- name: verify that the hard link was touched
@@ -294,7 +300,7 @@
register: hlstat1
- name: stat2
- stat: path={{output_dir}}/hard.txt
+ stat: path={{remote_tmp_dir_test}}/hard.txt
register: hlstat2
- name: verify that hard link is still the same after timestamp updated
@@ -304,7 +310,7 @@
- "hlstat1.stat.mode == '0701'"
- name: create a directory
- file: path={{output_dir}}/foobar state=directory
+ file: path={{remote_tmp_dir_test}}/foobar state=directory
register: file7_result
- name: verify that the file was marked as changed
@@ -324,19 +330,19 @@
ignore_errors: true
- name: remove directory foobar
- file: path={{output_dir}}/foobar state=absent
+ file: path={{remote_tmp_dir_test}}/foobar state=absent
- name: remove file foo.txt
- file: path={{output_dir}}/foo.txt state=absent
+ file: path={{remote_tmp_dir_test}}/foo.txt state=absent
- name: remove file bar.txt
- file: path={{output_dir}}/foo.txt state=absent
+ file: path={{remote_tmp_dir_test}}/foo.txt state=absent
- name: remove file baz.txt
- file: path={{output_dir}}/foo.txt state=absent
+ file: path={{remote_tmp_dir_test}}/foo.txt state=absent
- name: copy directory structure over
- copy: src=foobar dest={{output_dir}}
+ copy: src=foobar dest={{remote_tmp_dir_test}}
- name: check what would be removed if folder state was absent and diff is enabled
file:
@@ -345,8 +351,8 @@
check_mode: yes
diff: yes
with_items:
- - "{{ output_dir }}"
- - "{{ output_dir }}/foobar/fileA"
+ - "{{ remote_tmp_dir_test }}"
+ - "{{ remote_tmp_dir_test }}/foobar/fileA"
register: folder_absent_result
- name: 'assert that the "absent" state lists expected files and folders for only directories'
@@ -361,10 +367,10 @@
test_file: "{{ folder_absent_result.results[0].path }}/foobar/fileA"
- name: Change ownership of a directory with recurse=no(default)
- file: path={{output_dir}}/foobar owner=1234
+ file: path={{remote_tmp_dir_test}}/foobar owner=1234
- name: verify that the permission of the directory was set
- file: path={{output_dir}}/foobar state=directory
+ file: path={{remote_tmp_dir_test}}/foobar state=directory
register: file8_result
- name: assert that the directory has changed to have owner 1234
@@ -373,7 +379,7 @@
- "file8_result.uid == 1234"
- name: verify that the permission of a file under the directory was not set
- file: path={{output_dir}}/foobar/fileA state=file
+ file: path={{remote_tmp_dir_test}}/foobar/fileA state=file
register: file9_result
- name: assert the file owner has not changed to 1234
@@ -387,10 +393,10 @@
uid: 1235
- name: change the ownership of a directory with recurse=yes
- file: path={{output_dir}}/foobar owner=1235 recurse=yes
+ file: path={{remote_tmp_dir_test}}/foobar owner=1235 recurse=yes
- name: verify that the permission of the directory was set
- file: path={{output_dir}}/foobar state=directory
+ file: path={{remote_tmp_dir_test}}/foobar state=directory
register: file10_result
- name: assert that the directory has changed to have owner 1235
@@ -399,7 +405,7 @@
- "file10_result.uid == 1235"
- name: verify that the permission of a file under the directory was not set
- file: path={{output_dir}}/foobar/fileA state=file
+ file: path={{remote_tmp_dir_test}}/foobar/fileA state=file
register: file11_result
- name: assert that the file has changed to have owner 1235
@@ -408,7 +414,7 @@
- "file11_result.uid == 1235"
- name: remove directory foobar
- file: path={{output_dir}}/foobar state=absent
+ file: path={{remote_tmp_dir_test}}/foobar state=absent
register: file14_result
- name: verify that the directory was removed
@@ -418,7 +424,7 @@
- 'file14_result.state == "absent"'
- name: create a test sub-directory
- file: dest={{output_dir}}/sub1 state=directory
+ file: dest={{remote_tmp_dir_test}}/sub1 state=directory
register: file15_result
- name: verify that the new directory was created
@@ -428,7 +434,7 @@
- 'file15_result.state == "directory"'
- name: create test files in the sub-directory
- file: dest={{output_dir}}/sub1/{{item}} state=touch
+ file: dest={{remote_tmp_dir_test}}/sub1/{{item}} state=touch
with_items:
- file1
- file2
@@ -443,7 +449,7 @@
with_items: "{{file16_result.results}}"
- name: test file creation with symbolic mode
- file: dest={{output_dir}}/test_symbolic state=touch mode=u=rwx,g=rwx,o=rwx
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=u=rwx,g=rwx,o=rwx
register: result
- name: assert file mode
@@ -452,7 +458,7 @@
- result.mode == '0777'
- name: modify symbolic mode for all
- file: dest={{output_dir}}/test_symbolic state=touch mode=a=r
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=a=r
register: result
- name: assert file mode
@@ -461,7 +467,7 @@
- result.mode == '0444'
- name: modify symbolic mode for owner
- file: dest={{output_dir}}/test_symbolic state=touch mode=u+w
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=u+w
register: result
- name: assert file mode
@@ -470,7 +476,7 @@
- result.mode == '0644'
- name: modify symbolic mode for group
- file: dest={{output_dir}}/test_symbolic state=touch mode=g+w
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=g+w
register: result
- name: assert file mode
@@ -479,7 +485,7 @@
- result.mode == '0664'
- name: modify symbolic mode for world
- file: dest={{output_dir}}/test_symbolic state=touch mode=o+w
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=o+w
register: result
- name: assert file mode
@@ -488,7 +494,7 @@
- result.mode == '0666'
- name: modify symbolic mode for owner
- file: dest={{output_dir}}/test_symbolic state=touch mode=u+x
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=u+x
register: result
- name: assert file mode
@@ -497,7 +503,7 @@
- result.mode == '0766'
- name: modify symbolic mode for group
- file: dest={{output_dir}}/test_symbolic state=touch mode=g+x
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=g+x
register: result
- name: assert file mode
@@ -506,7 +512,7 @@
- result.mode == '0776'
- name: modify symbolic mode for world
- file: dest={{output_dir}}/test_symbolic state=touch mode=o+x
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=o+x
register: result
- name: assert file mode
@@ -515,7 +521,7 @@
- result.mode == '0777'
- name: remove symbolic mode for world
- file: dest={{output_dir}}/test_symbolic state=touch mode=o-wx
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=o-wx
register: result
- name: assert file mode
@@ -524,7 +530,7 @@
- result.mode == '0774'
- name: remove symbolic mode for group
- file: dest={{output_dir}}/test_symbolic state=touch mode=g-wx
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=g-wx
register: result
- name: assert file mode
@@ -533,7 +539,7 @@
- result.mode == '0744'
- name: remove symbolic mode for owner
- file: dest={{output_dir}}/test_symbolic state=touch mode=u-wx
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=u-wx
register: result
- name: assert file mode
@@ -542,7 +548,7 @@
- result.mode == '0444'
- name: set sticky bit with symbolic mode
- file: dest={{output_dir}}/test_symbolic state=touch mode=o+t
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=o+t
register: result
- name: assert file mode
@@ -551,7 +557,7 @@
- result.mode == '01444'
- name: remove sticky bit with symbolic mode
- file: dest={{output_dir}}/test_symbolic state=touch mode=o-t
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=o-t
register: result
- name: assert file mode
@@ -560,7 +566,7 @@
- result.mode == '0444'
- name: add setgid with symbolic mode
- file: dest={{output_dir}}/test_symbolic state=touch mode=g+s
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=g+s
register: result
- name: assert file mode
@@ -569,7 +575,7 @@
- result.mode == '02444'
- name: remove setgid with symbolic mode
- file: dest={{output_dir}}/test_symbolic state=touch mode=g-s
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=g-s
register: result
- name: assert file mode
@@ -578,7 +584,7 @@
- result.mode == '0444'
- name: add setuid with symbolic mode
- file: dest={{output_dir}}/test_symbolic state=touch mode=u+s
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=u+s
register: result
- name: assert file mode
@@ -587,7 +593,7 @@
- result.mode == '04444'
- name: remove setuid with symbolic mode
- file: dest={{output_dir}}/test_symbolic state=touch mode=u-s
+ file: dest={{remote_tmp_dir_test}}/test_symbolic state=touch mode=u-s
register: result
- name: assert file mode
@@ -704,7 +710,7 @@
- group_gid_exists.warnings is not defined
# https://github.com/ansible/ansible/issues/50943
-# Need to use /tmp as nobody can't access output_dir at all
+# Need to use /tmp as the 'nobody' user can't access remote_tmp_dir_test at all
- name: create file as root with all write permissions
file: dest=/tmp/write_utime state=touch mode=0666 owner={{ansible_user_id}}
@@ -743,39 +749,39 @@
# Follow + recursive tests
- name: create a toplevel directory
- file: path={{output_dir}}/test_follow_rec state=directory mode=0755
+ file: path={{remote_tmp_dir_test}}/test_follow_rec state=directory mode=0755
- name: create a file outside of the toplevel
- file: path={{output_dir}}/test_follow_rec_target_file state=touch mode=0700
+ file: path={{remote_tmp_dir_test}}/test_follow_rec_target_file state=touch mode=0700
- name: create a directory outside of the toplevel
- file: path={{output_dir}}/test_follow_rec_target_dir state=directory mode=0700
+ file: path={{remote_tmp_dir_test}}/test_follow_rec_target_dir state=directory mode=0700
- name: create a file inside of the link target directory
- file: path={{output_dir}}/test_follow_rec_target_dir/foo state=touch mode=0700
+ file: path={{remote_tmp_dir_test}}/test_follow_rec_target_dir/foo state=touch mode=0700
- name: create a symlink to the file
- file: path={{output_dir}}/test_follow_rec/test_link state=link src="../test_follow_rec_target_file"
+ file: path={{remote_tmp_dir_test}}/test_follow_rec/test_link state=link src="../test_follow_rec_target_file"
- name: create a symlink to the directory
- file: path={{output_dir}}/test_follow_rec/test_link_dir state=link src="../test_follow_rec_target_dir"
+ file: path={{remote_tmp_dir_test}}/test_follow_rec/test_link_dir state=link src="../test_follow_rec_target_dir"
- name: create a symlink to a nonexistent file
- file: path={{output_dir}}/test_follow_rec/nonexistent state=link src=does_not_exist force=True
+ file: path={{remote_tmp_dir_test}}/test_follow_rec/nonexistent state=link src=does_not_exist force=True
- name: try to change permissions without following symlinks
- file: path={{output_dir}}/test_follow_rec follow=False mode="a-x" recurse=True
+ file: path={{remote_tmp_dir_test}}/test_follow_rec follow=False mode="a-x" recurse=True
- name: stat the link file target
- stat: path={{output_dir}}/test_follow_rec_target_file
+ stat: path={{remote_tmp_dir_test}}/test_follow_rec_target_file
register: file_result
- name: stat the link dir target
- stat: path={{output_dir}}/test_follow_rec_target_dir
+ stat: path={{remote_tmp_dir_test}}/test_follow_rec_target_dir
register: dir_result
- name: stat the file inside the link dir target
- stat: path={{output_dir}}/test_follow_rec_target_dir/foo
+ stat: path={{remote_tmp_dir_test}}/test_follow_rec_target_dir/foo
register: file_in_dir_result
- name: assert that the link targets were unmodified
@@ -786,18 +792,18 @@
- file_in_dir_result.stat.mode == '0700'
- name: try to change permissions with following symlinks
- file: path={{output_dir}}/test_follow_rec follow=True mode="a-x" recurse=True
+ file: path={{remote_tmp_dir_test}}/test_follow_rec follow=True mode="a-x" recurse=True
- name: stat the link file target
- stat: path={{output_dir}}/test_follow_rec_target_file
+ stat: path={{remote_tmp_dir_test}}/test_follow_rec_target_file
register: file_result
- name: stat the link dir target
- stat: path={{output_dir}}/test_follow_rec_target_dir
+ stat: path={{remote_tmp_dir_test}}/test_follow_rec_target_dir
register: dir_result
- name: stat the file inside the link dir target
- stat: path={{output_dir}}/test_follow_rec_target_dir/foo
+ stat: path={{remote_tmp_dir_test}}/test_follow_rec_target_dir/foo
register: file_in_dir_result
- name: assert that the link targets were modified
@@ -821,7 +827,7 @@
- name: Test missing src
file:
- dest: "{{ output_dir }}/hard.txt"
+ dest: "{{ remote_tmp_dir_test }}/hard.txt"
state: hard
register: file_error2
ignore_errors: yes
@@ -834,7 +840,7 @@
- name: Test non-existing src
file:
src: non-existing-file-that-does-not-exist.txt
- dest: "{{ output_dir }}/hard.txt"
+ dest: "{{ remote_tmp_dir_test }}/hard.txt"
state: hard
register: file_error3
ignore_errors: yes
@@ -843,7 +849,7 @@
that:
- "file_error3 is failed"
- "file_error3.msg == 'src does not exist'"
- - "file_error3.dest == '{{ output_dir }}/hard.txt' | expanduser"
+ - "file_error3.dest == '{{ remote_tmp_dir_test }}/hard.txt' | expanduser"
- "file_error3.src == 'non-existing-file-that-does-not-exist.txt'"
- block:
diff --git a/test/integration/targets/file/tasks/modification_time.yml b/test/integration/targets/file/tasks/modification_time.yml
new file mode 100644
index 00000000..daec0362
--- /dev/null
+++ b/test/integration/targets/file/tasks/modification_time.yml
@@ -0,0 +1,70 @@
+# file module tests for dealing with modification_time
+
+- name: Initialize the test output dir
+ import_tasks: initialize.yml
+
+- name: Setup the modification time for the tests
+ set_fact:
+ modification_timestamp: "202202081414.00"
+
+- name: Get stat info for the file
+ stat:
+ path: "{{ output_file }}"
+ register: initial_file_stat
+
+- name: Set a modification time in check_mode
+ ansible.builtin.file:
+ path: "{{ output_file }}"
+ modification_time: "{{ modification_timestamp }}"
+ modification_time_format: "%Y%m%d%H%M.%S"
+ check_mode: true
+ register: file_change_check_mode
+
+- name: Re-stat the file
+ stat:
+ path: "{{ output_file }}"
+ register: check_mode_stat
+
+- name: Confirm check_mode did not change the file
+ assert:
+ that:
+ - initial_file_stat.stat.mtime == check_mode_stat.stat.mtime
+ # Ensure the changed flag was set
+ - file_change_check_mode.changed
+ # Ensure the diff is present
+ # Note: file diff always contains the path
+ - file_change_check_mode.diff.after | length > 1
+
+- name: Set a modification time for real
+ ansible.builtin.file:
+ path: "{{ output_file }}"
+ modification_time: "{{ modification_timestamp }}"
+ modification_time_format: "%Y%m%d%H%M.%S"
+ register: file_change_no_check_mode
+
+- name: Stat of the file after the change
+ stat:
+ path: "{{ output_file }}"
+ register: change_stat
+
+- name: Confirm the modification time changed
+ assert:
+ that:
+ - initial_file_stat.stat.mtime != change_stat.stat.mtime
+ - file_change_no_check_mode.changed
+ # Note: file diff always contains the path
+ - file_change_no_check_mode.diff.after | length > 1
+
+- name: Set a modification time a second time to confirm no changes or diffs
+ ansible.builtin.file:
+ path: "{{ output_file }}"
+ modification_time: "{{ modification_timestamp }}"
+ modification_time_format: "%Y%m%d%H%M.%S"
+ register: file_change_no_check_mode_second
+
+- name: Confirm no changes made registered
+ assert:
+ that:
+ - not file_change_no_check_mode_second.changed
+ # Note: file diff always contains the path
+ - file_change_no_check_mode_second.diff.after | length == 1
diff --git a/test/integration/targets/file/tasks/selinux_tests.yml b/test/integration/targets/file/tasks/selinux_tests.yml
index 8efe8195..eda54f1e 100644
--- a/test/integration/targets/file/tasks/selinux_tests.yml
+++ b/test/integration/targets/file/tasks/selinux_tests.yml
@@ -20,7 +20,7 @@
import_tasks: initialize.yml
- name: touch a file for testing
- file: path={{output_dir}}/foo-se.txt state=touch
+ file: path={{remote_tmp_dir_test}}/foo-se.txt state=touch
register: file_se_result
- name: verify that the file was marked as changed
@@ -30,4 +30,4 @@
- "file_se_result.secontext == 'unconfined_u:object_r:admin_home_t:s0'"
- name: remove the file used for testing
- file: path={{output_dir}}/foo-se.txt state=absent
+ file: path={{remote_tmp_dir_test}}/foo-se.txt state=absent
diff --git a/test/integration/targets/file/tasks/state_link.yml b/test/integration/targets/file/tasks/state_link.yml
index 851b213e..ec6c0712 100644
--- a/test/integration/targets/file/tasks/state_link.yml
+++ b/test/integration/targets/file/tasks/state_link.yml
@@ -7,12 +7,12 @@
# Basic absolute symlink to a file
#
- name: create soft link to file
- file: src={{output_file}} dest={{output_dir}}/soft.txt state=link
+ file: src={{output_file}} dest={{remote_tmp_dir_test}}/soft.txt state=link
register: file1_result
- name: Get stat info for the link
stat:
- path: '{{ output_dir }}/soft.txt'
+ path: '{{ remote_tmp_dir_test }}/soft.txt'
follow: False
register: file1_link_stat
@@ -27,12 +27,12 @@
# Change an absolute soft link into a relative soft link
#
- name: change soft link to relative
- file: src={{output_file|basename}} dest={{output_dir}}/soft.txt state=link
+ file: src={{output_file|basename}} dest={{remote_tmp_dir_test}}/soft.txt state=link
register: file2_result
- name: Get stat info for the link
stat:
- path: '{{ output_dir }}/soft.txt'
+ path: '{{ remote_tmp_dir_test }}/soft.txt'
follow: False
register: file2_link_stat
@@ -49,12 +49,12 @@
# Check that creating the soft link a second time was idempotent
#
- name: soft link idempotency check
- file: src={{output_file|basename}} dest={{output_dir}}/soft.txt state=link
+ file: src={{output_file|basename}} dest={{remote_tmp_dir_test}}/soft.txt state=link
register: file3_result
- name: Get stat info for the link
stat:
- path: '{{ output_dir }}/soft.txt'
+ path: '{{ remote_tmp_dir_test }}/soft.txt'
follow: False
register: file3_link_stat
@@ -71,7 +71,7 @@
- name: fail to create soft link to non existent file
file:
src: '/nonexistent'
- dest: '{{output_dir}}/soft2.txt'
+ dest: '{{remote_tmp_dir_test}}/soft2.txt'
state: 'link'
force: False
register: file4_result
@@ -85,14 +85,14 @@
- name: force creation soft link to non existent
file:
src: '/nonexistent'
- dest: '{{ output_dir}}/soft2.txt'
+ dest: '{{ remote_tmp_dir_test}}/soft2.txt'
state: 'link'
force: True
register: file5_result
- name: Get stat info for the link
stat:
- path: '{{ output_dir }}/soft2.txt'
+ path: '{{ remote_tmp_dir_test }}/soft2.txt'
follow: False
register: file5_link_stat
@@ -106,7 +106,7 @@
- name: Prove idempotence of force creation soft link to non existent
file:
src: '/nonexistent'
- dest: '{{ output_dir }}/soft2.txt'
+ dest: '{{ remote_tmp_dir_test }}/soft2.txt'
state: 'link'
force: True
register: file6a_result
@@ -207,19 +207,19 @@
- name: create soft link to directory using absolute path
file:
src: '/'
- dest: '{{ output_dir }}/root'
+ dest: '{{ remote_tmp_dir_test }}/root'
state: 'link'
register: file6_result
- name: Get stat info for the link
stat:
- path: '{{ output_dir }}/root'
+ path: '{{ remote_tmp_dir_test }}/root'
follow: False
register: file6_link_stat
- name: Get stat info for the pointed to file
stat:
- path: '{{ output_dir }}/root'
+ path: '{{ remote_tmp_dir_test }}/root'
follow: True
register: file6_links_dest_stat
@@ -248,41 +248,41 @@
# Relative link to file
- name: create a test sub-directory to link to
file:
- dest: '{{ output_dir }}/sub1'
+ dest: '{{ remote_tmp_dir_test }}/sub1'
state: 'directory'
- name: create a file to link to in the test sub-directory
file:
- dest: '{{ output_dir }}/sub1/file1'
+ dest: '{{ remote_tmp_dir_test }}/sub1/file1'
state: 'touch'
- name: create another test sub-directory to place links within
file:
- dest: '{{output_dir}}/sub2'
+ dest: '{{remote_tmp_dir_test}}/sub2'
state: 'directory'
- name: create soft link to relative file
file:
src: '../sub1/file1'
- dest: '{{ output_dir }}/sub2/link1'
+ dest: '{{ remote_tmp_dir_test }}/sub2/link1'
state: 'link'
register: file7_result
- name: Get stat info for the link
stat:
- path: '{{ output_dir }}/sub2/link1'
+ path: '{{ remote_tmp_dir_test }}/sub2/link1'
follow: False
register: file7_link_stat
- name: Get stat info for the pointed to file
stat:
- path: '{{ output_dir }}/sub2/link1'
+ path: '{{ remote_tmp_dir_test }}/sub2/link1'
follow: True
register: file7_links_dest_stat
- name: Get stat info for the file we intend to point to
stat:
- path: '{{ output_dir }}/sub1/file1'
+ path: '{{ remote_tmp_dir_test }}/sub1/file1'
follow: False
register: file7_dest_stat
@@ -302,25 +302,25 @@
- name: create soft link to relative directory
file:
src: sub1
- dest: '{{ output_dir }}/sub1-link'
+ dest: '{{ remote_tmp_dir_test }}/sub1-link'
state: 'link'
register: file8_result
- name: Get stat info for the link
stat:
- path: '{{ output_dir }}/sub1-link'
+ path: '{{ remote_tmp_dir_test }}/sub1-link'
follow: False
register: file8_link_stat
- name: Get stat info for the pointed to file
stat:
- path: '{{ output_dir }}/sub1-link'
+ path: '{{ remote_tmp_dir_test }}/sub1-link'
follow: True
register: file8_links_dest_stat
- name: Get stat info for the file we intend to point to
stat:
- path: '{{ output_dir }}/sub1'
+ path: '{{ remote_tmp_dir_test }}/sub1'
follow: False
register: file8_dest_stat
@@ -341,26 +341,26 @@
- name: create a test file
copy:
- dest: '{{output_dir}}/test_follow'
+ dest: '{{remote_tmp_dir_test}}/test_follow'
content: 'this is a test file\n'
mode: 0666
- name: create a symlink to the test file
file:
- path: '{{output_dir}}/test_follow_link'
+ path: '{{remote_tmp_dir_test}}/test_follow_link'
src: './test_follow'
state: 'link'
- name: modify the permissions on the link using follow=yes
file:
- path: '{{output_dir}}/test_follow_link'
+ path: '{{remote_tmp_dir_test}}/test_follow_link'
mode: 0644
follow: yes
register: file9_result
- name: stat the link target
stat:
- path: '{{output_dir}}/test_follow'
+ path: '{{remote_tmp_dir_test}}/test_follow'
register: file9_stat
- name: assert that the chmod worked
@@ -374,7 +374,7 @@
#
- name: attempt to modify the permissions of the link itself
file:
- path: '{{output_dir}}/test_follow_link'
+ path: '{{remote_tmp_dir_test}}/test_follow_link'
src: './test_follow'
state: 'link'
mode: 0600
@@ -385,7 +385,7 @@
# Just check that the underlying file was not changed
- name: stat the link target
stat:
- path: '{{output_dir}}/test_follow'
+ path: '{{remote_tmp_dir_test}}/test_follow'
register: file10_target_stat
- name: assert that the link target was unmodified
@@ -399,19 +399,19 @@
- name: Create a testing file
file:
- path: "{{ output_dir }}/test_follow1"
+ path: "{{ remote_tmp_dir_test }}/test_follow1"
state: touch
- name: Create a symlink and change mode of the original file, since follow == yes by default
file:
- src: "{{ output_dir }}/test_follow1"
- dest: "{{ output_dir }}/test_follow1_link"
+ src: "{{ remote_tmp_dir_test }}/test_follow1"
+ dest: "{{ remote_tmp_dir_test }}/test_follow1_link"
state: link
mode: 0700
- name: stat the original file
stat:
- path: "{{ output_dir }}/test_follow1"
+ path: "{{ remote_tmp_dir_test }}/test_follow1"
register: stat_out
- name: Check if the mode of the original file was set
@@ -425,8 +425,8 @@
path: "{{ item }}"
state: absent
loop:
- - "{{ output_dir }}/test_follow1"
- - "{{ output_dir }}/test_follow1_link"
+ - "{{ remote_tmp_dir_test }}/test_follow1"
+ - "{{ remote_tmp_dir_test }}/test_follow1_link"
# END #56928
@@ -435,7 +435,7 @@
- name: Specify src without state
file:
src: "{{ output_file }}"
- dest: "{{ output_dir }}/link.txt"
+ dest: "{{ remote_tmp_dir_test }}/link.txt"
ignore_errors: yes
register: src_state
@@ -448,35 +448,35 @@
# Test creating a symlink when the destination exists and is a file
- name: create a test file
copy:
- dest: '{{ output_dir }}/file.txt'
+ dest: '{{ remote_tmp_dir_test }}/file.txt'
content: 'this is a test file\n'
mode: 0666
- name: Create a symlink with dest already a file
file:
src: '{{ output_file }}'
- dest: '{{ output_dir }}/file.txt'
+ dest: '{{ remote_tmp_dir_test }}/file.txt'
state: link
ignore_errors: true
register: dest_is_existing_file_fail
- name: Stat to make sure the symlink was not created
stat:
- path: '{{ output_dir }}/file.txt'
+ path: '{{ remote_tmp_dir_test }}/file.txt'
follow: false
register: dest_is_existing_file_fail_stat
- name: Forcefully a symlink with dest already a file
file:
src: '{{ output_file }}'
- dest: '{{ output_dir }}/file.txt'
+ dest: '{{ remote_tmp_dir_test }}/file.txt'
state: link
force: true
register: dest_is_existing_file_force
- name: Stat to make sure the symlink was created
stat:
- path: '{{ output_dir }}/file.txt'
+ path: '{{ remote_tmp_dir_test }}/file.txt'
follow: false
register: dest_is_existing_file_force_stat
diff --git a/test/integration/targets/file/tasks/unicode_path.yml b/test/integration/targets/file/tasks/unicode_path.yml
index d78af765..a4902a9b 100644
--- a/test/integration/targets/file/tasks/unicode_path.yml
+++ b/test/integration/targets/file/tasks/unicode_path.yml
@@ -1,10 +1,10 @@
- name: create local file with unicode filename and content
lineinfile:
- dest: "{{ output_dir }}/语/汉语.txt"
+ dest: "{{ remote_tmp_dir_test }}/语/汉语.txt"
create: true
line: 汉语
- name: remove local file with unicode filename and content
file:
- path: "{{ output_dir }}/语/汉语.txt"
+ path: "{{ remote_tmp_dir_test }}/语/汉语.txt"
state: absent
diff --git a/test/integration/targets/filter_core/tasks/main.yml b/test/integration/targets/filter_core/tasks/main.yml
index 5a5d813f..05622d84 100644
--- a/test/integration/targets/filter_core/tasks/main.yml
+++ b/test/integration/targets/filter_core/tasks/main.yml
@@ -79,8 +79,6 @@
- "31 == ['x','y']|map('extract',{'x':42,'y':31})|list|last"
- "'local' == ['localhost']|map('extract',hostvars,'ansible_connection')|list|first"
- "'local' == ['localhost']|map('extract',hostvars,['ansible_connection'])|list|first"
- # map was added to jinja2 in version 2.7
- when: lookup('pipe', ansible_python.executable ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.7', '>=')
- name: Test extract filter with defaults
vars:
diff --git a/test/integration/targets/filter_mathstuff/runme.sh b/test/integration/targets/filter_mathstuff/runme.sh
index fad8443c..5a474cf5 100755
--- a/test/integration/targets/filter_mathstuff/runme.sh
+++ b/test/integration/targets/filter_mathstuff/runme.sh
@@ -5,13 +5,3 @@ set -eux
export ANSIBLE_ROLES_PATH=../
ansible-playbook runme.yml "$@"
-
-source virtualenv.sh
-
-# Install Jinja < 2.10 since we want to test the fallback to Ansible's custom
-# unique filter. Jinja < 2.10 does not have do_unique so we will trigger the
-# fallback.
-pip install 'jinja2 < 2.10' 'markupsafe < 2.1'
-
-# Run the playbook again in the venv with Jinja < 2.10
-ansible-playbook runme.yml "$@"
diff --git a/test/integration/targets/filter_urls/runme.sh b/test/integration/targets/filter_urls/runme.sh
deleted file mode 100755
index 2ed1cd84..00000000
--- a/test/integration/targets/filter_urls/runme.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-
-set -eux
-
-export ANSIBLE_ROLES_PATH=../
-
-ansible-playbook runme.yml "$@"
-
-source virtualenv.sh
-
-# This is necessary for installing Jinja 2.6. We need this because Jinja 2.6
-# won't install with newer setuptools, and because setuptools 45+ won't work
-# with Python 2.
-pip install 'setuptools<45'
-
-# Install Jinja 2.6 since we want to test the fallback to Ansible's custom
-# urlencode functions. Jinja 2.6 does not have urlencode so we will trigger the
-# fallback.
-pip install 'jinja2 >= 2.6, < 2.7' 'markupsafe < 2.1'
-
-# Run the playbook again in the venv with Jinja 2.6
-ansible-playbook runme.yml "$@"
diff --git a/test/integration/targets/filter_urls/runme.yml b/test/integration/targets/filter_urls/runme.yml
deleted file mode 100644
index 527a03e3..00000000
--- a/test/integration/targets/filter_urls/runme.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- hosts: localhost
- gather_facts: false
- roles:
- - { role: filter_urls }
diff --git a/test/integration/targets/filter_urls/tasks/main.yml b/test/integration/targets/filter_urls/tasks/main.yml
index 935ed479..c062326c 100644
--- a/test/integration/targets/filter_urls/tasks/main.yml
+++ b/test/integration/targets/filter_urls/tasks/main.yml
@@ -1,10 +1,3 @@
-- name: Get Jinja2 version
- shell: "{{ ansible_python_interpreter }} -c 'import jinja2; print(jinja2.__version__)'"
- register: jinja2_version
-
-- name: Print Jinja2 version
- debug: var=jinja2_version.stdout
-
- name: Test urldecode filter
set_fact:
urldecoded_string: key="@{}é&%£ foo bar '(;\<>""°)
diff --git a/test/integration/targets/find/meta/main.yml b/test/integration/targets/find/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/find/meta/main.yml
+++ b/test/integration/targets/find/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/find/tasks/main.yml b/test/integration/targets/find/tasks/main.yml
index 366ef312..5787a81f 100644
--- a/test/integration/targets/find/tasks/main.yml
+++ b/test/integration/targets/find/tasks/main.yml
@@ -16,16 +16,16 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-- set_fact: output_dir_test={{output_dir}}/test_find
+- set_fact: remote_tmp_dir_test={{remote_tmp_dir}}/test_find
- name: make sure our testing sub-directory does not exist
file:
- path: "{{ output_dir_test }}"
+ path: "{{ remote_tmp_dir_test }}"
state: absent
- name: create our testing sub-directory
file:
- path: "{{ output_dir_test }}"
+ path: "{{ remote_tmp_dir_test }}"
state: directory
##
@@ -34,7 +34,7 @@
- name: make some directories
file:
- path: "{{ output_dir_test }}/{{ item }}"
+ path: "{{ remote_tmp_dir_test }}/{{ item }}"
state: directory
with_items:
- a/b/c/d
@@ -42,7 +42,7 @@
- name: make some files
copy:
- dest: "{{ output_dir_test }}/{{ item }}"
+ dest: "{{ remote_tmp_dir_test }}/{{ item }}"
content: 'data'
with_items:
- a/1.txt
@@ -56,7 +56,7 @@
- name: find the directories
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
file_type: directory
recurse: yes
register: find_test0
@@ -75,7 +75,7 @@
- name: find the xml and img files
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
file_type: file
patterns: "*.xml,*.img"
recurse: yes
@@ -89,7 +89,7 @@
- name: find the xml file
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
patterns: "*.xml"
recurse: yes
register: find_test2
@@ -103,7 +103,7 @@
- name: find the xml file with empty excludes
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
patterns: "*.xml"
recurse: yes
excludes: []
@@ -119,7 +119,7 @@
- name: Copy some files into the test dir
copy:
src: "{{ item }}"
- dest: "{{ output_dir_test }}/{{ item }}"
+ dest: "{{ remote_tmp_dir_test }}/{{ item }}"
mode: 0644
with_items:
- a.txt
@@ -127,7 +127,7 @@
- name: Ensure '$' only matches the true end of the file with read_whole_file, not a line
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
patterns: "*.txt"
contains: "KO$"
read_whole_file: true
@@ -141,7 +141,7 @@
- name: Match the end of the file successfully
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
patterns: "*.txt"
contains: "OK$"
read_whole_file: true
@@ -155,7 +155,7 @@
- name: When read_whole_file=False, $ should match an individual line
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
patterns: "*.txt"
contains: ".*KO$"
read_whole_file: false
@@ -169,7 +169,7 @@
- name: When read_whole_file=True, match across line boundaries
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
patterns: "*.txt"
contains: "has\na few"
read_whole_file: true
@@ -183,7 +183,7 @@
- name: When read_whole_file=False, do not match across line boundaries
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
patterns: "*.txt"
contains: "has\na few"
read_whole_file: false
@@ -214,7 +214,7 @@
block:
- name: Get all files/directories in the path
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
recurse: yes
file_type: any
register: total_contents
@@ -226,7 +226,7 @@
- name: Get files and directories with depth
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
recurse: yes
file_type: any
depth: 2
@@ -241,7 +241,7 @@
- name: Find files with depth
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
depth: 2
recurse: yes
register: files_with_depth
@@ -254,117 +254,96 @@
- files_with_depth.examined == 12
- name: exclude with regex
find:
- paths: "{{ output_dir_test }}"
+ paths: "{{ remote_tmp_dir_test }}"
recurse: yes
use_regex: true
exclude: .*\.ogg
register: find_test3
-# Note that currently sane ways of doing this with map() or
-# selectattr() aren't available in centos6 era jinja2 ...
+
- set_fact:
- find_test3_list: >-
- [ {% for f in find_test3.files %}
- {{ f.path }}
- {% if not loop.last %},{% endif %}
- {% endfor %}
- ]
-- debug: var=find_test3_list
+ find_test3_list: "{{ find_test3.files|map(attribute='path') }}"
+
- name: assert we skipped the ogg file
assert:
that:
- - '"{{ output_dir_test }}/e/f/g/h/8.ogg" not in find_test3_list'
+ - '"{{ remote_tmp_dir_test }}/e/f/g/h/8.ogg" not in find_test3_list'
- name: create our age/size testing sub-directory
file:
- path: "{{ output_dir_test }}/astest"
+ path: "{{ remote_tmp_dir_test }}/astest"
state: directory
- name: create test file with old timestamps
file:
- path: "{{ output_dir_test }}/astest/old.txt"
+ path: "{{ remote_tmp_dir_test }}/astest/old.txt"
state: touch
modification_time: "202001011200.0"
- name: create test file with current timestamps
file:
- path: "{{ output_dir_test }}/astest/new.txt"
+ path: "{{ remote_tmp_dir_test }}/astest/new.txt"
state: touch
- name: create hidden test file with current timestamps
file:
- path: "{{ output_dir_test }}/astest/.hidden.txt"
+ path: "{{ remote_tmp_dir_test }}/astest/.hidden.txt"
state: touch
- name: find files older than 1 week
find:
- path: "{{ output_dir_test }}/astest"
+ path: "{{ remote_tmp_dir_test }}/astest"
age: 1w
hidden: true
register: result
- set_fact:
- astest_list: >-
- [ {% for f in result.files %}
- {{ f.path }}
- {% if not loop.last %},{% endif %}
- {% endfor %}
- ]
+ astest_list: "{{ result.files|map(attribute='path') }}"
- name: assert we only find the old file
assert:
that:
- result.matched == 1
- - '"{{ output_dir_test }}/astest/old.txt" in astest_list'
+ - '"{{ remote_tmp_dir_test }}/astest/old.txt" in astest_list'
- name: find files newer than 1 week
find:
- path: "{{ output_dir_test }}/astest"
+ path: "{{ remote_tmp_dir_test }}/astest"
age: -1w
register: result
- set_fact:
- astest_list: >-
- [ {% for f in result.files %}
- {{ f.path }}
- {% if not loop.last %},{% endif %}
- {% endfor %}
- ]
+ astest_list: "{{ result.files|map(attribute='path') }}"
- name: assert we only find the current file
assert:
that:
- result.matched == 1
- - '"{{ output_dir_test }}/astest/new.txt" in astest_list'
+ - '"{{ remote_tmp_dir_test }}/astest/new.txt" in astest_list'
- name: add some content to the new file
- shell: "echo hello world > {{ output_dir_test }}/astest/new.txt"
+ shell: "echo hello world > {{ remote_tmp_dir_test }}/astest/new.txt"
- name: find files with MORE than 5 bytes, also get checksums
find:
- path: "{{ output_dir_test }}/astest"
+ path: "{{ remote_tmp_dir_test }}/astest"
size: 5
hidden: true
get_checksum: true
register: result
- set_fact:
- astest_list: >-
- [ {% for f in result.files %}
- {{ f.path }}
- {% if not loop.last %},{% endif %}
- {% endfor %}
- ]
+ astest_list: "{{ result.files|map(attribute='path') }}"
- name: assert we only find the hello world file
assert:
that:
- result.matched == 1
- - '"{{ output_dir_test }}/astest/new.txt" in astest_list'
+ - '"{{ remote_tmp_dir_test }}/astest/new.txt" in astest_list'
- '"checksum" in result.files[0]'
- name: find ANY item with LESS than 5 bytes, also get checksums
find:
- path: "{{ output_dir_test }}/astest"
+ path: "{{ remote_tmp_dir_test }}/astest"
size: -5
hidden: true
get_checksum: true
@@ -372,17 +351,12 @@
register: result
- set_fact:
- astest_list: >-
- [ {% for f in result.files %}
- {{ f.path }}
- {% if not loop.last %},{% endif %}
- {% endfor %}
- ]
+ astest_list: "{{ result.files|map(attribute='path') }}"
- name: assert we do not find the hello world file and a checksum is present
assert:
that:
- result.matched == 2
- - '"{{ output_dir_test }}/astest/old.txt" in astest_list'
- - '"{{ output_dir_test }}/astest/.hidden.txt" in astest_list'
+ - '"{{ remote_tmp_dir_test }}/astest/old.txt" in astest_list'
+ - '"{{ remote_tmp_dir_test }}/astest/.hidden.txt" in astest_list'
- '"checksum" in result.files[0]'
diff --git a/test/integration/targets/get_url/aliases b/test/integration/targets/get_url/aliases
index f82a267b..90ef161f 100644
--- a/test/integration/targets/get_url/aliases
+++ b/test/integration/targets/get_url/aliases
@@ -1,4 +1,3 @@
destructive
shippable/posix/group1
needs/httptester
-skip/aix
diff --git a/test/integration/targets/getent/aliases b/test/integration/targets/getent/aliases
index f8e28c7e..a6dafcf8 100644
--- a/test/integration/targets/getent/aliases
+++ b/test/integration/targets/getent/aliases
@@ -1,2 +1 @@
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/git/aliases b/test/integration/targets/git/aliases
index f71c8117..3005e4b2 100644
--- a/test/integration/targets/git/aliases
+++ b/test/integration/targets/git/aliases
@@ -1,2 +1 @@
shippable/posix/group4
-skip/aix
diff --git a/test/integration/targets/git/handlers/cleanup-freebsd.yml b/test/integration/targets/git/handlers/cleanup-freebsd.yml
index 1ee35013..29220c3a 100644
--- a/test/integration/targets/git/handlers/cleanup-freebsd.yml
+++ b/test/integration/targets/git/handlers/cleanup-freebsd.yml
@@ -1,4 +1,4 @@
-- name: remove git fromn FreeBSD
+- name: remove git from FreeBSD
pkgng:
name: git
state: absent
diff --git a/test/integration/targets/git/tasks/archive.yml b/test/integration/targets/git/tasks/archive.yml
index 18b9dff3..952154dc 100644
--- a/test/integration/targets/git/tasks/archive.yml
+++ b/test/integration/targets/git/tasks/archive.yml
@@ -11,14 +11,9 @@
register: git_archive
with_items: "{{ git_archive_extensions[ansible_os_family ~ ansible_distribution_major_version | default('default') ] | default(git_archive_extensions.default) }}"
-# The map filter was added in Jinja2 2.7, which is newer than the version on RHEL/CentOS 6,
-# so we skip this validation on those hosts
- name: ARCHIVE | Assert that archives were downloaded
assert:
that: (git_archive.results | map(attribute='changed') | unique | list)[0]
- when:
- - "ansible_os_family == 'RedHat'"
- - ansible_distribution_major_version is version('7', '>=')
- name: ARCHIVE | Check if archive file is created or not
stat:
@@ -53,14 +48,9 @@
register: git_archive
with_items: "{{ git_archive_extensions[ansible_os_family ~ ansible_distribution_major_version | default('default') ] | default(git_archive_extensions.default) }}"
-# The map filter was added in Jinja2 2.7, which is newer than the version on RHEL/CentOS 6,
-# so we skip this validation on those hosts
- name: ARCHIVE | Assert that archives were downloaded
assert:
that: (git_archive.results | map(attribute='changed') | unique | list)[0]
- when:
- - "ansible_os_family == 'RedHat'"
- - ansible_distribution_major_version is version('7', '>=')
- name: ARCHIVE | Check if archive file is created or not
stat:
@@ -82,14 +72,11 @@
register: archive_content
with_items: "{{ git_archive_extensions[ansible_os_family ~ ansible_distribution_major_version | default('default') ] | default(git_archive_extensions.default) }}"
-# Does not work on RedHat6 (jinja2 too old?)
- name: ARCHIVE | Ensure archive content is correct
assert:
that:
- item.stdout_lines | sort | first == 'defaults/'
with_items: "{{ archive_content.results }}"
- when:
- - ansible_os_family ~ ansible_distribution_major_version != 'RedHat6'
- name: ARCHIVE | Clear checkout_dir
file:
diff --git a/test/integration/targets/group/aliases b/test/integration/targets/group/aliases
index f8e28c7e..a6dafcf8 100644
--- a/test/integration/targets/group/aliases
+++ b/test/integration/targets/group/aliases
@@ -1,2 +1 @@
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/groupby_filter/aliases b/test/integration/targets/groupby_filter/aliases
index 58201272..90ea9e12 100644
--- a/test/integration/targets/groupby_filter/aliases
+++ b/test/integration/targets/groupby_filter/aliases
@@ -1,3 +1,2 @@
shippable/posix/group2
-needs/file/test/lib/ansible_test/_data/requirements/constraints.txt
context/controller
diff --git a/test/integration/targets/groupby_filter/requirements.txt b/test/integration/targets/groupby_filter/requirements.txt
deleted file mode 100644
index fdd9ec5c..00000000
--- a/test/integration/targets/groupby_filter/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-# pip 7.1 added support for constraints, which are required by ansible-test to install most python requirements
-# see https://github.com/pypa/pip/blame/e648e00dc0226ade30ade99591b245b0c98e86c9/NEWS.rst#L1258
-pip >= 7.1, < 10 ; python_version < '2.7' # pip 10+ drops support for python 2.6 (sanity_ok)
-pip >= 7.1 ; python_version >= '2.7' # sanity_ok
diff --git a/test/integration/targets/groupby_filter/runme.sh b/test/integration/targets/groupby_filter/runme.sh
deleted file mode 100755
index 9c9c6f03..00000000
--- a/test/integration/targets/groupby_filter/runme.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env bash
-
-set -eux
-
-source virtualenv.sh
-
-# Update pip in the venv to a version that supports constraints
-pip install --requirement requirements.txt
-
-pip install -U jinja2==2.9.4 "markupsafe<2.1.0" --constraint "../../../lib/ansible_test/_data/requirements/constraints.txt"
-
-ansible-playbook -i ../../inventory test_jinja2_groupby.yml -v "$@"
-
-pip install -U "jinja2<2.9.0" "markupsafe<2.1.0" --constraint "../../../lib/ansible_test/_data/requirements/constraints.txt"
-
-ansible-playbook -i ../../inventory test_jinja2_groupby.yml -v "$@"
diff --git a/test/integration/targets/groupby_filter/tasks/main.yml b/test/integration/targets/groupby_filter/tasks/main.yml
new file mode 100644
index 00000000..45c86876
--- /dev/null
+++ b/test/integration/targets/groupby_filter/tasks/main.yml
@@ -0,0 +1,16 @@
+- set_fact:
+ result: "{{ fruits | groupby('enjoy') }}"
+ vars:
+ fruits:
+ - name: apple
+ enjoy: yes
+ - name: orange
+ enjoy: no
+ - name: strawberry
+ enjoy: yes
+
+- assert:
+ that:
+ - result == expected
+ vars:
+ expected: [[false, [{"enjoy": false, "name": "orange"}]], [true, [{"enjoy": true, "name": "apple"}, {"enjoy": true, "name": "strawberry"}]]]
diff --git a/test/integration/targets/groupby_filter/test_jinja2_groupby.yml b/test/integration/targets/groupby_filter/test_jinja2_groupby.yml
deleted file mode 100644
index 3cd02959..00000000
--- a/test/integration/targets/groupby_filter/test_jinja2_groupby.yml
+++ /dev/null
@@ -1,29 +0,0 @@
----
-- name: Test jinja2 groupby
- hosts: localhost
- gather_facts: True
- connection: local
- vars:
- fruits:
- - name: apple
- enjoy: yes
- - name: orange
- enjoy: no
- - name: strawberry
- enjoy: yes
- expected: [[false, [{"enjoy": false, "name": "orange"}]], [true, [{"enjoy": true, "name": "apple"}, {"enjoy": true, "name": "strawberry"}]]]
- tasks:
- - name: show python interpreter
- debug:
- msg: "{{ ansible_python['executable'] }}"
-
- - name: show jinja2 version
- debug:
- msg: "{{ lookup('pipe', '{{ ansible_python[\"executable\"] }} -c \"import jinja2; print(jinja2.__version__)\"') }}"
-
- - set_fact:
- result: "{{ fruits | groupby('enjoy') }}"
-
- - assert:
- that:
- - result == expected
diff --git a/test/integration/targets/hostname/aliases b/test/integration/targets/hostname/aliases
index c552d611..6eae8bd8 100644
--- a/test/integration/targets/hostname/aliases
+++ b/test/integration/targets/hostname/aliases
@@ -1,3 +1,2 @@
shippable/posix/group1
destructive
-skip/aix # currently unsupported by hostname module
diff --git a/test/integration/targets/hostname/tasks/Debian.yml b/test/integration/targets/hostname/tasks/Debian.yml
new file mode 100644
index 00000000..dfa88fef
--- /dev/null
+++ b/test/integration/targets/hostname/tasks/Debian.yml
@@ -0,0 +1,20 @@
+---
+- name: Test DebianStrategy by setting hostname
+ become: 'yes'
+ hostname:
+ use: debian
+ name: "{{ ansible_distribution_release }}-bebop.ansible.example.com"
+
+- name: Test DebianStrategy by getting current hostname
+ command: hostname
+ register: get_hostname
+
+- name: Test DebianStrategy by verifying /etc/hostname content
+ command: grep -v '^#' /etc/hostname
+ register: grep_hostname
+
+- name: Test DebianStrategy using assertions
+ assert:
+ that:
+ - "'{{ ansible_distribution_release }}-bebop.ansible.example.com' in get_hostname.stdout"
+ - "'{{ ansible_distribution_release }}-bebop.ansible.example.com' in grep_hostname.stdout"
diff --git a/test/integration/targets/hostname/tasks/main.yml b/test/integration/targets/hostname/tasks/main.yml
index 8a9e34bd..596dd89a 100644
--- a/test/integration/targets/hostname/tasks/main.yml
+++ b/test/integration/targets/hostname/tasks/main.yml
@@ -30,6 +30,7 @@
always:
# Reset back to original hostname
- name: Move back original file if it existed
+ become: 'yes'
command: mv -f {{ _hostname_file }}.orig {{ _hostname_file }}
when: hn_stat.stat.exists | default(False)
@@ -40,6 +41,7 @@
when: not hn_stat.stat.exists | default(True)
- name: Reset back to original hostname
+ become: 'yes'
hostname:
name: "{{ original.stdout }}"
register: revert
diff --git a/test/integration/targets/hostname/tasks/test_normal.yml b/test/integration/targets/hostname/tasks/test_normal.yml
index a40d96e9..ed5ac735 100644
--- a/test/integration/targets/hostname/tasks/test_normal.yml
+++ b/test/integration/targets/hostname/tasks/test_normal.yml
@@ -1,4 +1,19 @@
+- name: Ensure hostname doesn't confuse NetworkManager
+ when: ansible_os_family == 'RedHat'
+ block:
+ - name: slurp /var/log/messages
+ slurp:
+ path: /var/log/messages
+ become: yes
+ register: messages_before
+
+ - assert:
+ that:
+ - >
+ 'current hostname was changed outside NetworkManager' not in messages_before.content|b64decode
+
- name: Run hostname module for real now
+ become: 'yes'
hostname:
name: crocodile.ansible.test.doesthiswork.net.example.com
register: hn2
@@ -7,7 +22,22 @@
command: hostname
register: current_after_hn2
+- name: Ensure hostname doesn't confuse NetworkManager
+ when: ansible_os_family == 'RedHat'
+ block:
+ - name: slurp /var/log/messages
+ slurp:
+ path: /var/log/messages
+ become: yes
+ register: messages_after
+
+ - assert:
+ that:
+ - >
+ 'current hostname was changed outside NetworkManager' not in messages_after.content|b64decode
+
- name: Run hostname again to ensure it does not change
+ become: 'yes'
hostname:
name: crocodile.ansible.test.doesthiswork.net.example.com
register: hn3
diff --git a/test/integration/targets/incidental_cloud_init_data_facts/aliases b/test/integration/targets/incidental_cloud_init_data_facts/aliases
deleted file mode 100644
index 544fcacd..00000000
--- a/test/integration/targets/incidental_cloud_init_data_facts/aliases
+++ /dev/null
@@ -1,7 +0,0 @@
-destructive
-shippable/posix/incidental
-skip/aix
-skip/osx
-skip/macos
-skip/freebsd
-context/target
diff --git a/test/integration/targets/incidental_cloud_init_data_facts/tasks/main.yml b/test/integration/targets/incidental_cloud_init_data_facts/tasks/main.yml
deleted file mode 100644
index eca905c6..00000000
--- a/test/integration/targets/incidental_cloud_init_data_facts/tasks/main.yml
+++ /dev/null
@@ -1,50 +0,0 @@
----
-- name: test cloud-init
- # TODO: check for a workaround
- # install 'cloud-init'' failed: dpkg-divert: error: `diversion of /etc/init/ureadahead.conf
- # to /etc/init/ureadahead.conf.disabled by cloud-init' clashes with `local diversion of
- # /etc/init/ureadahead.conf to /etc/init/ureadahead.conf.distrib
- # https://bugs.launchpad.net/ubuntu/+source/ureadahead/+bug/997838
- # Will also have to skip on OpenSUSE when running on Python 2 on newer Leap versions
- # (!= 42 and >= 15) ascloud-init will install the Python 3 package, breaking our build on py2.
- when:
- - not (ansible_distribution == "Ubuntu" and ansible_distribution_major_version|int == 14)
- - not (ansible_os_family == "Suse" and ansible_distribution_major_version|int != 42 and ansible_python.version.major != 3)
- block:
- - name: setup install cloud-init
- package:
- name:
- - cloud-init
- - udev
-
- - name: setup run cloud-init
- service:
- name: cloud-init-local
- state: restarted
-
- - name: test gather cloud-init facts in check mode
- cloud_init_data_facts:
- check_mode: yes
- register: result
- - name: verify test gather cloud-init facts in check mode
- assert:
- that:
- - result.cloud_init_data_facts.status.v1 is defined
- - result.cloud_init_data_facts.status.v1.stage is defined
- - not result.cloud_init_data_facts.status.v1.stage
- - cloud_init_data_facts.status.v1 is defined
- - cloud_init_data_facts.status.v1.stage is defined
- - not cloud_init_data_facts.status.v1.stage
-
- - name: test gather cloud-init facts
- cloud_init_data_facts:
- register: result
- - name: verify test gather cloud-init facts
- assert:
- that:
- - result.cloud_init_data_facts.status.v1 is defined
- - result.cloud_init_data_facts.status.v1.stage is defined
- - not result.cloud_init_data_facts.status.v1.stage
- - cloud_init_data_facts.status.v1 is defined
- - cloud_init_data_facts.status.v1.stage is defined
- - not cloud_init_data_facts.status.v1.stage
diff --git a/test/integration/targets/incidental_cs_common/aliases b/test/integration/targets/incidental_cs_common/aliases
deleted file mode 100644
index 136c05e0..00000000
--- a/test/integration/targets/incidental_cs_common/aliases
+++ /dev/null
@@ -1 +0,0 @@
-hidden
diff --git a/test/integration/targets/incidental_cs_common/defaults/main.yml b/test/integration/targets/incidental_cs_common/defaults/main.yml
deleted file mode 100644
index 942316bd..00000000
--- a/test/integration/targets/incidental_cs_common/defaults/main.yml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-cs_resource_prefix: "cs-{{ (ansible_date_time.iso8601_micro | to_uuid).split('-')[0] }}"
-cs_common_template: CentOS 5.6 (64-bit) no GUI (Simulator)
-cs_common_service_offering: Small Instance
-cs_common_zone_adv: Sandbox-simulator-advanced
-cs_common_zone_basic: Sandbox-simulator-basic
diff --git a/test/integration/targets/incidental_deploy_helper/aliases b/test/integration/targets/incidental_deploy_helper/aliases
deleted file mode 100644
index 3b88c806..00000000
--- a/test/integration/targets/incidental_deploy_helper/aliases
+++ /dev/null
@@ -1,2 +0,0 @@
-shippable/posix/incidental
-context/target
diff --git a/test/integration/targets/incidental_deploy_helper/tasks/main.yml b/test/integration/targets/incidental_deploy_helper/tasks/main.yml
deleted file mode 100644
index 962c894a..00000000
--- a/test/integration/targets/incidental_deploy_helper/tasks/main.yml
+++ /dev/null
@@ -1,149 +0,0 @@
----
-- name: record the output directory
- set_fact: deploy_helper_test_root={{output_dir}}/deploy_helper_test_root
-
-- name: State=query with default parameters
- deploy_helper: path={{ deploy_helper_test_root }} state=query
-- name: Assert State=query with default parameters
- assert:
- that:
- - "'project_path' in deploy_helper"
- - "deploy_helper.current_path == '{{ deploy_helper.project_path }}/current'"
- - "deploy_helper.releases_path == '{{ deploy_helper.project_path }}/releases'"
- - "deploy_helper.shared_path == '{{ deploy_helper.project_path }}/shared'"
- - "deploy_helper.unfinished_filename == 'DEPLOY_UNFINISHED'"
- - "'previous_release' in deploy_helper"
- - "'previous_release_path' in deploy_helper"
- - "'new_release' in deploy_helper"
- - "'new_release_path' in deploy_helper"
- - "deploy_helper.new_release_path == '{{ deploy_helper.releases_path }}/{{ deploy_helper.new_release }}'"
-
-- name: State=query with relative overridden paths
- deploy_helper: path={{ deploy_helper_test_root }} current_path=CURRENT_PATH releases_path=RELEASES_PATH shared_path=SHARED_PATH state=query
-- name: Assert State=query with relative overridden paths
- assert:
- that:
- - "deploy_helper.current_path == '{{ deploy_helper.project_path }}/CURRENT_PATH'"
- - "deploy_helper.releases_path == '{{ deploy_helper.project_path }}/RELEASES_PATH'"
- - "deploy_helper.shared_path == '{{ deploy_helper.project_path }}/SHARED_PATH'"
- - "deploy_helper.new_release_path == '{{ deploy_helper.releases_path }}/{{ deploy_helper.new_release}}'"
-
-- name: State=query with absolute overridden paths
- deploy_helper: path={{ deploy_helper_test_root }} current_path=/CURRENT_PATH releases_path=/RELEASES_PATH shared_path=/SHARED_PATH state=query
-- name: Assert State=query with absolute overridden paths
- assert:
- that:
- - "deploy_helper.current_path == '/CURRENT_PATH'"
- - "deploy_helper.releases_path == '/RELEASES_PATH'"
- - "deploy_helper.shared_path == '/SHARED_PATH'"
- - "deploy_helper.new_release_path == '{{ deploy_helper.releases_path }}/{{ deploy_helper.new_release}}'"
-
-- name: State=query with overridden unfinished_filename
- deploy_helper: path={{ deploy_helper_test_root }} unfinished_filename=UNFINISHED_DEPLOY state=query
-- name: Assert State=query with overridden unfinished_filename
- assert:
- that:
- - "'UNFINISHED_DEPLOY' == deploy_helper.unfinished_filename"
-
-# Remove the root folder just in case it exists
-- file: path={{ deploy_helper_test_root }} state=absent
-
-- name: State=present with default parameters
- deploy_helper: path={{ deploy_helper_test_root }} state=present
-- stat: path={{ deploy_helper.releases_path }}
- register: releases_path
-- stat: path={{ deploy_helper.shared_path }}
- register: shared_path
-- name: Assert State=present with default parameters
- assert:
- that:
- - "releases_path.stat.exists"
- - "shared_path.stat.exists"
-
-# Setup older releases for tests
-- file: path={{ deploy_helper.releases_path }}/{{ item }} state=directory
- with_items: ['first', 'second', 'third', 'fourth', 'fifth', 'sixth', 'seventh']
-# Setup the new release
-- file: path={{ deploy_helper.new_release_path }} state=directory
-# Add a buildfile, just like in a real deploy
-- copy: content='' dest={{ deploy_helper.new_release_path }}/{{ deploy_helper.unfinished_filename }}
-# Add a buildfile, to an older deploy
-- copy: content='' dest={{ deploy_helper.releases_path }}/third/{{ deploy_helper.unfinished_filename }}
-
-- name: State=finalize with default parameters
- deploy_helper: path={{ deploy_helper_test_root }} release={{ deploy_helper.new_release }} state=finalize
-- stat: path={{ deploy_helper.current_path }}
- register: current_path
-- stat: path={{ deploy_helper.current_path }}/DEPLOY_UNFINISHED
- register: current_path_unfinished_filename
-- name: Assert State=finalize with default parameters
- assert:
- that:
- - "current_path.stat.islnk"
- - "deploy_helper.new_release_path in current_path.stat.lnk_source"
- - "not current_path_unfinished_filename.stat.exists"
-- stat: path={{ deploy_helper.releases_path }}/third
- register: third_release_path
-- shell: "ls {{ deploy_helper.releases_path }} | wc -l"
- register: releases_count
-- name: Assert State=finalize with default parameters (clean=true checks)
- assert:
- that:
- - "not third_release_path.stat.exists"
- - "releases_count.stdout|trim == '6'"
-- deploy_helper: path={{ deploy_helper_test_root }} release={{ deploy_helper.new_release }} state=query
-- name: Assert State=finalize with default parameters (previous_release checks)
- assert:
- that:
- - "deploy_helper.new_release == deploy_helper.previous_release"
-
-- name: State=absent with default parameters
- deploy_helper: path={{ deploy_helper_test_root }} state=absent
-- stat: path={{ deploy_helper_test_root }}
- register: project_path
-- name: Assert State=absent with default parameters
- assert:
- that:
- - "not project_path.stat.exists"
-
-- debug: msg="Clearing all release data and facts ---------"
-
-- name: State=present with shared_path set to False
- deploy_helper: path={{ deploy_helper_test_root }} state=present shared_path=''
-- stat: path={{ deploy_helper.releases_path }}
- register: releases_path
-- stat: path={{ deploy_helper.shared_path }}
- register: shared_path
-- name: Assert State=present with shared_path set to False
- assert:
- that:
- - "releases_path.stat.exists"
- - "not shared_path.stat.exists"
-
-# Setup older releases for tests
-- file: path={{ deploy_helper.releases_path }}/{{ item }} state=directory
- with_items: ['first', 'second', 'third', 'fourth', 'fifth']
-# Setup the new release
-- file: path={{ deploy_helper.new_release_path }} state=directory
-# Add a buildfile, just like in a real deploy
-- copy: content='' dest={{ deploy_helper.new_release_path }}/{{ deploy_helper.unfinished_filename }}
-# Add a buildfile, to an older deploy
-- copy: content='' dest={{ deploy_helper.releases_path }}/third/{{ deploy_helper.unfinished_filename }}
-
-- shell: "ls {{ deploy_helper_test_root }}/releases | wc -l"
- register: before_releases_count
-- name: State=clean with keep_releases=3
- deploy_helper: path={{ deploy_helper_test_root }} release={{ deploy_helper.new_release }} state=clean keep_releases=3
-- stat: path={{ deploy_helper.releases_path }}/third
- register: third_release_path
-- shell: "ls {{ deploy_helper.releases_path }} | wc -l"
- register: releases_count
-- name: Assert State=finalize with default parameters (clean=true checks)
- assert:
- that:
- - "not third_release_path.stat.exists"
- - "before_releases_count.stdout|trim == '6'"
- - "releases_count.stdout|trim == '3'"
-
-# Remove the root folder
-- file: path={{ deploy_helper_test_root }} state=absent
diff --git a/test/integration/targets/incidental_inventory_docker_swarm/aliases b/test/integration/targets/incidental_inventory_docker_swarm/aliases
index 74d3befe..66362758 100644
--- a/test/integration/targets/incidental_inventory_docker_swarm/aliases
+++ b/test/integration/targets/incidental_inventory_docker_swarm/aliases
@@ -3,6 +3,7 @@ context/controller
skip/osx
skip/macos
skip/freebsd
+skip/rhel/9.0b # there are no docker-ce packages for CentOS/RHEL 9
destructive
skip/docker # The tests sometimes make docker daemon unstable; hence,
# we skip all docker-based CI runs to avoid disrupting
diff --git a/test/integration/targets/incidental_mongodb_parameter/aliases b/test/integration/targets/incidental_mongodb_parameter/aliases
deleted file mode 100644
index 72ed62eb..00000000
--- a/test/integration/targets/incidental_mongodb_parameter/aliases
+++ /dev/null
@@ -1,9 +0,0 @@
-destructive
-shippable/posix/incidental
-skip/aix
-skip/osx
-skip/macos
-skip/freebsd
-skip/rhel
-needs/root
-context/target
diff --git a/test/integration/targets/incidental_mongodb_parameter/defaults/main.yml b/test/integration/targets/incidental_mongodb_parameter/defaults/main.yml
deleted file mode 100644
index aac55526..00000000
--- a/test/integration/targets/incidental_mongodb_parameter/defaults/main.yml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-# defaults file for test_mongodb_user
-mongodb_admin_user: test_root
-mongodb_admin_password: saE_Rr9!gE6gh#e~R#nZ
-mongod_auth: false
-kill_signal: SIGTERM
-# Should be one of
-# --storageEngine wiredTiger --wiredTigerEngineConfigString="cache_size=200M"
-# --storageEngine mmapv1 --nojournal
-mongod_storage_engine_opts: "--storageEngine wiredTiger --wiredTigerEngineConfigString='cache_size=200M'"
-mongodb_user: mongodb
-mongodb_user_list:
- - { "name": "user1", "password": "password1", "roles": "read", "database": "test" }
- - { "name": "user2", "password": "password2", "roles": "readWrite", "database": "test" }
- - { "name": "user3", "password": "password3", "roles": "dbAdmin", "database": "test" }
- - { "name": "user4", "password": "password4", "roles": "userAdmin", "database": "test" }
- - { "name": "user5", "password": "password5", "roles": "clusterAdmin", "database": "admin" }
- - { "name": "user6", "password": "password6", "roles": "readAnyDatabase", "database": "admin" }
- - { "name": "user7", "password": "password7", "roles": "readWriteAnyDatabase", "database": "admin" }
- - { "name": "user8", "password": "password8", "roles": "userAdminAnyDatabase", "database": "admin" }
- - { "name": "user9", "password": "password9", "roles": "dbAdminAnyDatabase", "database": "admin" }
diff --git a/test/integration/targets/incidental_mongodb_parameter/tasks/main.yml b/test/integration/targets/incidental_mongodb_parameter/tasks/main.yml
deleted file mode 100644
index a0fda1dc..00000000
--- a/test/integration/targets/incidental_mongodb_parameter/tasks/main.yml
+++ /dev/null
@@ -1,143 +0,0 @@
-# test code for the mongodb_parameter module
-# (c) 2019, Rhys Campbell <rhys.james.campbell@googlemail.com>
-
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# ============================================================
-
-- name: Ensure tests home exists
- file:
- path: "{{ remote_tmp_dir }}/tests"
- state: directory
-
-- include_tasks: mongod_teardown.yml
-
-- include_tasks: mongod_singlenode.yml
-
-- name: Set syncdelay to 99
- mongodb_parameter:
- login_port: 3001
- param: syncdelay
- value: 99
- param_type: int
- register: sd_change
-
-- assert:
- that:
- - sd_change.before | int == 60
- - sd_change.after | int == 99
- - sd_change.changed == True
-
-- name: Set syncdelay to 99 (again)
- mongodb_parameter:
- login_port: 3001
- param: syncdelay
- value: 99
- param_type: int
- register: sd_change
-
-- assert:
- that:
- - sd_change.before | int == 99
- - sd_change.after | int == 99
- - sd_change.changed == False
-
-- name: Create admin user with module
- mongodb_user:
- login_port: 3001
- database: admin
- name: "{{ mongodb_admin_user }}"
- password: "{{ mongodb_admin_password }}"
- roles: root
- state: present
- register: mongodb_admin_user_created
-
-- assert:
- that:
- - mongodb_admin_user_created.changed == True
-
-- name: Kill all mongod processes
- command: pkill -{{ kill_signal }} mongod
- ignore_errors: true
-
-- name: Getting pids for mongod
- pids:
- name: mongod
- register: pids_of_mongod
-
-- name: Wait for all mongod processes to exit
- wait_for:
- path: "/proc/{{ item }}/status"
- state: absent
- delay: 3
- with_items: "{{ pids_of_mongod }}"
-
-- set_fact:
- mongod_auth: true
-
-- include_tasks: mongod_singlenode.yml
-# Tests with auth enabled
-
-- name: Set syncdelay to 59 with auth
- mongodb_parameter:
- login_port: 3001
- login_user: "{{ mongodb_admin_user }}"
- login_password: "{{ mongodb_admin_password }}"
- param: syncdelay
- value: 59
- param_type: int
- register: sd_change
-
-- assert:
- that:
- - sd_change.before | int == 60
- - sd_change.after | int == 59
- - sd_change.changed == True
-
-- name: Set syncdelay to 59 (again) with auth
- mongodb_parameter:
- login_port: 3001
- login_user: "{{ mongodb_admin_user }}"
- login_password: "{{ mongodb_admin_password }}"
- param: syncdelay
- value: 59
- param_type: int
- register: sd_change
-
-- assert:
- that:
- - sd_change.before | int == 59
- - sd_change.after | int == 59
- - sd_change.changed == False
-
-- name: Set authenticationMechanisms to MONGODB-X509 with auth (will fail)
- mongodb_parameter:
- login_port: 3001
- login_user: "{{ mongodb_admin_user }}"
- login_password: "{{ mongodb_admin_password }}"
- param: authenticationMechanisms
- value: "MONGODB-X509"
- param_type: str
- register: diag_change
- ignore_errors: yes
-
-- assert:
- that:
- - '"unable to change parameter" in diag_change.msg'
- - diag_change.failed == True
-
-# Clean up
-- include_tasks: mongod_teardown.yml
diff --git a/test/integration/targets/incidental_mongodb_parameter/tasks/mongod_singlenode.yml b/test/integration/targets/incidental_mongodb_parameter/tasks/mongod_singlenode.yml
deleted file mode 100644
index 291cb1c9..00000000
--- a/test/integration/targets/incidental_mongodb_parameter/tasks/mongod_singlenode.yml
+++ /dev/null
@@ -1,55 +0,0 @@
-- name: Set mongodb_user user for redhat
- set_fact:
- mongodb_user: "mongod"
- when: ansible_os_family == "RedHat"
-
-- set_fact:
- mongodb_nodes:
- - 3001
-
-- name: Create directories for mongod processes
- file:
- path: "{{ remote_tmp_dir }}/mongod{{ item }}"
- state: directory
- owner: "{{ mongodb_user }}"
- group: "{{ mongodb_user }}"
- mode: 0755
- recurse: yes
- with_items: "{{ mongodb_nodes }}"
-
-- name: Ensure {{ remote_tmp_dir }}/config dir exists
- file:
- path: "{{ remote_tmp_dir }}/config"
- state: directory
- owner: "{{ mongodb_user }}"
- group: "{{ mongodb_user }}"
- mode: 0755
-
-- name: Create keyfile
- copy:
- dest: "{{ remote_tmp_dir }}/my.key"
- content: |
- fd2CUrbXBJpB4rt74A6F
- owner: "{{ mongodb_user }}"
- group: "{{ mongodb_user }}"
- mode: 0600
- when: mongod_auth == True
-
-- name: Spawn mongod process without auth
- command: mongod --shardsvr --smallfiles {{ mongod_storage_engine_opts }} --dbpath mongod{{ item }} --port {{ item }} --logpath mongod{{ item }}/log.log --fork
- args:
- chdir: "{{ remote_tmp_dir }}"
- with_items: "{{ mongodb_nodes | sort }}"
- when: mongod_auth == False
-
-- name: Spawn mongod process with auth
- command: mongod --shardsvr --smallfiles {{ mongod_storage_engine_opts }} --dbpath mongod{{ item }} --port {{ item }} --logpath mongod{{ item }}/log.log --fork --auth --keyFile my.key
- args:
- chdir: "{{ remote_tmp_dir }}"
- with_items: "{{ mongodb_nodes | sort }}"
- when: mongod_auth == True
-
-- name: Wait for mongod to start responding
- wait_for:
- port: "{{ item }}"
- with_items: "{{ mongodb_nodes }}"
diff --git a/test/integration/targets/incidental_mongodb_parameter/tasks/mongod_teardown.yml b/test/integration/targets/incidental_mongodb_parameter/tasks/mongod_teardown.yml
deleted file mode 100644
index a904a718..00000000
--- a/test/integration/targets/incidental_mongodb_parameter/tasks/mongod_teardown.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-- name: Kill all mongod processes
- command: pkill -{{ kill_signal }} mongod
- ignore_errors: true
-
-- name: Getting pids for mongod
- pids:
- name: mongod
- register: pids_of_mongod
-
-- name: Wait for all mongod processes to exit
- wait_for:
- path: "/proc/{{ item }}/status"
- state: absent
- delay: 1
- with_items: "{{ pids_of_mongod }}"
-
-- name: Remove all mongod folders
- file:
- path: "{{ remote_tmp_dir }}/{{ item }}"
- state: absent
- with_items:
- - mongod3001
-
-- name: Remove all mongod sock files
- shell: rm -Rf /tmp/mongodb*.sock
diff --git a/test/integration/targets/incidental_setup_mongodb/aliases b/test/integration/targets/incidental_setup_mongodb/aliases
deleted file mode 100644
index 136c05e0..00000000
--- a/test/integration/targets/incidental_setup_mongodb/aliases
+++ /dev/null
@@ -1 +0,0 @@
-hidden
diff --git a/test/integration/targets/incidental_setup_mongodb/defaults/main.yml b/test/integration/targets/incidental_setup_mongodb/defaults/main.yml
deleted file mode 100644
index de0b76d3..00000000
--- a/test/integration/targets/incidental_setup_mongodb/defaults/main.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-mongodb_version: "4.0"
-
-apt:
- keyserver: "keyserver.ubuntu.com"
- keyserver_id: "9DA31620334BD75D9DCB49F368818C72E52529D4"
- repo: "deb [ arch=amd64 ] http://repo.mongodb.org/apt/ubuntu {{ansible_distribution_release}}/mongodb-org/{{mongodb_version}} multiverse"
-
-mongodb_packages:
- mongod: mongodb-org-server
- mongos: mongodb-org-mongos
- mongo: mongodb-org-shell
-
-yum:
- name: mongodb-org
- description: "Official MongoDB {{mongodb_version}} yum repo"
- baseurl: https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/{{mongodb_version}}/x86_64/
- gpgcheck: 1
- gpgkey: https://www.mongodb.org/static/pgp/server-{{mongodb_version}}.asc
- redhat8url: https://repo.mongodb.org/yum/redhat/7/mongodb-org/{{mongodb_version}}/x86_64/
- fedoraurl: https://repo.mongodb.org/yum/amazon/2013.03/mongodb-org/{{mongodb_version}}/x86_64/
-
-debian_packages_py2:
- - python-dev
- - python-setuptools
- - python-pip
-
-debian_packages_py36:
- - python3.6-dev
- - python3-setuptools
- - python3-pip
-
-redhat_packages_py2:
- - python-devel
- - python-setuptools
- - python-pip
-
-redhat_packages_py3:
- - python3-devel
- - python3-setuptools
- - python3-pip
-
-# Do not install requests[security] via pip. It will cause test failures.
-# See https://github.com/ansible/ansible/pull/66319
-pip_packages:
- - psutil==5.8.0
- - pymongo==3.12.2
diff --git a/test/integration/targets/incidental_setup_mongodb/handlers/main.yml b/test/integration/targets/incidental_setup_mongodb/handlers/main.yml
deleted file mode 100644
index 1b73525e..00000000
--- a/test/integration/targets/incidental_setup_mongodb/handlers/main.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-- name: Remove debian_packages_py2
- apt:
- name: "{{ debian_packages_py2 }}"
- state: absent
-
-- name: Remove debian_packages_py36
- apt:
- name: "{{ debian_packages_py36 }}"
- state: absent
-
-- name: Remove redhat_packages_py2
- yum:
- name: "{{ redhat_packages_py36 }}"
- state: absent
-
-- name: Remove redhat_packages_py36
- yum:
- name: "{{ redhat_packages_py36 }}"
- state: absent
-
-- name: remove mongodb pip packages
- pip:
- name: "{{ pip_packages }}"
- state: absent
diff --git a/test/integration/targets/incidental_setup_mongodb/tasks/main.yml b/test/integration/targets/incidental_setup_mongodb/tasks/main.yml
deleted file mode 100644
index 3bd090ca..00000000
--- a/test/integration/targets/incidental_setup_mongodb/tasks/main.yml
+++ /dev/null
@@ -1,168 +0,0 @@
-# (c) 2019, Rhys Campbell <rhys.james.campbell@googlemail.com>
-
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# ============================================================
-
-# https://docs.mongodb.com/manual/tutorial/install-mongodb-on-ubuntu/
-# Support for Ubuntu 14.04 has been removed from MongoDB 4.0.10+, 3.6.13+, and 3.4.21+.
-# CentOS6 has python version issues
-# Ubuntu 20.04 does not yet have the required packages
-- meta: end_play
- when: (ansible_distribution == 'Ubuntu' and ansible_distribution_version == '14.04')
- or (ansible_distribution == 'Ubuntu' and ansible_distribution_version == '20.04')
- or (ansible_os_family == "RedHat" and ansible_distribution_major_version == '6')
- or ansible_os_family == "Suse"
- or ansible_distribution == 'Fedora'
- or (ansible_facts['distribution'] == "CentOS")
-
-# Ubuntu
-- name: Import MongoDB public GPG Key
- apt_key:
- keyserver: "{{ apt.keyserver }}"
- id: "{{ apt.keyserver_id }}"
- when:
- - ansible_distribution_version in ["16.04", "18.04"]
- - ansible_distribution == 'Ubuntu'
-
-- name: Add MongoDB repository into sources list
- apt_repository:
- repo: "{{ apt.repo }}"
- state: present
- update_cache: yes
- when:
- - ansible_distribution_version in ["16.04", "18.04"]
- - ansible_distribution == 'Ubuntu'
-
-# Need to handle various platforms here. Package name will not always be the same
-- name: Ensure mongod package is installed
- apt:
- name: "{{ mongodb_packages.mongod }}"
- state: present
- force: yes
- when:
- - ansible_distribution == 'Ubuntu'
-
-- name: Ensure mongos package is installed
- apt:
- name: "{{ mongodb_packages.mongos }}"
- state: present
- force: yes
- when:
- - ansible_distribution == 'Ubuntu'
-
-- name: Ensure mongo client is installed
- apt:
- name: "{{ mongodb_packages.mongo }}"
- state: present
- force: yes
- when:
- - ansible_distribution == 'Ubuntu'
-# EOF Ubuntu
-
-# Redhat
-- name: Add MongopDB repo
- yum_repository:
- name: "{{ yum.name }}"
- description: "{{ yum.description }}"
- baseurl: "{{ yum.baseurl }}"
- gpgcheck: "{{ yum.gpgcheck }}"
- gpgkey: "{{ yum.gpgkey }}"
- when:
- - ansible_os_family == "RedHat"
- - ansible_distribution_version.split('.')[0]|int <= 7
- - not ansible_distribution == "Fedora"
-
-
-- name: RedHat 8 repo not yet available so use 7 url
- yum_repository:
- name: "{{ yum.name }}"
- description: "{{ yum.description }}"
- baseurl: "{{ yum.redhat8url }}"
- gpgcheck: "{{ yum.gpgcheck }}"
- gpgkey: "{{ yum.gpgkey }}"
- when:
- - ansible_os_family == "RedHat"
- - ansible_distribution_version.split('.')[0]|int == 8
- - not ansible_distribution == "Fedora"
-
-- name: Another url for Fedora based systems
- yum_repository:
- name: "{{ yum.name }}"
- description: "{{ yum.description }}"
- baseurl: "{{ yum.fedoraurl }}"
- gpgcheck: "{{ yum.gpgcheck }}"
- gpgkey: "{{ yum.gpgkey }}"
- when:
- - ansible_distribution == "Fedora"
-
-- name: Ensure mongod package is installed
- yum:
- name: "{{ mongodb_packages.mongod }}"
- state: present
- when: ansible_os_family == "RedHat"
-
-- name: Ensure mongos package is installed
- yum:
- name: "{{ mongodb_packages.mongos }}"
- state: present
- when: ansible_os_family == "RedHat"
-
-- name: Ensure mongo client is installed
- yum:
- name: "{{ mongodb_packages.mongo }}"
- state: present
- when: ansible_os_family == "RedHat"
-# EOF Redhat
-
-- name: Install debian_packages
- apt:
- name: "{{ debian_packages_py2 }}"
- when:
- - ansible_os_family == "Debian"
- - ansible_distribution_version == "16.04"
- notify: Remove debian_packages_py2
-
-- name: Install debian_packages
- apt:
- name: "{{ debian_packages_py36 }}"
- when:
- - ansible_os_family == "Debian"
- - ansible_distribution_version == "18.04"
- notify: Remove debian_packages_py36
-
-- name: Install redhat_packages_py2
- yum:
- name: "{{ redhat_packages_py2 }}"
- when:
- - ansible_os_family == "RedHat"
- - ansible_distribution_version|float < 8
- - not (ansible_os_family == "RedHat" and ansible_distribution_version|float < 8)
- notify: Remove redhat_packages_py2
-
-- name: Install redhat_packages_py3
- yum:
- name: "{{ redhat_packages_py3 }}"
- when:
- - ansible_os_family == "RedHat"
- - ansible_distribution_version|float >= 8
- notify: Remove redhat_packages_py3
-
-- name: Install pip packages
- pip:
- name: "{{ pip_packages }}"
- state: present
- notify: remove mongodb pip packages
diff --git a/test/integration/targets/incidental_setup_tls/aliases b/test/integration/targets/incidental_setup_tls/aliases
deleted file mode 100644
index 136c05e0..00000000
--- a/test/integration/targets/incidental_setup_tls/aliases
+++ /dev/null
@@ -1 +0,0 @@
-hidden
diff --git a/test/integration/targets/incidental_setup_tls/files/ca_certificate.pem b/test/integration/targets/incidental_setup_tls/files/ca_certificate.pem
deleted file mode 100644
index a438d926..00000000
--- a/test/integration/targets/incidental_setup_tls/files/ca_certificate.pem
+++ /dev/null
@@ -1,19 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDAjCCAeqgAwIBAgIJANguFROhaWocMA0GCSqGSIb3DQEBCwUAMDExIDAeBgNV
-BAMMF1RMU0dlblNlbGZTaWduZWR0Um9vdENBMQ0wCwYDVQQHDAQkJCQkMB4XDTE5
-MDExMTA4MzMxNVoXDTI5MDEwODA4MzMxNVowMTEgMB4GA1UEAwwXVExTR2VuU2Vs
-ZlNpZ25lZHRSb290Q0ExDTALBgNVBAcMBCQkJCQwggEiMA0GCSqGSIb3DQEBAQUA
-A4IBDwAwggEKAoIBAQDqVt84czSxWnWW4Ng6hmKE3NarbLsycwtjrYBokV7Kk7Mp
-7PrBbYF05FOgSdJLvL6grlRSQK2VPsXdLfEv5uFXX6gyd2WQwKCiGGf4UY4ZIl4l
-JVpSDsBV2orR4pOIf1s1+iSwvcRQkX46SVjoKWbDUc4VLo1uy8UvavQI+DMioYyy
-0K2MbRs7oG2rdKks8zisfT0ymKnrFTdVeUjIrg0sStaMnf9VVkcEeYkfNY0vWqdn
-CV5wPfDBlnnxGMgqGdLSpzfyJ7qafFET+q+gOvjsEqzn7DvlPkmk86hIIWXKi3aM
-A9swknL3rnagJL6GioWRpYUwKdRKmZxdyr4I2JTTAgMBAAGjHTAbMAwGA1UdEwQF
-MAMBAf8wCwYDVR0PBAQDAgEGMA0GCSqGSIb3DQEBCwUAA4IBAQACTpPBf5WSwZ7r
-hrbPUN3qVh70HI0ZNK2jlK6b5fpSdw3JI/GQl0Kw3eGICLzwTByWvhD62U7IigL5
-0UWxWuEod310Y/qo/7OxRVPp5PH/0oNGoKHhEzas2ii0heQYGsHQUKGzYNNyVfjy
-nqBFz5AcKf067LcXivYqod6JDQHqFq/5/hWlIsHHrZIeijqqtthPq39GlGAYO+AB
-U66nzlH7YQgmfYfy6l7O4LsjXf/bz9rWvueO3NqCsmXV+FacDkOkwWA5Kf6rcgNL
-3G+2HAVTRIXDnO4ShnK6aYMW+UklpYRlVYBBUOdwoNIp5gI+BlSc1IuF6PdLVt3q
-VdjN1MjY
------END CERTIFICATE-----
diff --git a/test/integration/targets/incidental_setup_tls/files/ca_key.pem b/test/integration/targets/incidental_setup_tls/files/ca_key.pem
deleted file mode 100644
index 0a950eda..00000000
--- a/test/integration/targets/incidental_setup_tls/files/ca_key.pem
+++ /dev/null
@@ -1,28 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDqVt84czSxWnWW
-4Ng6hmKE3NarbLsycwtjrYBokV7Kk7Mp7PrBbYF05FOgSdJLvL6grlRSQK2VPsXd
-LfEv5uFXX6gyd2WQwKCiGGf4UY4ZIl4lJVpSDsBV2orR4pOIf1s1+iSwvcRQkX46
-SVjoKWbDUc4VLo1uy8UvavQI+DMioYyy0K2MbRs7oG2rdKks8zisfT0ymKnrFTdV
-eUjIrg0sStaMnf9VVkcEeYkfNY0vWqdnCV5wPfDBlnnxGMgqGdLSpzfyJ7qafFET
-+q+gOvjsEqzn7DvlPkmk86hIIWXKi3aMA9swknL3rnagJL6GioWRpYUwKdRKmZxd
-yr4I2JTTAgMBAAECggEBALpg9ZDUMCiOpc+mbNO/ZkP90M7u38Q0M+7HY8XHOPkt
-l+XUkWueSMRLhSeLDzMlnwf1HyN8RZLaJkzP6XAL1VXEwuXAiIskaZ4Cg07Arp/W
-8cHhf4CcMuUVuCtOZcC+ajD4Do5zn9vkm9yH0ap0o0LdoWa/a8WfU+luy0EHBsSW
-6qqI+nqNFmISluVbfWt7t3zp273+8sir6YeHQu9G91/jzggv8rHmu4EHhi3cnU0K
-vY6OPCGBL7nrg9Rv1LSFpH95TvlIM6/Cm0AjgW7m6XwWUTaI9p+GvKzrYUSLd9L/
-QxlmAwiu/sBTXLrsWyr8XEtj+lVGxQ6eFbf6E+lUm8ECgYEA+8Wgmhf3VsC3gvJz
-w2jApEoOioD5iGOWGClGVURkfaBhFELr4XCTVMdBuCtxT7LYTMHTAlBqIbdWDjB4
-m/E417hLGogSDy7j0R0Mx75OOGEitxYUhe0VGDNoytgCNd2UnTMt42lp+9vAHZag
-INhVDOnxRNdtNTf1yYkWUMEbh1sCgYEA7kZNJXPVYJtR78+km/Gcv64Umci7KUV+
-hYc7chR5xv3cXvXg5eojKa4G7CyMQTX7VnRa6CiQKdN73AbIAhS4Oy5UlCOKtmb8
-xnBiOAYwSpOfIeZhjq0RvEeZX0t6u7XsErBZ03rEPKXF2nNDo1x8byrlKPtlUzwJ
-gb5yjmK/mekCgYEA1TWQAs5m4+2Bun+tbv7nnHkmhT4hktGays0xRYYMf6Jwc6MU
-dC5MZg/zZI5Nf8uZhq7hDWWh6vmCA7QifxSxKWVlHIu8l2UDAhRSvVg4j2Aa8Obe
-7GdQZNUsWhLBFHKXpuQvaRTc7q8yqxvicM4igDQg4EZ6sgW4vDm+TxapRF8CgYAz
-n6mhPqpxRtWGxo8cdkmGwfmWpAXg2DykQ3teqQ8FTQUM0erLBWJe6mR3kONGUaLF
-xWnYuMkbNsW0EwgMY17S+6O5gMXR5RhJChpNlxGpZrhoiNiEJ/0atMyG9/x8ZNrj
-5a9ggU248hWe0bBK2YPgNgP2UBlQ4kYRBSkerkhi2QKBgF+tlpyqcU+0iY82qRS2
-wMf7oI2pWR8nX9LPAY/nnvwWvqwcAFJPMlSMTu8Ext6h7l9yu+7JGL6JWwsO57Lb
-Gm/RxbuZ/kG/13+lSNmZiyHrhj6hZhkAMeFM34fpT4+DBXqSxZuvdrmwBc5B2jYg
-F9Bv8gcmZlGhqONL23evr9Gu
------END PRIVATE KEY-----
diff --git a/test/integration/targets/incidental_setup_tls/files/client_certificate.pem b/test/integration/targets/incidental_setup_tls/files/client_certificate.pem
deleted file mode 100644
index 501d8389..00000000
--- a/test/integration/targets/incidental_setup_tls/files/client_certificate.pem
+++ /dev/null
@@ -1,20 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDRjCCAi6gAwIBAgIBAjANBgkqhkiG9w0BAQsFADAxMSAwHgYDVQQDDBdUTFNH
-ZW5TZWxmU2lnbmVkdFJvb3RDQTENMAsGA1UEBwwEJCQkJDAeFw0xOTAxMTEwODMz
-MThaFw0yOTAxMDgwODMzMThaMC0xGjAYBgNVBAMMEWFuc2libGUudGxzLnRlc3Rz
-MQ8wDQYDVQQKDAZjbGllbnQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCoM+OQ3HCnCUAAz9KGGTwWB9hQbUfAZXm/stlb2/uOAp3rNwxAlCs/giymBHE6
-Iu6mrK006Vn+Z9ibqIrD2LuCOxcu25y8goqG62TgdP5sa9wR+597s0XssnwnaY8y
-bJ3p2zWAJvMgqQ0iNW/ZynpWbO85K5SryUykF7FAeNU9ogGGlIwCPjHhPvnwjkqd
-yDqaA1VaJKDUWIF9joI7sV4VLgGhQvzXRrHULsTeIF2m0+ebL0PTNEWHQ0dtgLYX
-kW7YO4Y6+n3cjHNH4qTof8V30EK8pk8kTdJ/x6ubwf+klFCAyroOxNOaxUy299Oo
-yD6qIPJPnGkPhrKtWnWIhNzJAgMBAAGjbTBrMAkGA1UdEwQCMAAwCwYDVR0PBAQD
-AgWgMBMGA1UdJQQMMAoGCCsGAQUFBwMCMDwGA1UdEQQ1MDOCEWFuc2libGUudGxz
-LnRlc3RzghNNYWNCb29rLVByby00LmxvY2Fsgglsb2NhbGhvc3QwDQYJKoZIhvcN
-AQELBQADggEBAK214+VVXnGnsUlvd9Q6A2Ea6UGrr6b7xkmlnIaNd+6xoUsDsHob
-srHYm7UC0uLi1KwSunI7AU5ZELVEUfAmJzh3O4d6C5sQyqKYPqd5harWOQ3BOD0I
-plHpp7qMtsPDuJBtmE/bmvF85eto0H7pPz+cTTXRlOaVVeiHjMggFcXdy1MzGo9C
-X/4wLQmsFeypTfe+ZGqvDh99VV+ffNMIsMh+opWEloaKiHmDKB6S9aC/MsVVM4RR
-nHm/UKTOukaGE9QIPkSSaygv3sBkVnQ2SHMvvtnjPHVHlizNoq6+YTnuOvKpo4o5
-V7Bij+W7rkBQLsEfwv2IC+gzmRz2yxr2tXk=
------END CERTIFICATE-----
diff --git a/test/integration/targets/incidental_setup_tls/files/client_key.pem b/test/integration/targets/incidental_setup_tls/files/client_key.pem
deleted file mode 100644
index 850260a8..00000000
--- a/test/integration/targets/incidental_setup_tls/files/client_key.pem
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEowIBAAKCAQEAqDPjkNxwpwlAAM/Shhk8FgfYUG1HwGV5v7LZW9v7jgKd6zcM
-QJQrP4IspgRxOiLupqytNOlZ/mfYm6iKw9i7gjsXLtucvIKKhutk4HT+bGvcEfuf
-e7NF7LJ8J2mPMmyd6ds1gCbzIKkNIjVv2cp6VmzvOSuUq8lMpBexQHjVPaIBhpSM
-Aj4x4T758I5Kncg6mgNVWiSg1FiBfY6CO7FeFS4BoUL810ax1C7E3iBdptPnmy9D
-0zRFh0NHbYC2F5Fu2DuGOvp93IxzR+Kk6H/Fd9BCvKZPJE3Sf8erm8H/pJRQgMq6
-DsTTmsVMtvfTqMg+qiDyT5xpD4ayrVp1iITcyQIDAQABAoIBAHPszzpXs4xr46Cr
-mvyxB6hnX76OkpUXWwGz0fptcsI9K3mhRuB7PhNXNE53YVIgITreZ8G/0jZ0e+VM
-E9dG2HS5JRE2ap/BmJfERJIuD+vJqrL6KMCondi0arz/E6I9GdjDK+xW69nmqRaa
-nawM0KQgD//m+WAsLJYrfg5hORZwI2SHaahawnCp0QaMmz3bdDWKRacM3q0UFX46
-Ze6CaZkUn+e1rHsTMcZBvxQWIVzysFNXh150idIB/PxL5YfCQqTSAj1c/nxaxz6a
-BvHFlpaYR3tvXXlexxfjglCwsGyckbvTyP1cBZqpv5oES+VKt2PrOve9Zyax+CYT
-0uQf6cECgYEA09+46QHXLfWh6jiJYu9skC9UrLU5czfCNB6PrUtFcjPFMYjZDcw9
-inJmcuTPXmfplxc47YDfpwotU+szTJDF+R8kknnfw9zVr/sIwZ5wsFfUQl/56Svn
-AIOVvHHvcvMX95XKGiuTsoCIJZNjJN3l3ztu/bRciuiVLyizglwIVrMCgYEAyzvK
-PFlWilbp3GPJlnW7x1bUxe1ziLE/Um+ujZx96+fy34hJLFdNdNzpNUjoOf3IDTGq
-6xl+vXcf12gimWMFcD3qNIGKHBDM9cIB2RDbb6YcqI8lOqopsmOyGmVLPkRpCoUK
-72kacQwvw6M9xjmpiG3dN8lE881jDmZi+hyCnJMCgYEAoIQnQAhP8Jbeo2dP1q+T
-bS0elnX532uH6xqYOW8EXwAPznZiEw0ANspzCWqGHHzXQMusKmtvhcq1CpXvWHt6
-MUHB4GMK/wVosxmZya5yq3bu7ZZu7JOBQCdwosMi6NB5AO7vnaIUFLFB9E3UWBLw
-243YicdCMU8B7yeD0ChPfPcCgYA1dYHKBBn+g8Q6Y8lIGaoOUmnfsok8gJtOfPAm
-ce6xmi7J29iboE9QmTeC+62Sa44u4ky6UNeE0QwAJnVLcb+hebfcneKNZWH0l1bT
-GVsPcFuDfzvkxZP4R782sERtmaMj0EFDHpuE9xatWIhMVyigKX4SSZAorXML+6S3
-c75rnwKBgBR+WU934wS+DbwTLlUB2mJWqJMEbOH/CUwPC7+VN4h1h3/i455iAeiU
-BizLS0SlD+MoSbC7URcZuquqGkmMlnJXoxF+NdxoWZK78tYNftryWoR87TloiVc/
-LhkxZxje4tgW/mTLqH3zKDoyyzDzG6Q6tAUN2ZTjJFEws7qF30Qe
------END RSA PRIVATE KEY-----
diff --git a/test/integration/targets/incidental_setup_tls/files/server_certificate.pem b/test/integration/targets/incidental_setup_tls/files/server_certificate.pem
deleted file mode 100644
index 4a0ebc6e..00000000
--- a/test/integration/targets/incidental_setup_tls/files/server_certificate.pem
+++ /dev/null
@@ -1,20 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDRjCCAi6gAwIBAgIBATANBgkqhkiG9w0BAQsFADAxMSAwHgYDVQQDDBdUTFNH
-ZW5TZWxmU2lnbmVkdFJvb3RDQTENMAsGA1UEBwwEJCQkJDAeFw0xOTAxMTEwODMz
-MTZaFw0yOTAxMDgwODMzMTZaMC0xGjAYBgNVBAMMEWFuc2libGUudGxzLnRlc3Rz
-MQ8wDQYDVQQKDAZzZXJ2ZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQDIwErHwAesRBfd9HiZkmB3VYh28c1QkE9I8nYyHJKX2ZBUhAzK+h80BkcTJJ94
-265qWyACH/wl54Xe/ofFUFrGa4vz0qz4UkL/KI0OGw28Y4qnKdorb9DumbiIPB+9
-I9TJT9vhtXTxBNlBTpv3ONHL8EzdV6ZmuvELU11H27oQ4xoUYhfXPXLMLK0sOnXZ
-lt0BOMMd5fVpJVa8fvXiw3626a0aXCr4e/MWUsBFRnzrXfgoW+AjYoTjKKS2hLYo
-8//MM05h7ROIXrNe990sf9C1G+fOThmOMszK9sjMhu2xHranRcz5aA0UTfyOjTs8
-9WexUYhC5VorYyRWtVZu2mDjAgMBAAGjbTBrMAkGA1UdEwQCMAAwCwYDVR0PBAQD
-AgWgMBMGA1UdJQQMMAoGCCsGAQUFBwMBMDwGA1UdEQQ1MDOCEWFuc2libGUudGxz
-LnRlc3RzghNNYWNCb29rLVByby00LmxvY2Fsgglsb2NhbGhvc3QwDQYJKoZIhvcN
-AQELBQADggEBAFoPBeB6tQhFS1198sia5NDHDDrghDOIlE0QbaoA+MSKzsaIy8Mu
-mNcM2ewYpT600XXTBxcqF6/vuKL9OEbvivtRYQu1YfkifN1jzREoWTieUkR5ytzt
-8ATfFkgTWJmiRiOIb/fNgewvhd+aKxep0OGwDiSKKl1ab6F17Cp4iK8sDBWmnUb6
-0Wf7pfver1Gl0Gp8vRXGUuc8a7udA9a8mV70HJlLkMdMvR9U8Bqih0+iRaqNWXRZ
-7Lc6v5LbzrW/ntilmgU6F0lwxPydg49MY4UrSXcjYLZs9T4iYHwTfLxFjFMIgGwn
-peYMKRj18akP9i2mjj5O2mRu4K+ecuUSOGI=
------END CERTIFICATE-----
diff --git a/test/integration/targets/incidental_setup_tls/files/server_key.pem b/test/integration/targets/incidental_setup_tls/files/server_key.pem
deleted file mode 100644
index c79ab648..00000000
--- a/test/integration/targets/incidental_setup_tls/files/server_key.pem
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEowIBAAKCAQEAyMBKx8AHrEQX3fR4mZJgd1WIdvHNUJBPSPJ2MhySl9mQVIQM
-yvofNAZHEySfeNuualsgAh/8JeeF3v6HxVBaxmuL89Ks+FJC/yiNDhsNvGOKpyna
-K2/Q7pm4iDwfvSPUyU/b4bV08QTZQU6b9zjRy/BM3VemZrrxC1NdR9u6EOMaFGIX
-1z1yzCytLDp12ZbdATjDHeX1aSVWvH714sN+tumtGlwq+HvzFlLARUZ86134KFvg
-I2KE4yiktoS2KPP/zDNOYe0TiF6zXvfdLH/QtRvnzk4ZjjLMyvbIzIbtsR62p0XM
-+WgNFE38jo07PPVnsVGIQuVaK2MkVrVWbtpg4wIDAQABAoIBAHw3wA3pnNXTLJGC
-fD1KfbZZjp9K76gyI10X6lsHow2i6dPiAah3LGecms4VkzfNdxcIW7303Kj3obZh
-+ND277RnR6oPakgdXqdUCDP6OX2gemMFWqIWBkodhDmIOntmeHw4le4LwdiBD42B
-frBy0B5JCsbLPYPDmPNRGh8krvVS+Eir4hb4tK95TPMSL0vEjvHYFbCxv7//Ri1p
-3CROGp2CGX0WZ+Zs0crRNoIhRRM6kLAhROcqejtnEy6o7l5CWpCAL2vxlE9y8/kL
-iRawSZRFZnz/zGnqpx0vswgvijkuPfcNGMSzdwaiDgQz8D0GkJ7s9VgzZJazNy+1
-ET/4YIECgYEA612rwP9Ar9qdYbmmMPaJzITnaIrNGfO2JvaQqZt+DG8sVgdxL7V5
-D6emcw406drKRZvFAxnW6ZW2bVpmit02osl0re2A/nOTXLNuo338Qkap/hG8YZrF
-bw7w75pFa/rwlDtedjBnGHO2KbRXeU5Hn5wLoKjYgJoF6Ht+PPdL0IsCgYEA2lnC
-pQEhM51iRMDqNdmVJyvsTNU1ikoO8HaXHq+LwOQETaKMnDwp4Bn14E815CTulAc/
-tsDTKSDk6umZ+IufG1a2v7CqgKVwkB4HkgxKFQs2gQdTFfoMi5eeHR+njuNtklp1
-9fWfKHsP/ddrg+iTVTRZBLWexgKK89IMHYalpAkCgYEAy0Q3a9NF81mTJ+3kOE8C
-zO1OyLtuzGXsvxOb9c6C+owctyNwPeq05a89EgqH6hr5K0qOx9HOCCcyyJgVDQJl
-CAuByB/gkmAQOTQBbhMFA9vxPanljknTDsnRjKwoHkw2712ig+Hjd3ufK79C+FGB
-i7eBVzva1p2uUowshsxv3mcCgYAOFiRciMofjlO8o8V4W+Undcn02vxtQ4HbOYte
-S2z0sMEmUQpJOghpkMMwCWwsn8VUf3M40w/MY3bhQNjSFA/br6hyjW8yhXnRkl5i
-qbBN0z9c66AMlukgSFPHBTfGHB4Bhxx9Fa+C6Q2LDs6839BBevMTPrRTie509GQb
-s4gUIQKBgAvE8wLcmozno0GLDnBdKRZP/C7tmVnAINuraITPUBTASwI+Qo8ILigQ
-LRLaDqF84BEpjb8vdzkYFQqRQSZ8BI8NydfuKEFSBfL27sBvSGMYQJVm6bryUmPq
-T3ayaeZ4Wb3FFDijgtM9dRKyf7p4hQPOqM44QrntAtb43b2Q5L7M
------END RSA PRIVATE KEY-----
diff --git a/test/integration/targets/incidental_setup_tls/tasks/main.yml b/test/integration/targets/incidental_setup_tls/tasks/main.yml
deleted file mode 100644
index c5b7a23a..00000000
--- a/test/integration/targets/incidental_setup_tls/tasks/main.yml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-# Generated certificate with: https://github.com/michaelklishin/tls-gen
-# ~/tls-gen/basic# make PASSWORD=bunnies CN=ansible.tls.tests
-# verify with: make info
-
-- name: ensure target directory is present
- file:
- path: /tls
- state: directory
-
-- name: ensure TLS files are present
- copy:
- src: "{{ item }}"
- dest: "/tls/{{ item }}"
- loop:
- - ca_certificate.pem
- - ca_key.pem
- - client_certificate.pem
- - client_key.pem
- - server_certificate.pem
- - server_key.pem
diff --git a/test/integration/targets/incidental_win_copy/aliases b/test/integration/targets/incidental_win_copy/aliases
deleted file mode 100644
index a5fc90dc..00000000
--- a/test/integration/targets/incidental_win_copy/aliases
+++ /dev/null
@@ -1,2 +0,0 @@
-shippable/windows/incidental
-windows
diff --git a/test/integration/targets/incidental_win_copy/defaults/main.yml b/test/integration/targets/incidental_win_copy/defaults/main.yml
deleted file mode 100644
index 5d8a1d23..00000000
--- a/test/integration/targets/incidental_win_copy/defaults/main.yml
+++ /dev/null
@@ -1 +0,0 @@
-test_win_copy_path: C:\ansible\win_copy .ÅÑŚÌβŁÈ [$!@^&test(;)]
diff --git a/test/integration/targets/incidental_win_copy/files/foo.txt b/test/integration/targets/incidental_win_copy/files/foo.txt
deleted file mode 100644
index 7c6ded14..00000000
--- a/test/integration/targets/incidental_win_copy/files/foo.txt
+++ /dev/null
@@ -1 +0,0 @@
-foo.txt
diff --git a/test/integration/targets/incidental_win_copy/files/subdir/bar.txt b/test/integration/targets/incidental_win_copy/files/subdir/bar.txt
deleted file mode 100644
index 76018072..00000000
--- a/test/integration/targets/incidental_win_copy/files/subdir/bar.txt
+++ /dev/null
@@ -1 +0,0 @@
-baz
diff --git a/test/integration/targets/incidental_win_copy/files/subdir/subdir2/baz.txt b/test/integration/targets/incidental_win_copy/files/subdir/subdir2/baz.txt
deleted file mode 100644
index 76018072..00000000
--- a/test/integration/targets/incidental_win_copy/files/subdir/subdir2/baz.txt
+++ /dev/null
@@ -1 +0,0 @@
-baz
diff --git a/test/integration/targets/incidental_win_copy/files/subdir/subdir2/subdir3/subdir4/qux.txt b/test/integration/targets/incidental_win_copy/files/subdir/subdir2/subdir3/subdir4/qux.txt
deleted file mode 100644
index 78df5b06..00000000
--- a/test/integration/targets/incidental_win_copy/files/subdir/subdir2/subdir3/subdir4/qux.txt
+++ /dev/null
@@ -1 +0,0 @@
-qux \ No newline at end of file
diff --git a/test/integration/targets/incidental_win_copy/tasks/main.yml b/test/integration/targets/incidental_win_copy/tasks/main.yml
deleted file mode 100644
index b2ee103f..00000000
--- a/test/integration/targets/incidental_win_copy/tasks/main.yml
+++ /dev/null
@@ -1,34 +0,0 @@
----
-- name: create empty folder
- file:
- path: '{{role_path}}/files/subdir/empty'
- state: directory
- delegate_to: localhost
-
-# removes the cached zip module from the previous task so we can replicate
-# the below issue where win_copy would delete DEFAULT_LOCAL_TMP if it
-# had permission to
-# https://github.com/ansible/ansible/issues/35613
-- name: clear the local ansiballz cache
- file:
- path: "{{lookup('config', 'DEFAULT_LOCAL_TMP')}}/ansiballz_cache"
- state: absent
- delegate_to: localhost
-
-- name: create test folder
- win_file:
- path: '{{test_win_copy_path}}'
- state: directory
-
-- block:
- - name: run tests for local to remote
- include_tasks: tests.yml
-
- - name: run tests for remote to remote
- include_tasks: remote_tests.yml
-
- always:
- - name: remove test folder
- win_file:
- path: '{{test_win_copy_path}}'
- state: absent
diff --git a/test/integration/targets/incidental_win_copy/tasks/remote_tests.yml b/test/integration/targets/incidental_win_copy/tasks/remote_tests.yml
deleted file mode 100644
index 5abb5020..00000000
--- a/test/integration/targets/incidental_win_copy/tasks/remote_tests.yml
+++ /dev/null
@@ -1,471 +0,0 @@
----
-- name: fail when source does not exist remote
- win_copy:
- src: fakesource
- dest: fakedest
- remote_src: yes
- register: fail_remote_invalid_source
- failed_when: "'it does not exist' not in fail_remote_invalid_source.msg"
-
-- name: setup source folder for remote tests
- win_copy:
- src: files/
- dest: '{{test_win_copy_path}}\source\'
-
-- name: setup remote failure tests
- win_file:
- path: '{{item.path}}'
- state: '{{item.state}}'
- with_items:
- - { 'path': '{{test_win_copy_path}}\target\folder', 'state': 'directory' }
- - { 'path': '{{test_win_copy_path}}\target\file', 'state': 'touch' }
- - { 'path': '{{test_win_copy_path}}\target\subdir', 'state': 'touch' }
-
-- name: fail source is a file but dest is a folder
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\folder'
- remote_src: yes
- register: fail_remote_file_to_folder
- failed_when: "'dest is already a folder' not in fail_remote_file_to_folder.msg"
-
-- name: fail source is a file but dest is a folder
- win_copy:
- src: '{{test_win_copy_path}}\source\'
- dest: '{{test_win_copy_path}}\target\'
- remote_src: yes
- register: fail_remote_folder_to_file
- failed_when: "'dest is already a file' not in fail_remote_folder_to_file.msg"
-
-- name: fail source is a file dest parent dir is also a file
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\file\foo.txt'
- remote_src: yes
- register: fail_remote_file_parent_dir_file
- failed_when: "'is currently a file' not in fail_remote_file_parent_dir_file.msg"
-
-- name: fail source is a folder dest parent dir is also a file
- win_copy:
- src: '{{test_win_copy_path}}\source\subdir'
- dest: '{{test_win_copy_path}}\target\file'
- remote_src: yes
- register: fail_remote_folder_parent_dir_file
- failed_when: "'object at dest parent dir is not a folder' not in fail_remote_folder_parent_dir_file.msg"
-
-- name: fail to copy a remote file with parent dir that doesn't exist and filename is set
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\missing-dir\foo.txt'
- remote_src: yes
- register: fail_remote_missing_parent_dir
- failed_when: "'does not exist' not in fail_remote_missing_parent_dir.msg"
-
-- name: remove target after remote failure tests
- win_file:
- path: '{{test_win_copy_path}}\target'
- state: absent
-
-- name: create remote target after cleaning
- win_file:
- path: '{{test_win_copy_path}}\target'
- state: directory
-
-- name: copy single file remote (check mode)
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\foo-target.txt'
- remote_src: yes
- register: remote_copy_file_check
- check_mode: yes
-
-- name: get result of copy single file remote (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\target\foo-target.txt'
- register: remote_copy_file_actual_check
-
-- name: assert copy single file remote (check mode)
- assert:
- that:
- - remote_copy_file_check is changed
- - remote_copy_file_actual_check.stat.exists == False
-
-- name: copy single file remote
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\foo-target.txt'
- remote_src: yes
- register: remote_copy_file
-
-- name: get result of copy single file remote
- win_stat:
- path: '{{test_win_copy_path}}\target\foo-target.txt'
- register: remote_copy_file_actual
-
-- name: assert copy single file remote
- assert:
- that:
- - remote_copy_file is changed
- - remote_copy_file.operation == 'file_copy'
- - remote_copy_file.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - remote_copy_file.size == 8
- - remote_copy_file.original_basename == 'foo.txt'
- - remote_copy_file_actual.stat.exists == True
- - remote_copy_file_actual.stat.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
-
-- name: copy single file remote (idempotent)
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\foo-target.txt'
- remote_src: yes
- register: remote_copy_file_again
-
-- name: assert copy single file remote (idempotent)
- assert:
- that:
- - remote_copy_file_again is not changed
-
-- name: copy single file into folder remote (check mode)
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\'
- remote_src: yes
- register: remote_copy_file_to_folder_check
- check_mode: yes
-
-- name: get result of copy single file into folder remote (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\target\foo.txt'
- register: remote_copy_file_to_folder_actual_check
-
-- name: assert copy single file into folder remote (check mode)
- assert:
- that:
- - remote_copy_file_to_folder_check is changed
- - remote_copy_file_to_folder_actual_check.stat.exists == False
-
-- name: copy single file into folder remote
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\'
- remote_src: yes
- register: remote_copy_file_to_folder
-
-- name: get result of copy single file into folder remote
- win_stat:
- path: '{{test_win_copy_path}}\target\foo.txt'
- register: remote_copy_file_to_folder_actual
-
-- name: assert copy single file into folder remote
- assert:
- that:
- - remote_copy_file_to_folder is changed
- - remote_copy_file_to_folder.operation == 'file_copy'
- - remote_copy_file_to_folder.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - remote_copy_file_to_folder.size == 8
- - remote_copy_file_to_folder.original_basename == 'foo.txt'
- - remote_copy_file_to_folder_actual.stat.exists == True
- - remote_copy_file_to_folder_actual.stat.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
-
-- name: copy single file into folder remote (idempotent)
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\'
- remote_src: yes
- register: remote_copy_file_to_folder_again
-
-- name: assert copy single file into folder remote
- assert:
- that:
- - remote_copy_file_to_folder_again is not changed
-
-- name: copy single file to missing folder (check mode)
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\missing\'
- remote_src: yes
- register: remote_copy_file_to_missing_folder_check
- check_mode: yes
-
-- name: get result of copy single file to missing folder remote (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\target\missing\foo.txt'
- register: remote_copy_file_to_missing_folder_actual_check
-
-- name: assert copy single file to missing folder remote (check mode)
- assert:
- that:
- - remote_copy_file_to_missing_folder_check is changed
- - remote_copy_file_to_missing_folder_check.operation == 'file_copy'
- - remote_copy_file_to_missing_folder_actual_check.stat.exists == False
-
-- name: copy single file to missing folder remote
- win_copy:
- src: '{{test_win_copy_path}}\source\foo.txt'
- dest: '{{test_win_copy_path}}\target\missing\'
- remote_src: yes
- register: remote_copy_file_to_missing_folder
-
-- name: get result of copy single file to missing folder remote
- win_stat:
- path: '{{test_win_copy_path}}\target\missing\foo.txt'
- register: remote_copy_file_to_missing_folder_actual
-
-- name: assert copy single file to missing folder remote
- assert:
- that:
- - remote_copy_file_to_missing_folder is changed
- - remote_copy_file_to_missing_folder.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - remote_copy_file_to_missing_folder.operation == 'file_copy'
- - remote_copy_file_to_missing_folder.size == 8
- - remote_copy_file_to_missing_folder_actual.stat.exists == True
- - remote_copy_file_to_missing_folder_actual.stat.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
-
-- name: clear target for folder to folder test
- win_file:
- path: '{{test_win_copy_path}}\target'
- state: absent
-
-- name: copy folder to folder remote (check mode)
- win_copy:
- src: '{{test_win_copy_path}}\source'
- dest: '{{test_win_copy_path}}\target'
- remote_src: yes
- register: remote_copy_folder_to_folder_check
- check_mode: yes
-
-- name: get result of copy folder to folder remote (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\target'
- register: remote_copy_folder_to_folder_actual_check
-
-- name: assert copy folder to folder remote (check mode)
- assert:
- that:
- - remote_copy_folder_to_folder_check is changed
- - remote_copy_folder_to_folder_check.operation == 'folder_copy'
- - remote_copy_folder_to_folder_actual_check.stat.exists == False
-
-- name: copy folder to folder remote
- win_copy:
- src: '{{test_win_copy_path}}\source'
- dest: '{{test_win_copy_path}}\target'
- remote_src: yes
- register: remote_copy_folder_to_folder
-
-- name: get result of copy folder to folder remote
- win_find:
- paths: '{{test_win_copy_path}}\target'
- recurse: yes
- file_type: directory
- register: remote_copy_folder_to_folder_actual
-
-- name: assert copy folder to folder remote
- assert:
- that:
- - remote_copy_folder_to_folder is changed
- - remote_copy_folder_to_folder.operation == 'folder_copy'
- - remote_copy_folder_to_folder_actual.examined == 11
- - remote_copy_folder_to_folder_actual.matched == 6
- - remote_copy_folder_to_folder_actual.files[0].filename == 'source'
- - remote_copy_folder_to_folder_actual.files[1].filename == 'subdir'
- - remote_copy_folder_to_folder_actual.files[2].filename == 'empty'
- - remote_copy_folder_to_folder_actual.files[3].filename == 'subdir2'
- - remote_copy_folder_to_folder_actual.files[4].filename == 'subdir3'
- - remote_copy_folder_to_folder_actual.files[5].filename == 'subdir4'
-
-- name: copy folder to folder remote (idempotent)
- win_copy:
- src: '{{test_win_copy_path}}\source'
- dest: '{{test_win_copy_path}}\target'
- remote_src: yes
- register: remote_copy_folder_to_folder_again
-
-- name: assert copy folder to folder remote (idempotent)
- assert:
- that:
- - remote_copy_folder_to_folder_again is not changed
-
-- name: change remote file after folder to folder test
- win_copy:
- content: bar.txt
- dest: '{{test_win_copy_path}}\target\source\foo.txt'
-
-- name: remote remote folder after folder to folder test
- win_file:
- path: '{{test_win_copy_path}}\target\source\subdir\subdir2\subdir3\subdir4'
- state: absent
-
-- name: copy folder to folder remote after change
- win_copy:
- src: '{{test_win_copy_path}}\source'
- dest: '{{test_win_copy_path}}\target'
- remote_src: yes
- register: remote_copy_folder_to_folder_after_change
-
-- name: get result of copy folder to folder remote after change
- win_find:
- paths: '{{test_win_copy_path}}\target\source'
- recurse: yes
- patterns: ['foo.txt', 'qux.txt']
- register: remote_copy_folder_to_folder_after_change_actual
-
-- name: assert copy folder after changes
- assert:
- that:
- - remote_copy_folder_to_folder_after_change is changed
- - remote_copy_folder_to_folder_after_change_actual.matched == 2
- - remote_copy_folder_to_folder_after_change_actual.files[0].checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - remote_copy_folder_to_folder_after_change_actual.files[1].checksum == 'b54ba7f5621240d403f06815f7246006ef8c7d43'
-
-- name: clear target folder before folder contents to remote test
- win_file:
- path: '{{test_win_copy_path}}\target'
- state: absent
-
-- name: copy folder contents to folder remote with backslash (check mode)
- win_copy:
- src: '{{test_win_copy_path}}\source\'
- dest: '{{test_win_copy_path}}\target'
- remote_src: yes
- register: remote_copy_folder_content_backslash_check
- check_mode: yes
-
-- name: get result of copy folder contents to folder remote with backslash (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\target'
- register: remote_copy_folder_content_backslash_actual_check
-
-- name: assert copy folder content to folder remote with backslash (check mode)
- assert:
- that:
- - remote_copy_folder_content_backslash_check is changed
- - remote_copy_folder_content_backslash_actual_check.stat.exists == False
-
-- name: copy folder contents to folder remote with backslash
- win_copy:
- src: '{{test_win_copy_path}}\source\'
- dest: '{{test_win_copy_path}}\target'
- remote_src: yes
- register: remote_copy_folder_content_backslash
-
-- name: get result of copy folder contents to folder remote with backslash
- win_find:
- paths: '{{test_win_copy_path}}\target'
- recurse: yes
- file_type: directory
- register: remote_copy_folder_content_backslash_actual
-
-- name: assert copy folder content to folder remote with backslash
- assert:
- that:
- - remote_copy_folder_content_backslash is changed
- - remote_copy_folder_content_backslash.operation == 'folder_copy'
- - remote_copy_folder_content_backslash_actual.examined == 10
- - remote_copy_folder_content_backslash_actual.matched == 5
- - remote_copy_folder_content_backslash_actual.files[0].filename == 'subdir'
- - remote_copy_folder_content_backslash_actual.files[1].filename == 'empty'
- - remote_copy_folder_content_backslash_actual.files[2].filename == 'subdir2'
- - remote_copy_folder_content_backslash_actual.files[3].filename == 'subdir3'
- - remote_copy_folder_content_backslash_actual.files[4].filename == 'subdir4'
-
-- name: copy folder contents to folder remote with backslash (idempotent)
- win_copy:
- src: '{{test_win_copy_path}}\source\'
- dest: '{{test_win_copy_path}}\target'
- remote_src: yes
- register: remote_copy_folder_content_backslash_again
-
-- name: assert copy folder content to folder remote with backslash (idempotent)
- assert:
- that:
- - remote_copy_folder_content_backslash_again is not changed
-
-- name: change remote file after folder content to folder test
- win_copy:
- content: bar.txt
- dest: '{{test_win_copy_path}}\target\foo.txt'
-
-- name: remote remote folder after folder content to folder test
- win_file:
- path: '{{test_win_copy_path}}\target\subdir\subdir2\subdir3\subdir4'
- state: absent
-
-- name: copy folder content to folder remote after change
- win_copy:
- src: '{{test_win_copy_path}}/source/'
- dest: '{{test_win_copy_path}}/target/'
- remote_src: yes
- register: remote_copy_folder_content_to_folder_after_change
-
-- name: get result of copy folder content to folder remote after change
- win_find:
- paths: '{{test_win_copy_path}}\target'
- recurse: yes
- patterns: ['foo.txt', 'qux.txt']
- register: remote_copy_folder_content_to_folder_after_change_actual
-
-- name: assert copy folder content to folder after changes
- assert:
- that:
- - remote_copy_folder_content_to_folder_after_change is changed
- - remote_copy_folder_content_to_folder_after_change_actual.matched == 2
- - remote_copy_folder_content_to_folder_after_change_actual.files[0].checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - remote_copy_folder_content_to_folder_after_change_actual.files[1].checksum == 'b54ba7f5621240d403f06815f7246006ef8c7d43'
-
-# https://github.com/ansible/ansible/issues/50077
-- name: create empty nested directory
- win_file:
- path: '{{ test_win_copy_path }}\source\empty-nested\nested-dir'
- state: directory
-
-- name: copy empty nested directory (check mode)
- win_copy:
- src: '{{ test_win_copy_path }}\source\empty-nested'
- dest: '{{ test_win_copy_path }}\target'
- remote_src: True
- check_mode: True
- register: copy_empty_dir_check
-
-- name: get result of copy empty nested directory (check mode)
- win_stat:
- path: '{{ test_win_copy_path }}\target\empty-nested'
- register: copy_empty_dir_actual_check
-
-- name: assert copy empty nested directory (check mode)
- assert:
- that:
- - copy_empty_dir_check is changed
- - copy_empty_dir_check.operation == "folder_copy"
- - not copy_empty_dir_actual_check.stat.exists
-
-- name: copy empty nested directory
- win_copy:
- src: '{{ test_win_copy_path }}\source\empty-nested'
- dest: '{{ test_win_copy_path }}\target'
- remote_src: True
- register: copy_empty_dir
-
-- name: get result of copy empty nested directory
- win_stat:
- path: '{{ test_win_copy_path }}\target\empty-nested\nested-dir'
- register: copy_empty_dir_actual
-
-- name: assert copy empty nested directory
- assert:
- that:
- - copy_empty_dir is changed
- - copy_empty_dir.operation == "folder_copy"
- - copy_empty_dir_actual.stat.exists
-
-- name: copy empty nested directory (idempotent)
- win_copy:
- src: '{{ test_win_copy_path }}\source\empty-nested'
- dest: '{{ test_win_copy_path }}\target'
- remote_src: True
- register: copy_empty_dir_again
-
-- name: assert copy empty nested directory (idempotent)
- assert:
- that:
- - not copy_empty_dir_again is changed
diff --git a/test/integration/targets/incidental_win_copy/tasks/tests.yml b/test/integration/targets/incidental_win_copy/tasks/tests.yml
deleted file mode 100644
index d15e71f6..00000000
--- a/test/integration/targets/incidental_win_copy/tasks/tests.yml
+++ /dev/null
@@ -1,535 +0,0 @@
----
-- name: fail no source or content
- win_copy:
- dest: dest
- register: fail_no_source_content
- failed_when: fail_no_source_content.msg != 'src (or content) and dest are required'
-
-- name: fail content but dest isn't a file, unix ending
- win_copy:
- content: a
- dest: a/
- register: fail_dest_not_file_unix
- failed_when: fail_dest_not_file_unix.msg != 'dest must be a file if content is defined'
-
-- name: fail content but dest isn't a file, windows ending
- win_copy:
- content: a
- dest: a\
- register: fail_dest_not_file_windows
- failed_when: fail_dest_not_file_windows.msg != 'dest must be a file if content is defined'
-
-- name: fail to copy a file with parent dir that doesn't exist and filename is set
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\missing-dir\foo.txt'
- register: fail_missing_parent_dir
- failed_when: "'does not exist' not in fail_missing_parent_dir.msg"
-
-- name: fail to copy an encrypted file without the password set
- win_copy:
- src: '{{role_path}}/files-different/vault/vault-file'
- dest: '{{test_win_copy_path}}\file'
- register: fail_copy_encrypted_file
- ignore_errors: yes # weird failed_when doesn't work in this case
-
-- name: assert failure message when copying an encrypted file without the password set
- assert:
- that:
- - fail_copy_encrypted_file is failed
- - fail_copy_encrypted_file.msg == 'A vault password or secret must be specified to decrypt {{role_path}}/files-different/vault/vault-file'
-
-- name: fail to copy a directory with an encrypted file without the password
- win_copy:
- src: '{{role_path}}/files-different/vault'
- dest: '{{test_win_copy_path}}'
- register: fail_copy_directory_with_enc_file
- ignore_errors: yes
-
-- name: assert failure message when copying a directory that contains an encrypted file without the password set
- assert:
- that:
- - fail_copy_directory_with_enc_file is failed
- - fail_copy_directory_with_enc_file.msg == 'A vault password or secret must be specified to decrypt {{role_path}}/files-different/vault/vault-file'
-
-- name: copy with content (check mode)
- win_copy:
- content: a
- dest: '{{test_win_copy_path}}\file'
- register: copy_content_check
- check_mode: yes
-
-- name: get result of copy with content (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\file'
- register: copy_content_actual_check
-
-- name: assert copy with content (check mode)
- assert:
- that:
- - copy_content_check is changed
- - copy_content_check.checksum == '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8'
- - copy_content_check.operation == 'file_copy'
- - copy_content_check.size == 1
- - copy_content_actual_check.stat.exists == False
-
-- name: copy with content
- win_copy:
- content: a
- dest: '{{test_win_copy_path}}\file'
- register: copy_content
-
-- name: get result of copy with content
- win_stat:
- path: '{{test_win_copy_path}}\file'
- register: copy_content_actual
-
-- name: assert copy with content
- assert:
- that:
- - copy_content is changed
- - copy_content.checksum == '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8'
- - copy_content.operation == 'file_copy'
- - copy_content.size == 1
- - copy_content_actual.stat.exists == True
- - copy_content_actual.stat.checksum == '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8'
-
-- name: copy with content (idempotent)
- win_copy:
- content: a
- dest: '{{test_win_copy_path}}\file'
- register: copy_content_again
-
-- name: assert copy with content (idempotent)
- assert:
- that:
- - copy_content_again is not changed
-
-- name: copy with content change when missing
- win_copy:
- content: b
- dest: '{{test_win_copy_path}}\file'
- force: no
- register: copy_content_when_missing
-
-- name: assert copy with content change when missing
- assert:
- that:
- - copy_content_when_missing is not changed
-
-- name: copy single file (check mode)
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\foo-target.txt'
- register: copy_file_check
- check_mode: yes
-
-- name: get result of copy single file (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\foo-target.txt'
- register: copy_file_actual_check
-
-- name: assert copy single file (check mode)
- assert:
- that:
- - copy_file_check is changed
- - copy_file_check.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - copy_file_check.dest == test_win_copy_path + '\\foo-target.txt'
- - copy_file_check.operation == 'file_copy'
- - copy_file_check.size == 8
- - copy_file_actual_check.stat.exists == False
-
-- name: copy single file
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\foo-target.txt'
- register: copy_file
-
-- name: get result of copy single file
- win_stat:
- path: '{{test_win_copy_path}}\foo-target.txt'
- register: copy_file_actual
-
-- name: assert copy single file
- assert:
- that:
- - copy_file is changed
- - copy_file.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - copy_file.dest == test_win_copy_path + '\\foo-target.txt'
- - copy_file.operation == 'file_copy'
- - copy_file.size == 8
- - copy_file_actual.stat.exists == True
- - copy_file_actual.stat.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
-
-- name: copy single file (idempotent)
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\foo-target.txt'
- register: copy_file_again
-
-- name: assert copy single file (idempotent)
- assert:
- that:
- - copy_file_again is not changed
-
-- name: copy single file (backup)
- win_copy:
- content: "{{ lookup('file', 'foo.txt') }}\nfoo bar"
- dest: '{{test_win_copy_path}}\foo-target.txt'
- backup: yes
- register: copy_file_backup
-
-- name: check backup_file
- win_stat:
- path: '{{ copy_file_backup.backup_file }}'
- register: backup_file
-
-- name: assert copy single file (backup)
- assert:
- that:
- - copy_file_backup is changed
- - backup_file.stat.exists == true
-
-- name: copy single file to folder (check mode)
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\'
- register: copy_file_to_folder_check
- check_mode: yes
-
-- name: get result of copy single file to folder (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\foo.txt'
- register: copy_file_to_folder_actual_check
-
-- name: assert copy single file to folder (check mode)
- assert:
- that:
- - copy_file_to_folder_check is changed
- - copy_file_to_folder_check.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - copy_file_to_folder_check.dest == test_win_copy_path + '\\foo.txt'
- - copy_file_to_folder_check.operation == 'file_copy'
- - copy_file_to_folder_check.size == 8
- - copy_file_to_folder_actual_check.stat.exists == False
-
-- name: copy single file to folder
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\'
- register: copy_file_to_folder
-
-- name: get result of copy single file to folder
- win_stat:
- path: '{{test_win_copy_path}}\foo.txt'
- register: copy_file_to_folder_actual
-
-- name: assert copy single file to folder
- assert:
- that:
- - copy_file_to_folder is changed
- - copy_file_to_folder.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - copy_file_to_folder.dest == test_win_copy_path + '\\foo.txt'
- - copy_file_to_folder.operation == 'file_copy'
- - copy_file_to_folder.size == 8
- - copy_file_to_folder_actual.stat.exists == True
- - copy_file_to_folder_actual.stat.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
-
-- name: copy single file to folder (idempotent)
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\'
- register: copy_file_to_folder_again
-
-- name: assert copy single file to folder (idempotent)
- assert:
- that:
- - copy_file_to_folder_again is not changed
-
-- name: copy single file to missing folder (check mode)
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\missing\'
- register: copy_file_to_missing_folder_check
- check_mode: yes
-
-- name: get result of copy single file to missing folder (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\missing\foo.txt'
- register: copy_file_to_missing_folder_actual_check
-
-- name: assert copy single file to missing folder (check mode)
- assert:
- that:
- - copy_file_to_missing_folder_check is changed
- - copy_file_to_missing_folder_check.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - copy_file_to_missing_folder_check.operation == 'file_copy'
- - copy_file_to_missing_folder_check.size == 8
- - copy_file_to_missing_folder_actual_check.stat.exists == False
-
-- name: copy single file to missing folder
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\missing\'
- register: copy_file_to_missing_folder
-
-- name: get result of copy single file to missing folder
- win_stat:
- path: '{{test_win_copy_path}}\missing\foo.txt'
- register: copy_file_to_missing_folder_actual
-
-- name: assert copy single file to missing folder
- assert:
- that:
- - copy_file_to_missing_folder is changed
- - copy_file_to_missing_folder.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - copy_file_to_missing_folder.operation == 'file_copy'
- - copy_file_to_missing_folder.size == 8
- - copy_file_to_missing_folder_actual.stat.exists == True
- - copy_file_to_missing_folder_actual.stat.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
-
-- name: copy folder (check mode)
- win_copy:
- src: files
- dest: '{{test_win_copy_path}}\recursive\folder'
- register: copy_folder_check
- check_mode: yes
-
-- name: get result of copy folder (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\recursive\folder'
- register: copy_folder_actual_check
-
-- name: assert copy folder (check mode)
- assert:
- that:
- - copy_folder_check is changed
- - copy_folder_check.operation == 'folder_copy'
- - copy_folder_actual_check.stat.exists == False
-
-- name: copy folder
- win_copy:
- src: files
- dest: '{{test_win_copy_path}}\recursive\folder'
- register: copy_folder
-
-- name: get result of copy folder
- win_find:
- paths: '{{test_win_copy_path}}\recursive\folder'
- recurse: yes
- file_type: directory
- register: copy_folder_actual
-
-- name: assert copy folder
- assert:
- that:
- - copy_folder is changed
- - copy_folder.operation == 'folder_copy'
- - copy_folder_actual.examined == 11 # includes files and folders, the below is the nested order
- - copy_folder_actual.matched == 6
- - copy_folder_actual.files[0].filename == 'files'
- - copy_folder_actual.files[1].filename == 'subdir'
- - copy_folder_actual.files[2].filename == 'empty'
- - copy_folder_actual.files[3].filename == 'subdir2'
- - copy_folder_actual.files[4].filename == 'subdir3'
- - copy_folder_actual.files[5].filename == 'subdir4'
-
-- name: copy folder (idempotent)
- win_copy:
- src: files
- dest: '{{test_win_copy_path}}\recursive\folder'
- register: copy_folder_again
-
-- name: assert copy folder (idempotent)
- assert:
- that:
- - copy_folder_again is not changed
-
-- name: change the text of a file in the remote source
- win_copy:
- content: bar.txt
- dest: '{{test_win_copy_path}}\recursive\folder\files\foo.txt'
-
-- name: remove folder for test of recursive copy
- win_file:
- path: '{{test_win_copy_path}}\recursive\folder\files\subdir\subdir2\subdir3\subdir4'
- state: absent
-
-- name: copy folder after changes
- win_copy:
- src: files
- dest: '{{test_win_copy_path}}\recursive\folder'
- register: copy_folder_after_change
-
-- name: get result of copy folder after changes
- win_find:
- paths: '{{test_win_copy_path}}\recursive\folder\files'
- recurse: yes
- patterns: ['foo.txt', 'qux.txt']
- register: copy_folder_after_changes_actual
-
-- name: assert copy folder after changes
- assert:
- that:
- - copy_folder_after_change is changed
- - copy_folder_after_changes_actual.matched == 2
- - copy_folder_after_changes_actual.files[0].checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'
- - copy_folder_after_changes_actual.files[1].checksum == 'b54ba7f5621240d403f06815f7246006ef8c7d43'
-
-- name: copy folder's contents (check mode)
- win_copy:
- src: files/
- dest: '{{test_win_copy_path}}\recursive-contents\'
- register: copy_folder_contents_check
- check_mode: yes
-
-- name: get result of copy folder'scontents (check mode)
- win_stat:
- path: '{{test_win_copy_path}}\recursive-contents'
- register: copy_folder_contents_actual_check
-
-- name: assert copy folder's contents (check mode)
- assert:
- that:
- - copy_folder_contents_check is changed
- - copy_folder_contents_check.operation == 'folder_copy'
- - copy_folder_contents_actual_check.stat.exists == False
-
-- name: copy folder's contents
- win_copy:
- src: files/
- dest: '{{test_win_copy_path}}\recursive-contents\'
- register: copy_folder_contents
-
-- name: get result of copy folder
- win_find:
- paths: '{{test_win_copy_path}}\recursive-contents'
- recurse: yes
- file_type: directory
- register: copy_folder_contents_actual
-
-- name: assert copy folder
- assert:
- that:
- - copy_folder_contents is changed
- - copy_folder_contents.operation == 'folder_copy'
- - copy_folder_contents_actual.examined == 10 # includes files and folders, the below is the nested order
- - copy_folder_contents_actual.matched == 5
- - copy_folder_contents_actual.files[0].filename == 'subdir'
- - copy_folder_contents_actual.files[1].filename == 'empty'
- - copy_folder_contents_actual.files[2].filename == 'subdir2'
- - copy_folder_contents_actual.files[3].filename == 'subdir3'
- - copy_folder_contents_actual.files[4].filename == 'subdir4'
-
-- name: fail to copy file to a folder
- win_copy:
- src: foo.txt
- dest: '{{test_win_copy_path}}\recursive-contents'
- register: fail_file_to_folder
- failed_when: "'object at path is already a directory' not in fail_file_to_folder.msg"
-
-- name: fail to copy folder to a file
- win_copy:
- src: subdir/
- dest: '{{test_win_copy_path}}\recursive-contents\foo.txt'
- register: fail_folder_to_file
- failed_when: "'object at parent directory path is already a file' not in fail_folder_to_file.msg"
-
-# https://github.com/ansible/ansible/issues/31336
-- name: create file with colon in the name
- copy:
- dest: '{{role_path}}/files-different/colon:file'
- content: test
- delegate_to: localhost
-
-- name: copy a file with colon as a source
- win_copy:
- src: '{{role_path}}/files-different/colon:file'
- dest: '{{test_win_copy_path}}\colon.file'
- register: copy_file_with_colon
-
-- name: get result of file with colon as a source
- win_stat:
- path: '{{test_win_copy_path}}\colon.file'
- register: copy_file_with_colon_result
-
-- name: assert results of copy a file with colon as a source
- assert:
- that:
- - copy_file_with_colon is changed
- - copy_file_with_colon_result.stat.exists == True
- - copy_file_with_colon_result.stat.checksum == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"
-
-- name: remove file with colon in the name
- file:
- path: '{{role_path}}/files-different/colon:file'
- state: absent
- delegate_to: localhost
-
-- name: copy an encrypted file without decrypting
- win_copy:
- src: '{{role_path}}/files-different/vault/vault-file'
- dest: '{{test_win_copy_path}}\vault-file'
- decrypt: no
- register: copy_encrypted_file
-
-- name: get stat of copied encrypted file without decrypting
- win_stat:
- path: '{{test_win_copy_path}}\vault-file'
- register: copy_encrypted_file_result
-
-- name: assert result of copy an encrypted file without decrypting
- assert:
- that:
- - copy_encrypted_file is changed
- - copy_encrypted_file_result.stat.checksum == "74a89620002d253f38834ee5b06cddd28956a43d"
-
-- name: copy an encrypted file without decrypting (idempotent)
- win_copy:
- src: '{{role_path}}/files-different/vault/vault-file'
- dest: '{{test_win_copy_path}}\vault-file'
- decrypt: no
- register: copy_encrypted_file_again
-
-- name: assert result of copy an encrypted file without decrypting (idempotent)
- assert:
- that:
- - copy_encrypted_file_again is not changed
-
-- name: copy folder with encrypted files without decrypting
- win_copy:
- src: '{{role_path}}/files-different/vault/'
- dest: '{{test_win_copy_path}}\encrypted-test'
- decrypt: no
- register: copy_encrypted_file
-
-- name: get result of copy folder with encrypted files without decrypting
- win_find:
- paths: '{{test_win_copy_path}}\encrypted-test'
- recurse: yes
- patterns: '*vault*'
- register: copy_encrypted_file_result
-
-- name: assert result of copy folder with encrypted files without decrypting
- assert:
- that:
- - copy_encrypted_file is changed
- - copy_encrypted_file_result.files|count == 2
- - copy_encrypted_file_result.files[0].checksum == "834563c94127730ecfa42dfc1e1821bbda2e51da"
- - copy_encrypted_file_result.files[1].checksum == "74a89620002d253f38834ee5b06cddd28956a43d"
-
-- name: copy folder with encrypted files without decrypting (idempotent)
- win_copy:
- src: '{{role_path}}/files-different/vault/'
- dest: '{{test_win_copy_path}}\encrypted-test'
- decrypt: no
- register: copy_encrypted_file_again
-
-- name: assert result of copy folder with encrypted files without decrypting (idempotent)
- assert:
- that:
- - copy_encrypted_file_again is not changed
-
-- name: remove test folder after local to remote tests
- win_file:
- path: '{{test_win_copy_path}}'
- state: absent
diff --git a/test/integration/targets/incidental_win_data_deduplication/aliases b/test/integration/targets/incidental_win_data_deduplication/aliases
deleted file mode 100644
index c7657537..00000000
--- a/test/integration/targets/incidental_win_data_deduplication/aliases
+++ /dev/null
@@ -1,5 +0,0 @@
-shippable/windows/incidental
-windows
-skip/windows/2008
-skip/windows/2008-R2
-skip/windows/2012
diff --git a/test/integration/targets/incidental_win_data_deduplication/meta/main.yml b/test/integration/targets/incidental_win_data_deduplication/meta/main.yml
deleted file mode 100644
index 9f37e96c..00000000
--- a/test/integration/targets/incidental_win_data_deduplication/meta/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-dependencies:
-- setup_remote_tmp_dir
diff --git a/test/integration/targets/incidental_win_data_deduplication/tasks/main.yml b/test/integration/targets/incidental_win_data_deduplication/tasks/main.yml
deleted file mode 100644
index 83c7197c..00000000
--- a/test/integration/targets/incidental_win_data_deduplication/tasks/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-- import_tasks: pre_test.yml
diff --git a/test/integration/targets/incidental_win_data_deduplication/tasks/pre_test.yml b/test/integration/targets/incidental_win_data_deduplication/tasks/pre_test.yml
deleted file mode 100644
index 0d1c3d50..00000000
--- a/test/integration/targets/incidental_win_data_deduplication/tasks/pre_test.yml
+++ /dev/null
@@ -1,40 +0,0 @@
----
-- set_fact:
- AnsibleVhdx: '{{ remote_tmp_dir }}\AnsiblePart.vhdx'
-
-- name: Install FS-Data-Deduplication
- win_feature:
- name: FS-Data-Deduplication
- include_sub_features: true
- state: present
- register: data_dedup_feat_reg
-
-- name: Reboot windows after the feature has been installed
- win_reboot:
- reboot_timeout: 3600
- when:
- - data_dedup_feat_reg.success
- - data_dedup_feat_reg.reboot_required
-
-- name: Copy VHDX scripts
- win_template:
- src: "{{ item.src }}"
- dest: '{{ remote_tmp_dir }}\{{ item.dest }}'
- loop:
- - { src: partition_creation_script.j2, dest: partition_creation_script.txt }
- - { src: partition_deletion_script.j2, dest: partition_deletion_script.txt }
-
-- name: Create partition
- win_command: diskpart.exe /s {{ remote_tmp_dir }}\partition_creation_script.txt
-
-- name: Format T with NTFS
- win_format:
- drive_letter: T
- file_system: ntfs
-
-- name: Run tests
- block:
- - import_tasks: tests.yml
- always:
- - name: Detach disk
- win_command: diskpart.exe /s {{ remote_tmp_dir }}\partition_deletion_script.txt
diff --git a/test/integration/targets/incidental_win_data_deduplication/tasks/tests.yml b/test/integration/targets/incidental_win_data_deduplication/tasks/tests.yml
deleted file mode 100644
index 64a42927..00000000
--- a/test/integration/targets/incidental_win_data_deduplication/tasks/tests.yml
+++ /dev/null
@@ -1,47 +0,0 @@
----
-
-- name: Enable Data Deduplication on the T drive - check mode
- win_data_deduplication:
- drive_letter: "T"
- state: present
- settings:
- no_compress: true
- minimum_file_age_days: 2
- minimum_file_size: 0
- check_mode: yes
- register: win_data_deduplication_enable_check_mode
-
-- name: Check that it was successful with a change - check mode
- assert:
- that:
- - win_data_deduplication_enable_check_mode is changed
-
-- name: Enable Data Deduplication on the T drive
- win_data_deduplication:
- drive_letter: "T"
- state: present
- settings:
- no_compress: true
- minimum_file_age_days: 2
- minimum_file_size: 0
- register: win_data_deduplication_enable
-
-- name: Check that it was successful with a change
- assert:
- that:
- - win_data_deduplication_enable is changed
-
-- name: Enable Data Deduplication on the T drive
- win_data_deduplication:
- drive_letter: "T"
- state: present
- settings:
- no_compress: true
- minimum_file_age_days: 2
- minimum_file_size: 0
- register: win_data_deduplication_enable_again
-
-- name: Check that it was successful without a change
- assert:
- that:
- - win_data_deduplication_enable_again is not changed
diff --git a/test/integration/targets/incidental_win_data_deduplication/templates/partition_creation_script.j2 b/test/integration/targets/incidental_win_data_deduplication/templates/partition_creation_script.j2
deleted file mode 100644
index 8e47fda9..00000000
--- a/test/integration/targets/incidental_win_data_deduplication/templates/partition_creation_script.j2
+++ /dev/null
@@ -1,11 +0,0 @@
-create vdisk file="{{ AnsibleVhdx }}" maximum=2000 type=fixed
-
-select vdisk file="{{ AnsibleVhdx }}"
-
-attach vdisk
-
-convert mbr
-
-create partition primary
-
-assign letter="T"
diff --git a/test/integration/targets/incidental_win_data_deduplication/templates/partition_deletion_script.j2 b/test/integration/targets/incidental_win_data_deduplication/templates/partition_deletion_script.j2
deleted file mode 100644
index c2be9cd1..00000000
--- a/test/integration/targets/incidental_win_data_deduplication/templates/partition_deletion_script.j2
+++ /dev/null
@@ -1,3 +0,0 @@
-select vdisk file="{{ AnsibleVhdx }}"
-
-detach vdisk
diff --git a/test/integration/targets/incidental_win_dsc/aliases b/test/integration/targets/incidental_win_dsc/aliases
deleted file mode 100644
index 9114c742..00000000
--- a/test/integration/targets/incidental_win_dsc/aliases
+++ /dev/null
@@ -1,6 +0,0 @@
-shippable/windows/incidental
-windows
-skip/windows/2008
-skip/windows/2008-R2
-skip/windows/2012
-skip/windows/2012-R2
diff --git a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 b/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1
deleted file mode 100644
index dbf1ecf3..00000000
--- a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1
+++ /dev/null
@@ -1,41 +0,0 @@
-#Requires -Version 5.0 -Modules CimCmdlets
-
-Function Get-TargetResource
-{
- [CmdletBinding()]
- [OutputType([Hashtable])]
- param(
- [Parameter(Mandatory=$true)]
- [ValidateNotNullOrEmpty()]
- [String]$KeyParam
- )
- return @{Value = [bool]$global:DSCMachineStatus}
-}
-
-Function Set-TargetResource
-{
- [CmdletBinding()]
- param (
- [Parameter(Mandatory=$true)]
- [ValidateNotNullOrEmpty()]
- [String]$KeyParam,
- [Bool]$Value = $true
- )
- $global:DSCMachineStatus = [int]$Value
-}
-
-Function Test-TargetResource
-{
- [CmdletBinding()]
- [OutputType([Boolean])]
- param (
- [Parameter(Mandatory=$true)]
- [ValidateNotNullOrEmpty()]
- [String]$KeyParam,
- [Bool]$Value = $true
- )
- $false
-}
-
-Export-ModuleMember -Function *-TargetResource
-
diff --git a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.schema.mof b/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.schema.mof
deleted file mode 100644
index 288b8877..00000000
--- a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.schema.mof
+++ /dev/null
@@ -1,7 +0,0 @@
-[ClassVersion("1.0.0"), FriendlyName("xSetReboot")]
-class ANSIBLE_xSetReboot : OMI_BaseResource
-{
- [Key] String KeyParam;
- [Write] Boolean Value;
-};
-
diff --git a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 b/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1
deleted file mode 100644
index 79f64969..00000000
--- a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1
+++ /dev/null
@@ -1,214 +0,0 @@
-#Requires -Version 5.0 -Modules CimCmdlets
-
-Function ConvertFrom-CimInstance {
- param(
- [Parameter(Mandatory=$true)][CimInstance]$Instance
- )
- $hashtable = @{
- _cim_instance = $Instance.CimSystemProperties.ClassName
- }
- foreach ($prop in $Instance.CimInstanceProperties) {
- $hashtable."$($prop.Name)" = ConvertTo-OutputValue -Value $prop.Value
- }
- return $hashtable
-}
-
-Function ConvertTo-OutputValue {
- param($Value)
-
- if ($Value -is [DateTime[]]) {
- $Value = $Value | ForEach-Object { $_.ToString("o") }
- } elseif ($Value -is [DateTime]) {
- $Value = $Value.ToString("o")
- } elseif ($Value -is [Double]) {
- $Value = $Value.ToString() # To avoid Python 2 double parsing issues on test validation
- } elseif ($Value -is [Double[]]) {
- $Value = $Value | ForEach-Object { $_.ToString() }
- } elseif ($Value -is [PSCredential]) {
- $password = $null
- $password_ptr = [System.Runtime.InteropServices.Marshal]::SecureStringToGlobalAllocUnicode($Value.Password)
- try {
- $password = [System.Runtime.InteropServices.Marshal]::PtrToStringUni($password_ptr)
- } finally {
- [System.Runtime.InteropServices.Marshal]::ZeroFreeGlobalAllocUnicode($password_ptr)
- }
- $Value = @{
- username = $Value.Username
- password = $password
- }
- } elseif ($Value -is [CimInstance[]]) {
- $value_list = [System.Collections.Generic.List`1[Hashtable]]@()
- foreach ($cim_instance in $Value) {
- $value_list.Add((ConvertFrom-CimInstance -Instance $cim_instance))
- }
- $Value = $value_list.ToArray()
- } elseif ($Value -is [CimInstance]) {
- $Value = ConvertFrom-CimInstance -Instance $Value
- }
-
- return ,$Value
-}
-
-Function Get-TargetResource
-{
- [CmdletBinding()]
- [OutputType([Hashtable])]
- param(
- [Parameter(Mandatory = $true)]
- [ValidateSet("Present", "Absent")]
- [String] $Ensure = "Present",
-
- [Parameter(Mandatory = $true)]
- [ValidateNotNullOrEmpty()]
- [String] $Path
- )
- return @{
- Ensure = $Ensure
- Path = $Path
- }
-}
-
-Function Set-TargetResource
-{
- [CmdletBinding()]
- param
- (
- [Parameter(Mandatory = $true)]
- [ValidateSet("Present", "Absent")]
- [String] $Ensure = "Present",
-
- [Parameter(Mandatory = $true)]
- [ValidateNotNullOrEmpty()]
- [String] $Path,
-
- [String] $DefaultParam = "Default",
- [String] $StringParam,
- [String[]] $StringArrayParam,
- [SByte] $Int8Param,
- [SByte[]] $Int8ArrayParam,
- [Byte] $UInt8Param,
- [Byte[]] $UInt8ArrayParam,
- [Int16] $Int16Param,
- [Int16[]] $Int16ArrayParam,
- [UInt16] $UInt16Param,
- [UInt16[]] $UInt16ArrayParam,
- [Int32] $Int32Param,
- [Int32[]] $Int32ArrayParam,
- [UInt32] $UInt32Param,
- [UInt32[]] $UInt32ArrayParam,
- [Int64] $Int64Param,
- [Int64[]] $Int64ArrayParam,
- [UInt64] $UInt64Param,
- [UInt64[]] $UInt64ArrayParam,
- [Bool] $BooleanParam,
- [Bool[]] $BooleanArrayParam,
- [Char] $CharParam,
- [Char[]] $CharArrayParam,
- [Single] $SingleParam,
- [Single[]] $SingleArrayParam,
- [Double] $DoubleParam,
- [Double[]] $DoubleArrayParam,
- [DateTime] $DateTimeParam,
- [DateTime[]] $DateTimeArrayParam,
- [PSCredential] $PSCredentialParam,
- [CimInstance[]] $HashtableParam,
- [CimInstance] $CimInstanceParam,
- [CimInstance[]] $CimInstanceArrayParam,
- [CimInstance] $NestedCimInstanceParam,
- [CimInstance[]] $NestedCimInstanceArrayParam
- )
-
- $info = @{
- Version = "1.0.0"
- Ensure = @{
- Type = $Ensure.GetType().FullName
- Value = $Ensure
- }
- Path = @{
- Type = $Path.GetType().FullName
- Value = $Path
- }
- DefaultParam = @{
- Type = $DefaultParam.GetType().FullName
- Value = $DefaultParam
- }
- }
-
- foreach ($kvp in $PSCmdlet.MyInvocation.BoundParameters.GetEnumerator()) {
- $info."$($kvp.Key)" = @{
- Type = $kvp.Value.GetType().FullName
- Value = (ConvertTo-OutputValue -Value $kvp.Value)
- }
- }
-
- if (Test-Path -Path $Path) {
- Remove-Item -Path $Path -Force > $null
- }
- New-Item -Path $Path -ItemType File > $null
- Set-Content -Path $Path -Value (ConvertTo-Json -InputObject $info -Depth 10) > $null
- Write-Verbose -Message "set verbose"
- Write-Warning -Message "set warning"
-}
-
-Function Test-TargetResource
-{
- [CmdletBinding()]
- [OutputType([Boolean])]
- param
- (
- [Parameter(Mandatory = $true)]
- [ValidateSet("Present", "Absent")]
- [String] $Ensure = "Present",
-
- [Parameter(Mandatory = $true)]
- [ValidateNotNullOrEmpty()]
- [String] $Path,
-
- [String] $DefaultParam = "Default",
- [String] $StringParam,
- [String[]] $StringArrayParam,
- [SByte] $Int8Param,
- [SByte[]] $Int8ArrayParam,
- [Byte] $UInt8Param,
- [Byte[]] $UInt8ArrayParam,
- [Int16] $Int16Param,
- [Int16[]] $Int16ArrayParam,
- [UInt16] $UInt16Param,
- [UInt16[]] $UInt16ArrayParam,
- [Int32] $Int32Param,
- [Int32[]] $Int32ArrayParam,
- [UInt32] $UInt32Param,
- [UInt32[]] $UInt32ArrayParam,
- [Int64] $Int64Param,
- [Int64[]] $Int64ArrayParam,
- [UInt64] $UInt64Param,
- [UInt64[]] $UInt64ArrayParam,
- [Bool] $BooleanParam,
- [Bool[]] $BooleanArrayParam,
- [Char] $CharParam,
- [Char[]] $CharArrayParam,
- [Single] $SingleParam,
- [Single[]] $SingleArrayParam,
- [Double] $DoubleParam,
- [Double[]] $DoubleArrayParam,
- [DateTime] $DateTimeParam,
- [DateTime[]] $DateTimeArrayParam,
- [PSCredential] $PSCredentialParam,
- [CimInstance[]] $HashtableParam,
- [CimInstance] $CimInstanceParam,
- [CimInstance[]] $CimInstanceArrayParam,
- [CimInstance] $NestedCimInstanceParam,
- [CimInstance[]] $NestedCimInstanceArrayParam
- )
- Write-Verbose -Message "test verbose"
- Write-Warning -Message "test warning"
- $exists = Test-Path -LiteralPath $Path -PathType Leaf
- if ($Ensure -eq "Present") {
- $exists
- } else {
- -not $exists
- }
-}
-
-Export-ModuleMember -Function *-TargetResource
-
diff --git a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.schema.mof b/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.schema.mof
deleted file mode 100644
index c61b2b1e..00000000
--- a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.schema.mof
+++ /dev/null
@@ -1,60 +0,0 @@
-[ClassVersion("1.0.0")]
-class ANSIBLE_xTestClass
-{
- [Key] String Key;
- [Write] String StringValue;
- [Write] SInt32 IntValue;
- [Write] String StringArrayValue[];
-};
-
-[ClassVersion("1.0.0")]
-class ANSIBLE_xNestedClass
-{
- [Key] String KeyValue;
- [Write, EmbeddedInstance("ANSIBLE_xTestClass")] String CimValue;
- [Write, EmbeddedInstance("MSFT_KeyValuePair")] String HashValue[];
- [Write] SInt16 IntValue;
-};
-
-[ClassVersion("1.0.0"), FriendlyName("xTestResource")]
-class ANSIBLE_xTestResource : OMI_BaseResource
-{
- [Key] String Path;
- [Required, ValueMap{"Present", "Absent"}, Values{"Present", "Absent"}] String Ensure;
- [Read] String ReadParam;
- [Write] String DefaultParam;
- [Write] String StringParam;
- [Write] String StringArrayParam[];
- [Write] SInt8 Int8Param;
- [Write] SInt8 Int8ArrayParam[];
- [Write] UInt8 UInt8Param;
- [Write] UInt8 UInt8ArrayParam[];
- [Write] SInt16 Int16Param;
- [Write] SInt16 Int16ArrayParam[];
- [Write] UInt16 UInt16Param;
- [Write] UInt16 UInt16ArrayParam[];
- [Write] SInt32 Int32Param;
- [Write] SInt32 Int32ArrayParam[];
- [Write] UInt32 UInt32Param;
- [Write] UInt32 UInt32ArrayParam[];
- [Write] SInt64 Int64Param;
- [Write] SInt64 Int64ArrayParam[];
- [Write] UInt64 UInt64Param;
- [Write] UInt64 UInt64ArrayParam[];
- [Write] Boolean BooleanParam;
- [Write] Boolean BooleanArrayParam[];
- [Write] Char16 CharParam;
- [Write] Char16 CharArrayParam[];
- [Write] Real32 SingleParam;
- [Write] Real32 SingleArrayParam[];
- [Write] Real64 DoubleParam;
- [Write] Real64 DoubleArrayParam[];
- [Write] DateTime DateTimeParam;
- [Write] DateTime DateTimeArrayParam[];
- [Write, EmbeddedInstance("MSFT_Credential")] String PSCredentialParam;
- [Write, EmbeddedInstance("MSFT_KeyValuePair")] String HashtableParam[];
- [Write, EmbeddedInstance("ANSIBLE_xTestClass")] String CimInstanceArrayParam[];
- [Write, EmbeddedInstance("ANSIBLE_xNestedClass")] String NestedCimInstanceParam;
- [Write, EmbeddedInstance("ANSIBLE_xNestedClass")] String NestedCimInstanceArrayParam[];
-};
-
diff --git a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 b/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1
deleted file mode 100644
index 3d61611d..00000000
--- a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1
+++ /dev/null
@@ -1,13 +0,0 @@
-@{
- ModuleVersion = '1.0.0'
- GUID = '80c895c4-de3f-4d6d-8fa4-c504c96b6f22'
- Author = 'Ansible'
- CompanyName = 'Ansible'
- Copyright = '(c) 2019'
- Description = 'Test DSC Resource for Ansible integration tests'
- PowerShellVersion = '5.0'
- CLRVersion = '4.0'
- FunctionsToExport = '*'
- CmdletsToExport = '*'
-}
-
diff --git a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 b/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1
deleted file mode 100644
index d75256e1..00000000
--- a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1
+++ /dev/null
@@ -1,214 +0,0 @@
-#Requires -Version 5.0 -Modules CimCmdlets
-
-Function ConvertFrom-CimInstance {
- param(
- [Parameter(Mandatory=$true)][CimInstance]$Instance
- )
- $hashtable = @{
- _cim_instance = $Instance.CimSystemProperties.ClassName
- }
- foreach ($prop in $Instance.CimInstanceProperties) {
- $hashtable."$($prop.Name)" = ConvertTo-OutputValue -Value $prop.Value
- }
- return $hashtable
-}
-
-Function ConvertTo-OutputValue {
- param($Value)
-
- if ($Value -is [DateTime[]]) {
- $Value = $Value | ForEach-Object { $_.ToString("o") }
- } elseif ($Value -is [DateTime]) {
- $Value = $Value.ToString("o")
- } elseif ($Value -is [Double]) {
- $Value = $Value.ToString() # To avoid Python 2 double parsing issues on test validation
- } elseif ($Value -is [Double[]]) {
- $Value = $Value | ForEach-Object { $_.ToString() }
- } elseif ($Value -is [PSCredential]) {
- $password = $null
- $password_ptr = [System.Runtime.InteropServices.Marshal]::SecureStringToGlobalAllocUnicode($Value.Password)
- try {
- $password = [System.Runtime.InteropServices.Marshal]::PtrToStringUni($password_ptr)
- } finally {
- [System.Runtime.InteropServices.Marshal]::ZeroFreeGlobalAllocUnicode($password_ptr)
- }
- $Value = @{
- username = $Value.Username
- password = $password
- }
- } elseif ($Value -is [CimInstance[]]) {
- $value_list = [System.Collections.Generic.List`1[Hashtable]]@()
- foreach ($cim_instance in $Value) {
- $value_list.Add((ConvertFrom-CimInstance -Instance $cim_instance))
- }
- $Value = $value_list.ToArray()
- } elseif ($Value -is [CimInstance]) {
- $Value = ConvertFrom-CimInstance -Instance $Value
- }
-
- return ,$Value
-}
-
-Function Get-TargetResource
-{
- [CmdletBinding()]
- [OutputType([Hashtable])]
- param(
- [Parameter(Mandatory = $true)]
- [ValidateSet("Present", "Absent")]
- [String] $Ensure = "Present",
-
- [Parameter(Mandatory = $true)]
- [ValidateNotNullOrEmpty()]
- [String] $Path
- )
- return @{
- Ensure = $Ensure
- Path = $Path
- }
-}
-
-Function Set-TargetResource
-{
- [CmdletBinding()]
- param
- (
- [Parameter(Mandatory = $true)]
- [ValidateSet("Present", "Absent")]
- [String] $Ensure = "Present",
-
- [Parameter(Mandatory = $true)]
- [ValidateNotNullOrEmpty()]
- [String] $Path,
-
- [String] $DefaultParam = "Default",
- [String] $StringParam,
- [String[]] $StringArrayParam,
- [SByte] $Int8Param,
- [SByte[]] $Int8ArrayParam,
- [Byte] $UInt8Param,
- [Byte[]] $UInt8ArrayParam,
- [Int16] $Int16Param,
- [Int16[]] $Int16ArrayParam,
- [UInt16] $UInt16Param,
- [UInt16[]] $UInt16ArrayParam,
- [Int32] $Int32Param,
- [Int32[]] $Int32ArrayParam,
- [UInt32] $UInt32Param,
- [UInt32[]] $UInt32ArrayParam,
- [Int64] $Int64Param,
- [Int64[]] $Int64ArrayParam,
- [UInt64] $UInt64Param,
- [UInt64[]] $UInt64ArrayParam,
- [Bool] $BooleanParam,
- [Bool[]] $BooleanArrayParam,
- [Char] $CharParam,
- [Char[]] $CharArrayParam,
- [Single] $SingleParam,
- [Single[]] $SingleArrayParam,
- [Double] $DoubleParam,
- [Double[]] $DoubleArrayParam,
- [DateTime] $DateTimeParam,
- [DateTime[]] $DateTimeArrayParam,
- [PSCredential] $PSCredentialParam,
- [CimInstance[]] $HashtableParam,
- [CimInstance] $CimInstanceParam,
- [CimInstance[]] $CimInstanceArrayParam,
- [CimInstance] $NestedCimInstanceParam,
- [CimInstance[]] $NestedCimInstanceArrayParam
- )
-
- $info = @{
- Version = "1.0.1"
- Ensure = @{
- Type = $Ensure.GetType().FullName
- Value = $Ensure
- }
- Path = @{
- Type = $Path.GetType().FullName
- Value = $Path
- }
- DefaultParam = @{
- Type = $DefaultParam.GetType().FullName
- Value = $DefaultParam
- }
- }
-
- foreach ($kvp in $PSCmdlet.MyInvocation.BoundParameters.GetEnumerator()) {
- $info."$($kvp.Key)" = @{
- Type = $kvp.Value.GetType().FullName
- Value = (ConvertTo-OutputValue -Value $kvp.Value)
- }
- }
-
- if (Test-Path -Path $Path) {
- Remove-Item -Path $Path -Force > $null
- }
- New-Item -Path $Path -ItemType File > $null
- Set-Content -Path $Path -Value (ConvertTo-Json -InputObject $info -Depth 10) > $null
- Write-Verbose -Message "set verbose"
- Write-Warning -Message "set warning"
-}
-
-Function Test-TargetResource
-{
- [CmdletBinding()]
- [OutputType([Boolean])]
- param
- (
- [Parameter(Mandatory = $true)]
- [ValidateSet("Present", "Absent")]
- [String] $Ensure = "Present",
-
- [Parameter(Mandatory = $true)]
- [ValidateNotNullOrEmpty()]
- [String] $Path,
-
- [String] $DefaultParam = "Default",
- [String] $StringParam,
- [String[]] $StringArrayParam,
- [SByte] $Int8Param,
- [SByte[]] $Int8ArrayParam,
- [Byte] $UInt8Param,
- [Byte[]] $UInt8ArrayParam,
- [Int16] $Int16Param,
- [Int16[]] $Int16ArrayParam,
- [UInt16] $UInt16Param,
- [UInt16[]] $UInt16ArrayParam,
- [Int32] $Int32Param,
- [Int32[]] $Int32ArrayParam,
- [UInt32] $UInt32Param,
- [UInt32[]] $UInt32ArrayParam,
- [Int64] $Int64Param,
- [Int64[]] $Int64ArrayParam,
- [UInt64] $UInt64Param,
- [UInt64[]] $UInt64ArrayParam,
- [Bool] $BooleanParam,
- [Bool[]] $BooleanArrayParam,
- [Char] $CharParam,
- [Char[]] $CharArrayParam,
- [Single] $SingleParam,
- [Single[]] $SingleArrayParam,
- [Double] $DoubleParam,
- [Double[]] $DoubleArrayParam,
- [DateTime] $DateTimeParam,
- [DateTime[]] $DateTimeArrayParam,
- [PSCredential] $PSCredentialParam,
- [CimInstance[]] $HashtableParam,
- [CimInstance] $CimInstanceParam,
- [CimInstance[]] $CimInstanceArrayParam,
- [CimInstance] $NestedCimInstanceParam,
- [CimInstance[]] $NestedCimInstanceArrayParam
- )
- Write-Verbose -Message "test verbose"
- Write-Warning -Message "test warning"
- $exists = Test-Path -LiteralPath $Path -PathType Leaf
- if ($Ensure -eq "Present") {
- $exists
- } else {
- -not $exists
- }
-}
-
-Export-ModuleMember -Function *-TargetResource
-
diff --git a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.schema.mof b/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.schema.mof
deleted file mode 100644
index 9301664b..00000000
--- a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.schema.mof
+++ /dev/null
@@ -1,63 +0,0 @@
-[ClassVersion("1.0.1")]
-class ANSIBLE_xTestClass
-{
- [Key] String KeyValue;
- [Write, ValueMap{"Choice1", "Choice2"}, Values{"Choice1", "Choice2"}] String Choice;
- [Write] String StringValue;
- [Write] SInt32 IntValue;
- [Write] String StringArrayValue[];
-};
-
-[ClassVersion("1.0.1")]
-class ANSIBLE_xNestedClass
-{
- [Key] String KeyValue;
- [Write, EmbeddedInstance("ANSIBLE_xTestClass")] String CimValue;
- [Write, EmbeddedInstance("ANSIBLE_xTestClass")] String CimArrayValue[];
- [Write, EmbeddedInstance("MSFT_KeyValuePair")] String HashValue[];
- [Write] SInt16 IntValue;
-};
-
-[ClassVersion("1.0.1"), FriendlyName("xTestResource")]
-class ANSIBLE_xTestResource : OMI_BaseResource
-{
- [Key] String Path;
- [Required, ValueMap{"Present", "Absent"}, Values{"Present", "Absent"}] String Ensure;
- [Read] String ReadParam;
- [Write] String DefaultParam;
- [Write] String StringParam;
- [Write] String StringArrayParam[];
- [Write] SInt8 Int8Param;
- [Write] SInt8 Int8ArrayParam[];
- [Write] UInt8 UInt8Param;
- [Write] UInt8 UInt8ArrayParam[];
- [Write] SInt16 Int16Param;
- [Write] SInt16 Int16ArrayParam[];
- [Write] UInt16 UInt16Param;
- [Write] UInt16 UInt16ArrayParam[];
- [Write] SInt32 Int32Param;
- [Write] SInt32 Int32ArrayParam[];
- [Write] UInt32 UInt32Param;
- [Write] UInt32 UInt32ArrayParam[];
- [Write] SInt64 Int64Param;
- [Write] SInt64 Int64ArrayParam[];
- [Write] UInt64 UInt64Param;
- [Write] UInt64 UInt64ArrayParam[];
- [Write] Boolean BooleanParam;
- [Write] Boolean BooleanArrayParam[];
- [Write] Char16 CharParam;
- [Write] Char16 CharArrayParam[];
- [Write] Real32 SingleParam;
- [Write] Real32 SingleArrayParam[];
- [Write] Real64 DoubleParam;
- [Write] Real64 DoubleArrayParam[];
- [Write] DateTime DateTimeParam;
- [Write] DateTime DateTimeArrayParam[];
- [Write, EmbeddedInstance("MSFT_Credential")] String PSCredentialParam;
- [Write, EmbeddedInstance("MSFT_KeyValuePair")] String HashtableParam[];
- [Write, EmbeddedInstance("ANSIBLE_xTestClass")] String CimInstanceParam;
- [Write, EmbeddedInstance("ANSIBLE_xTestClass")] String CimInstanceArrayParam[];
- [Write, EmbeddedInstance("ANSIBLE_xNestedClass")] String NestedCimInstanceParam;
- [Write, EmbeddedInstance("ANSIBLE_xNestedClass")] String NestedCimInstanceArrayParam[];
-};
-
diff --git a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 b/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1
deleted file mode 100644
index 0c43b852..00000000
--- a/test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1
+++ /dev/null
@@ -1,13 +0,0 @@
-@{
- ModuleVersion = '1.0.1'
- GUID = '80c895c4-de3f-4d6d-8fa4-c504c96b6f22'
- Author = 'Ansible'
- CompanyName = 'Ansible'
- Copyright = '(c) 2019'
- Description = 'Test DSC Resource for Ansible integration tests'
- PowerShellVersion = '5.0'
- CLRVersion = '4.0'
- FunctionsToExport = '*'
- CmdletsToExport = '*'
-}
-
diff --git a/test/integration/targets/incidental_win_dsc/meta/main.yml b/test/integration/targets/incidental_win_dsc/meta/main.yml
deleted file mode 100644
index 9f37e96c..00000000
--- a/test/integration/targets/incidental_win_dsc/meta/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-dependencies:
-- setup_remote_tmp_dir
diff --git a/test/integration/targets/incidental_win_dsc/tasks/main.yml b/test/integration/targets/incidental_win_dsc/tasks/main.yml
deleted file mode 100644
index f37295ab..00000000
--- a/test/integration/targets/incidental_win_dsc/tasks/main.yml
+++ /dev/null
@@ -1,39 +0,0 @@
----
-- name: get powershell version
- win_shell: $PSVersionTable.PSVersion.Major
- register: powershell_version
-
-- name: expect failure when running on old PS hosts
- win_dsc:
- resource_name: File
- register: fail_dsc_old
- failed_when: '"This module cannot run as it requires a minimum PowerShell version of 5.0" not in fail_dsc_old.msg'
- when: powershell_version.stdout_lines[0]|int < 5
-
-- name: run tests when PSv5+
- when: powershell_version.stdout_lines[0]|int >= 5
- block:
- - name: add remote temp dir to PSModulePath
- win_path:
- name: PSModulePath
- state: present
- scope: machine
- elements:
- - '{{ remote_tmp_dir }}'
-
- - name: copy custom DSC resources to remote temp dir
- win_copy:
- src: xTestDsc
- dest: '{{ remote_tmp_dir }}'
-
- - name: run tests
- include_tasks: tests.yml
-
- always:
- - name: remove remote tmp dir from PSModulePath
- win_path:
- name: PSModulePath
- state: absent
- scope: machine
- elements:
- - '{{ remote_tmp_dir }}'
diff --git a/test/integration/targets/incidental_win_dsc/tasks/tests.yml b/test/integration/targets/incidental_win_dsc/tasks/tests.yml
deleted file mode 100644
index d2a6802f..00000000
--- a/test/integration/targets/incidental_win_dsc/tasks/tests.yml
+++ /dev/null
@@ -1,544 +0,0 @@
----
-- name: fail with incorrect DSC resource name
- win_dsc:
- resource_name: FakeResource
- register: fail_invalid_resource
- failed_when: fail_invalid_resource.msg != "Resource 'FakeResource' not found."
-
-- name: fail with invalid DSC version
- win_dsc:
- resource_name: xTestResource
- module_version: 0.0.1
- register: fail_invalid_version
- failed_when: 'fail_invalid_version.msg != "Resource ''xTestResource'' with version ''0.0.1'' not found. Versions installed: ''1.0.0'', ''1.0.1''."'
-
-- name: fail with mandatory option not set
- win_dsc:
- resource_name: xSetReboot
- Value: yes
- register: fail_man_key
- failed_when: 'fail_man_key.msg != "missing required arguments: KeyParam"'
-
-- name: fail with mandatory option not set in sub dict
- win_dsc:
- resource_name: xTestResource
- Path: C:\path
- Ensure: Present
- CimInstanceParam: # Missing KeyValue in dict
- Choice: Choice1
- register: fail_man_key_sub_dict
- failed_when: 'fail_man_key_sub_dict.msg != "missing required arguments: KeyValue found in CimInstanceParam"'
-
-- name: fail invalid option
- win_dsc:
- resource_name: xSetReboot
- KeyParam: key
- OtherParam: invalid
- register: fail_invalid_option
- failed_when: 'fail_invalid_option.msg != "Unsupported parameters for (win_dsc) module: OtherParam. Supported parameters include: KeyParam, PsDscRunAsCredential_username, module_version, Value, PsDscRunAsCredential_password, resource_name, DependsOn"'
-
-- name: fail invalid option in sub dict
- win_dsc:
- resource_name: xTestResource
- Path: C:\path
- Ensure: Present
- NestedCimInstanceParam:
- KeyValue: key
- CimValue:
- KeyValue: other key
- InvalidKey: invalid
- register: fail_invalid_option_sub_dict
- failed_when: 'fail_invalid_option_sub_dict.msg != "Unsupported parameters for (win_dsc) module: InvalidKey found in NestedCimInstanceParam -> CimValue. Supported parameters include: IntValue, KeyValue, StringArrayValue, Choice, StringValue"'
-
-- name: fail invalid read only option
- win_dsc:
- resource_name: xTestResource
- Path: C:\path
- Ensure: Present
- ReadParam: abc
- register: fail_invalid_option_read_only
- failed_when: '"Unsupported parameters for (win_dsc) module: ReadParam" not in fail_invalid_option_read_only.msg'
-
-- name: fail invalid choice
- win_dsc:
- resource_name: xTestResource
- Path: C:\path
- Ensure: invalid
- register: fail_invalid_choice
- failed_when: 'fail_invalid_choice.msg != "value of Ensure must be one of: Present, Absent. Got no match for: invalid"'
-
-- name: fail invalid choice in sub dict
- win_dsc:
- resource_name: xTestResource
- Path: C:\path
- Ensure: Present
- CimInstanceArrayParam:
- - KeyValue: key
- - KeyValue: key2
- Choice: Choice3
- register: fail_invalid_choice_sub_dict
- failed_when: 'fail_invalid_choice_sub_dict.msg != "value of Choice must be one of: Choice1, Choice2. Got no match for: Choice3 found in CimInstanceArrayParam"'
-
-- name: fail old version missing new option
- win_dsc:
- resource_name: xTestResource
- module_version: 1.0.0
- Path: C:\path
- Ensure: Present
- CimInstanceParam: # CimInstanceParam does not exist in the 1.0.0 version
- Key: key
- register: fail_invalid_option_old
- failed_when: '"Unsupported parameters for (win_dsc) module: CimInstanceParam" not in fail_invalid_option_old.msg'
-
-- name: fail old version missing new option sub dict
- win_dsc:
- resource_name: xTestResource
- module_version: 1.0.0
- Path: C:\path
- Ensure: Present
- CimInstanceArrayParam:
- - Key: key
- Choice: Choice1
- register: fail_invalid_option_old_sub_dict
- failed_when: 'fail_invalid_option_old_sub_dict.msg != "Unsupported parameters for (win_dsc) module: Choice found in CimInstanceArrayParam. Supported parameters include: Key, IntValue, StringArrayValue, StringValue"'
-
-- name: create test file (check mode)
- win_dsc:
- resource_name: File
- DestinationPath: '{{ remote_tmp_dir }}\dsc-file'
- Contents: file contents
- Attributes:
- - Hidden
- - ReadOnly
- Ensure: Present
- Type: File
- register: create_file_check
- check_mode: yes
-
-- name: get result of create test file (check mode)
- win_stat:
- path: '{{ remote_tmp_dir }}\dsc-file'
- register: create_file_actual_check
-
-- name: assert create test file (check mode)
- assert:
- that:
- - create_file_check is changed
- - create_file_check.module_version == None # Some built in modules don't have a version set
- - not create_file_check.reboot_required
- - not create_file_actual_check.stat.exists
-
-- name: assert create test file verbosity (check mode)
- assert:
- that:
- - create_file_check.verbose_test is defined
- - not create_file_check.verbose_set is defined
- when: ansible_verbosity >= 3
-
-- name: create test file
- win_dsc:
- resource_name: File
- DestinationPath: '{{ remote_tmp_dir }}\dsc-file'
- Contents: file contents
- Attributes:
- - Hidden
- - ReadOnly
- Ensure: Present
- Type: File
- register: create_file
-
-- name: get result of create test file
- win_stat:
- path: '{{ remote_tmp_dir }}\dsc-file'
- register: create_file_actual
-
-- name: assert create test file verbosity
- assert:
- that:
- - create_file.verbose_test is defined
- - create_file.verbose_set is defined
- when: ansible_verbosity >= 3
-
-- name: assert create test file
- assert:
- that:
- - create_file is changed
- - create_file.module_version == None
- - not create_file.reboot_required
- - create_file_actual.stat.exists
- - create_file_actual.stat.attributes == "ReadOnly, Hidden, Archive"
- - create_file_actual.stat.checksum == 'd48daab51112b49ecabd917adc345b8ba257055e'
-
-- name: create test file (idempotent)
- win_dsc:
- resource_name: File
- DestinationPath: '{{ remote_tmp_dir }}\dsc-file'
- Contents: file contents
- Attributes:
- - Hidden
- - ReadOnly
- Ensure: Present
- Type: File
- register: create_file_again
-
-- name: assert create test file (idempotent)
- assert:
- that:
- - not create_file_again is changed
- - create_file.module_version == None
- - not create_file.reboot_required
-
-- name: get SID of the current Ansible user
- win_shell: |
- Add-Type -AssemblyName System.DirectoryServices.AccountManagement
- [System.DirectoryServices.AccountManagement.UserPrincipal]::Current.Sid.Value
- register: actual_sid
-
-- name: run DSC process as another user
- win_dsc:
- resource_name: Script
- GetScript: '@{ Result= "" }'
- SetScript: |
- Add-Type -AssemblyName System.DirectoryServices.AccountManagement
- $sid = [System.DirectoryServices.AccountManagement.UserPrincipal]::Current.Sid.Value
- Set-Content -Path "{{ remote_tmp_dir }}\runas.txt" -Value $sid
- TestScript: $false
- PsDscRunAsCredential_username: '{{ ansible_user }}'
- PsDscRunAsCredential_password: '{{ ansible_password }}'
- register: runas_user
-
-- name: get result of run DSC process as another user
- slurp:
- path: '{{ remote_tmp_dir }}\runas.txt'
- register: runas_user_result
-
-- name: assert run DSC process as another user
- assert:
- that:
- - runas_user is changed
- - runas_user.module_version != None # Can't reliably set the version but we can test it is set
- - not runas_user.reboot_required
- - runas_user_result.content|b64decode == actual_sid.stdout
-
-- name: run DSC that sets reboot_required with defaults
- win_dsc:
- resource_name: xSetReboot
- KeyParam: value # Just to satisfy the Resource with key validation
- register: set_reboot_defaults
-
-- name: assert run DSC that sets reboot_required with defaults
- assert:
- that:
- - set_reboot_defaults.reboot_required
-
-- name: run DSC that sets reboot_required with False
- win_dsc:
- resource_name: xSetReboot
- KeyParam: value
- Value: no
- register: set_reboot_false
-
-- name: assert run DSC that sets reboot_required with False
- assert:
- that:
- - not set_reboot_false.reboot_required
-
-- name: run DSC that sets reboot_required with True
- win_dsc:
- resource_name: xSetReboot
- KeyParam: value
- Value: yes
- register: set_reboot_true
-
-- name: assert run DSC that sets reboot_required with True
- assert:
- that:
- - set_reboot_true.reboot_required
-
-- name: test DSC with all types
- win_dsc:
- resource_name: xTestResource
- Path: '{{ remote_tmp_dir }}\test-types.json'
- Ensure: Present
- StringParam: string param
- StringArrayParam:
- - string 1
- - string 2
- Int8Param: 127 # [SByte]::MaxValue
- Int8ArrayParam:
- - 127
- - '127'
- UInt8Param: 255 # [Byte]::MaxValue
- UInt8ArrayParam:
- - 255
- - '255'
- Int16Param: 32767 # [Int16]::MaxValue
- Int16ArrayParam: 32767, 32767
- UInt16Param: '65535' # [UInt16]::MaxValue
- UInt16ArrayParam: 65535
- Int32Param: 2147483647 # [Int32]::MaxValue
- Int32ArrayParam: '2147483647'
- UInt32Param: '4294967295' # [UInt32]::MaxValue
- UInt32ArrayParam:
- - '4294967295'
- - 4294967295
- Int64Param: 9223372036854775807 # [Int64]::MaxValue
- Int64ArrayParam:
- - -9223372036854775808 # [Int64]::MinValue
- - 9223372036854775807
- UInt64Param: 18446744073709551615 # [UInt64]::MaxValue
- UInt64ArrayParam:
- - 0 # [UInt64]::MinValue
- - 18446744073709551615
- BooleanParam: True
- BooleanArrayParam:
- - True
- - 'True'
- - 'true'
- - 'y'
- - 'yes'
- - 1
- - False
- - 'False'
- - 'false'
- - 'n'
- - 'no'
- - 0
- CharParam: c
- CharArrayParam:
- - c
- - h
- - a
- - r
- SingleParam: 3.402823E+38
- SingleArrayParam:
- - '3.402823E+38'
- - 1.2393494
- DoubleParam: 1.79769313486232E+300
- DoubleArrayParam:
- - '1.79769313486232E+300'
- - 3.56821831681516
- DateTimeParam: '2019-02-22T13:57:31.2311892-04:00'
- DateTimeArrayParam:
- - '2019-02-22T13:57:31.2311892+00:00'
- - '2019-02-22T13:57:31.2311892+04:00'
- PSCredentialParam_username: username1
- PSCredentialParam_password: password1
- HashtableParam:
- key1: string 1
- key2: ''
- key3: 1
- CimInstanceParam:
- KeyValue: a
- CimInstanceArrayParam:
- - KeyValue: b
- Choice: Choice1
- StringValue: string 1
- IntValue: 1
- StringArrayValue:
- - abc
- - def
- - KeyValue: c
- Choice: Choice2
- StringValue: string 2
- IntValue: '2'
- StringArrayValue:
- - ghi
- - jkl
- NestedCimInstanceParam:
- KeyValue: key value
- CimValue:
- KeyValue: d
- CimArrayValue:
- - KeyValue: e
- Choice: Choice2
- HashValue:
- a: a
- IntValue: '300'
- register: dsc_types
-
-- name: get result of test DSC with all types
- slurp:
- path: '{{ remote_tmp_dir }}\test-types.json'
- register: dsc_types_raw
-
-- name: convert result of test DSC with all types to dict
- set_fact:
- dsc_types_actual: '{{ dsc_types_raw.content | b64decode | from_json }}'
-
-- name: assert test DSC with all types
- assert:
- that:
- - dsc_types is changed
- - dsc_types.module_version == '1.0.1'
- - not dsc_types.reboot_required
- - dsc_types_actual.Version == '1.0.1'
- - dsc_types_actual.Verbose.Value.IsPresent
- - dsc_types_actual.DefaultParam.Value == 'Default' # ensures that the default is set in the engine if we don't set it outselves
- - dsc_types_actual.Ensure.Value == 'Present'
- - dsc_types_actual.Path.Value == remote_tmp_dir + "\\test-types.json"
- - dsc_types_actual.StringParam.Type == 'System.String'
- - dsc_types_actual.StringParam.Value == 'string param'
- - dsc_types_actual.StringArrayParam.Type == 'System.String[]'
- - dsc_types_actual.StringArrayParam.Value == ['string 1', 'string 2']
- - dsc_types_actual.Int8Param.Type == 'System.SByte'
- - dsc_types_actual.Int8Param.Value == 127
- - dsc_types_actual.Int8ArrayParam.Type == 'System.SByte[]'
- - dsc_types_actual.Int8ArrayParam.Value == [127, 127]
- - dsc_types_actual.UInt8Param.Type == 'System.Byte'
- - dsc_types_actual.UInt8Param.Value == 255
- - dsc_types_actual.UInt8ArrayParam.Type == 'System.Byte[]'
- - dsc_types_actual.UInt8ArrayParam.Value == [255, 255]
- - dsc_types_actual.Int16Param.Type == 'System.Int16'
- - dsc_types_actual.Int16Param.Value == 32767
- - dsc_types_actual.Int16ArrayParam.Type == 'System.Int16[]'
- - dsc_types_actual.Int16ArrayParam.Value == [32767, 32767]
- - dsc_types_actual.UInt16Param.Type == 'System.UInt16'
- - dsc_types_actual.UInt16Param.Value == 65535
- - dsc_types_actual.UInt16ArrayParam.Type == 'System.UInt16[]'
- - dsc_types_actual.UInt16ArrayParam.Value == [65535]
- - dsc_types_actual.Int32Param.Type == 'System.Int32'
- - dsc_types_actual.Int32Param.Value == 2147483647
- - dsc_types_actual.Int32ArrayParam.Type == 'System.Int32[]'
- - dsc_types_actual.Int32ArrayParam.Value == [2147483647]
- - dsc_types_actual.UInt32Param.Type == 'System.UInt32'
- - dsc_types_actual.UInt32Param.Value == 4294967295
- - dsc_types_actual.UInt32ArrayParam.Type == 'System.UInt32[]'
- - dsc_types_actual.UInt32ArrayParam.Value == [4294967295, 4294967295]
- - dsc_types_actual.Int64Param.Type == 'System.Int64'
- - dsc_types_actual.Int64Param.Value == 9223372036854775807
- - dsc_types_actual.Int64ArrayParam.Type == 'System.Int64[]'
- - dsc_types_actual.Int64ArrayParam.Value == [-9223372036854775808, 9223372036854775807]
- - dsc_types_actual.UInt64Param.Type == 'System.UInt64'
- - dsc_types_actual.UInt64Param.Value == 18446744073709551615
- - dsc_types_actual.UInt64ArrayParam.Type == 'System.UInt64[]'
- - dsc_types_actual.UInt64ArrayParam.Value == [0, 18446744073709551615]
- - dsc_types_actual.BooleanParam.Type == 'System.Boolean'
- - dsc_types_actual.BooleanParam.Value == True
- - dsc_types_actual.BooleanArrayParam.Type == 'System.Boolean[]'
- - dsc_types_actual.BooleanArrayParam.Value == [True, True, True, True, True, True, False, False, False, False, False, False]
- - dsc_types_actual.CharParam.Type == 'System.Char'
- - dsc_types_actual.CharParam.Value == 'c'
- - dsc_types_actual.CharArrayParam.Type == 'System.Char[]'
- - dsc_types_actual.CharArrayParam.Value == ['c', 'h', 'a', 'r']
- - dsc_types_actual.SingleParam.Type == 'System.Single'
- - dsc_types_actual.SingleParam.Value|string == '3.402823e+38'
- - dsc_types_actual.SingleArrayParam.Type == 'System.Single[]'
- - dsc_types_actual.SingleArrayParam.Value|length == 2
- - dsc_types_actual.SingleArrayParam.Value[0]|string == '3.402823e+38'
- - dsc_types_actual.SingleArrayParam.Value[1]|string == '1.23934937'
- - dsc_types_actual.DoubleParam.Type == 'System.Double'
- - dsc_types_actual.DoubleParam.Value == '1.79769313486232E+300'
- - dsc_types_actual.DoubleArrayParam.Type == 'System.Double[]'
- - dsc_types_actual.DoubleArrayParam.Value|length == 2
- - dsc_types_actual.DoubleArrayParam.Value[0] == '1.79769313486232E+300'
- - dsc_types_actual.DoubleArrayParam.Value[1] == '3.56821831681516'
- - dsc_types_actual.DateTimeParam.Type == 'System.DateTime'
- - dsc_types_actual.DateTimeParam.Value == '2019-02-22T17:57:31.2311890+00:00'
- - dsc_types_actual.DateTimeArrayParam.Type == 'System.DateTime[]'
- - dsc_types_actual.DateTimeArrayParam.Value == ['2019-02-22T13:57:31.2311890+00:00', '2019-02-22T09:57:31.2311890+00:00']
- - dsc_types_actual.PSCredentialParam.Type == 'System.Management.Automation.PSCredential'
- - dsc_types_actual.PSCredentialParam.Value.username == 'username1'
- - dsc_types_actual.PSCredentialParam.Value.password == 'password1'
- # Hashtable is actually a CimInstance[] of MSFT_KeyValuePairs
- - dsc_types_actual.HashtableParam.Type == 'Microsoft.Management.Infrastructure.CimInstance[]'
- - dsc_types_actual.HashtableParam.Value|length == 3
- # Can't guarantee the order of the keys so just check they are the values they could be
- - dsc_types_actual.HashtableParam.Value[0].Key in ["key1", "key2", "key3"]
- - dsc_types_actual.HashtableParam.Value[0].Value in ["string 1", "1", ""]
- - dsc_types_actual.HashtableParam.Value[0]._cim_instance == 'MSFT_KeyValuePair'
- - dsc_types_actual.HashtableParam.Value[1].Key in ["key1", "key2", "key3"]
- - dsc_types_actual.HashtableParam.Value[1].Value in ["string 1", "1", ""]
- - dsc_types_actual.HashtableParam.Value[1]._cim_instance == 'MSFT_KeyValuePair'
- - dsc_types_actual.HashtableParam.Value[2].Key in ["key1", "key2", "key3"]
- - dsc_types_actual.HashtableParam.Value[2].Value in ["string 1", "1", ""]
- - dsc_types_actual.HashtableParam.Value[2]._cim_instance == 'MSFT_KeyValuePair'
- - dsc_types_actual.CimInstanceParam.Type == 'Microsoft.Management.Infrastructure.CimInstance'
- - dsc_types_actual.CimInstanceParam.Value.Choice == None
- - dsc_types_actual.CimInstanceParam.Value.IntValue == None
- - dsc_types_actual.CimInstanceParam.Value.KeyValue == 'a'
- - dsc_types_actual.CimInstanceParam.Value.StringArrayValue == None
- - dsc_types_actual.CimInstanceParam.Value.StringValue == None
- - dsc_types_actual.CimInstanceParam.Value._cim_instance == "ANSIBLE_xTestClass"
- - dsc_types_actual.CimInstanceArrayParam.Type == 'Microsoft.Management.Infrastructure.CimInstance[]'
- - dsc_types_actual.CimInstanceArrayParam.Value|length == 2
- - dsc_types_actual.CimInstanceArrayParam.Value[0].Choice == 'Choice1'
- - dsc_types_actual.CimInstanceArrayParam.Value[0].IntValue == 1
- - dsc_types_actual.CimInstanceArrayParam.Value[0].KeyValue == 'b'
- - dsc_types_actual.CimInstanceArrayParam.Value[0].StringArrayValue == ['abc', 'def']
- - dsc_types_actual.CimInstanceArrayParam.Value[0].StringValue == 'string 1'
- - dsc_types_actual.CimInstanceArrayParam.Value[0]._cim_instance == 'ANSIBLE_xTestClass'
- - dsc_types_actual.CimInstanceArrayParam.Value[1].Choice == 'Choice2'
- - dsc_types_actual.CimInstanceArrayParam.Value[1].IntValue == 2
- - dsc_types_actual.CimInstanceArrayParam.Value[1].KeyValue == 'c'
- - dsc_types_actual.CimInstanceArrayParam.Value[1].StringArrayValue == ['ghi', 'jkl']
- - dsc_types_actual.CimInstanceArrayParam.Value[1].StringValue == 'string 2'
- - dsc_types_actual.CimInstanceArrayParam.Value[1]._cim_instance == 'ANSIBLE_xTestClass'
- - dsc_types_actual.NestedCimInstanceParam.Type == 'Microsoft.Management.Infrastructure.CimInstance'
- - dsc_types_actual.NestedCimInstanceParam.Value.CimArrayValue|length == 1
- - dsc_types_actual.NestedCimInstanceParam.Value.CimArrayValue[0].Choice == 'Choice2'
- - dsc_types_actual.NestedCimInstanceParam.Value.CimArrayValue[0].IntValue == None
- - dsc_types_actual.NestedCimInstanceParam.Value.CimArrayValue[0].KeyValue == 'e'
- - dsc_types_actual.NestedCimInstanceParam.Value.CimArrayValue[0].StringArrayValue == None
- - dsc_types_actual.NestedCimInstanceParam.Value.CimArrayValue[0].StringValue == None
- - dsc_types_actual.NestedCimInstanceParam.Value.CimArrayValue[0]._cim_instance == 'ANSIBLE_xTestClass'
- - dsc_types_actual.NestedCimInstanceParam.Value.CimValue.Choice == None
- - dsc_types_actual.NestedCimInstanceParam.Value.CimValue.IntValue == None
- - dsc_types_actual.NestedCimInstanceParam.Value.CimValue.KeyValue == 'd'
- - dsc_types_actual.NestedCimInstanceParam.Value.CimValue.StringArrayValue == None
- - dsc_types_actual.NestedCimInstanceParam.Value.CimValue.StringValue == None
- - dsc_types_actual.NestedCimInstanceParam.Value.CimValue._cim_instance == 'ANSIBLE_xTestClass'
- - dsc_types_actual.NestedCimInstanceParam.Value.HashValue|length == 1
- - dsc_types_actual.NestedCimInstanceParam.Value.HashValue[0].Key == 'a'
- - dsc_types_actual.NestedCimInstanceParam.Value.HashValue[0].Value == 'a'
- - dsc_types_actual.NestedCimInstanceParam.Value.HashValue[0]._cim_instance == 'MSFT_KeyValuePair'
- - dsc_types_actual.NestedCimInstanceParam.Value.IntValue == 300
- - dsc_types_actual.NestedCimInstanceParam.Value.KeyValue == 'key value'
- - dsc_types_actual.NestedCimInstanceParam.Value._cim_instance == 'ANSIBLE_xNestedClass'
-
-- name: test DSC with all types older version
- win_dsc:
- resource_name: xTestResource
- module_version: 1.0.0
- Path: '{{ remote_tmp_dir }}\test-types.json'
- Ensure: Absent
- StringParam: string param old
- CimInstanceArrayParam:
- - Key: old key
- StringValue: string old 1
- IntValue: 0
- StringArrayValue:
- - zyx
- - wvu
- register: dsc_types_old
-
-- name: get result of test DSC with all types older version
- slurp:
- path: '{{ remote_tmp_dir }}\test-types.json'
- register: dsc_types_old_raw
-
-- name: convert result of test DSC with all types to dict
- set_fact:
- dsc_types_old_actual: '{{ dsc_types_old_raw.content | b64decode | from_json }}'
-
-- name: assert test DSC with all types older version
- assert:
- that:
- - dsc_types_old is changed
- - dsc_types_old.module_version == '1.0.0'
- - not dsc_types_old.reboot_required
- - dsc_types_old_actual.Version == '1.0.0'
- - dsc_types_old_actual.Verbose.Value.IsPresent
- - dsc_types_old_actual.DefaultParam.Value == 'Default'
- - dsc_types_old_actual.Ensure.Value == 'Absent'
- - dsc_types_old_actual.Path.Value == remote_tmp_dir + "\\test-types.json"
- - dsc_types_old_actual.StringParam.Type == 'System.String'
- - dsc_types_old_actual.StringParam.Value == 'string param old'
- - dsc_types_old_actual.CimInstanceArrayParam.Type == 'Microsoft.Management.Infrastructure.CimInstance[]'
- - dsc_types_old_actual.CimInstanceArrayParam.Value|length == 1
- - not dsc_types_old_actual.CimInstanceArrayParam.Value[0].Choice is defined # 1.0.0 does not have a Choice option
- - dsc_types_old_actual.CimInstanceArrayParam.Value[0].IntValue == 0
- - dsc_types_old_actual.CimInstanceArrayParam.Value[0].Key == 'old key'
- - dsc_types_old_actual.CimInstanceArrayParam.Value[0].StringArrayValue == ['zyx', 'wvu']
- - dsc_types_old_actual.CimInstanceArrayParam.Value[0].StringValue == 'string old 1'
- - dsc_types_old_actual.CimInstanceArrayParam.Value[0]._cim_instance == 'ANSIBLE_xTestClass'
diff --git a/test/integration/targets/incidental_win_lineinfile/aliases b/test/integration/targets/incidental_win_lineinfile/aliases
deleted file mode 100644
index 194cbc3f..00000000
--- a/test/integration/targets/incidental_win_lineinfile/aliases
+++ /dev/null
@@ -1,3 +0,0 @@
-shippable/windows/incidental
-windows
-skip/windows/2016 # Host takes a while to run and module isn't OS dependent
diff --git a/test/integration/targets/incidental_win_lineinfile/files/test.txt b/test/integration/targets/incidental_win_lineinfile/files/test.txt
deleted file mode 100644
index 8187db9f..00000000
--- a/test/integration/targets/incidental_win_lineinfile/files/test.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-This is line 1
-This is line 2
-REF this is a line for backrefs REF
-This is line 4
-This is line 5
diff --git a/test/integration/targets/incidental_win_lineinfile/files/test_quoting.txt b/test/integration/targets/incidental_win_lineinfile/files/test_quoting.txt
deleted file mode 100644
index e69de29b..00000000
--- a/test/integration/targets/incidental_win_lineinfile/files/test_quoting.txt
+++ /dev/null
diff --git a/test/integration/targets/incidental_win_lineinfile/files/testempty.txt b/test/integration/targets/incidental_win_lineinfile/files/testempty.txt
deleted file mode 100644
index e69de29b..00000000
--- a/test/integration/targets/incidental_win_lineinfile/files/testempty.txt
+++ /dev/null
diff --git a/test/integration/targets/incidental_win_lineinfile/files/testnoeof.txt b/test/integration/targets/incidental_win_lineinfile/files/testnoeof.txt
deleted file mode 100644
index 152780b9..00000000
--- a/test/integration/targets/incidental_win_lineinfile/files/testnoeof.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-This is line 1
-This is line 2 \ No newline at end of file
diff --git a/test/integration/targets/incidental_win_lineinfile/meta/main.yml b/test/integration/targets/incidental_win_lineinfile/meta/main.yml
deleted file mode 100644
index e0ff46db..00000000
--- a/test/integration/targets/incidental_win_lineinfile/meta/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-dependencies:
- - incidental_win_prepare_tests
diff --git a/test/integration/targets/incidental_win_lineinfile/tasks/main.yml b/test/integration/targets/incidental_win_lineinfile/tasks/main.yml
deleted file mode 100644
index e5f047be..00000000
--- a/test/integration/targets/incidental_win_lineinfile/tasks/main.yml
+++ /dev/null
@@ -1,708 +0,0 @@
-# Test code for the win_lineinfile module, adapted from the standard lineinfile module tests
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-
-- name: deploy the test file for lineinfile
- win_copy: src=test.txt dest={{win_output_dir}}/test.txt
- register: result
-
-- name: assert that the test file was deployed
- assert:
- that:
- - "result.changed == true"
-
-- name: stat the test file
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: check win_stat file result
- assert:
- that:
- - "result.stat.exists"
- - "not result.stat.isdir"
- - "result.stat.checksum == '5feac65e442c91f557fc90069ce6efc4d346ab51'"
- - "result is not failed"
- - "result is not changed"
-
-
-- name: insert a line at the beginning of the file, and back it up
- win_lineinfile: dest={{win_output_dir}}/test.txt state=present line="New line at the beginning" insertbefore="BOF" backup=yes
- register: result
-
-- name: check backup_file
- win_stat:
- path: '{{ result.backup_file }}'
- register: backup_file
-
-- name: assert that the line was inserted at the head of the file
- assert:
- that:
- - result.changed == true
- - result.msg == 'line added'
- - backup_file.stat.exists == true
-
-- name: stat the backup file
- win_stat: path={{result.backup}}
- register: result
-
-- name: assert the backup file matches the previous hash
- assert:
- that:
- - "result.stat.checksum == '5feac65e442c91f557fc90069ce6efc4d346ab51'"
-
-- name: stat the test after the insert at the head
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test hash is what we expect for the file with the insert at the head
- assert:
- that:
- - "result.stat.checksum == 'b526e2e044defc64dfb0fad2f56e105178f317d8'"
-
-- name: insert a line at the end of the file
- win_lineinfile: dest={{win_output_dir}}/test.txt state=present line="New line at the end" insertafter="EOF"
- register: result
-
-- name: assert that the line was inserted at the end of the file
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: stat the test after the insert at the end
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test checksum matches after the insert at the end
- assert:
- that:
- - "result.stat.checksum == 'dd5e207e28ce694ab18e41c2b16deb74fde93b14'"
-
-- name: insert a line after the first line
- win_lineinfile: dest={{win_output_dir}}/test.txt state=present line="New line after line 1" insertafter="^This is line 1$"
- register: result
-
-- name: assert that the line was inserted after the first line
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: stat the test after insert after the first line
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test checksum matches after the insert after the first line
- assert:
- that:
- - "result.stat.checksum == '604b17405f2088e6868af9680b7834087acdc8f4'"
-
-- name: insert a line before the last line
- win_lineinfile: dest={{win_output_dir}}/test.txt state=present line="New line before line 5" insertbefore="^This is line 5$"
- register: result
-
-- name: assert that the line was inserted before the last line
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: stat the test after the insert before the last line
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test checksum matches after the insert before the last line
- assert:
- that:
- - "result.stat.checksum == '8f5b30e8f01578043d782e5a68d4c327e75a6e34'"
-
-- name: replace a line with backrefs
- win_lineinfile: dest={{win_output_dir}}/test.txt state=present line="This is line 3" backrefs=yes regexp="^(REF).*$"
- register: result
-
-- name: assert that the line with backrefs was changed
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line replaced'"
-
-- name: stat the test after the backref line was replaced
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test checksum matches after backref line was replaced
- assert:
- that:
- - "result.stat.checksum == 'ef6b02645908511a2cfd2df29d50dd008897c580'"
-
-- name: remove the middle line
- win_lineinfile: dest={{win_output_dir}}/test.txt state=absent regexp="^This is line 3$"
- register: result
-
-- name: assert that the line was removed
- assert:
- that:
- - "result.changed == true"
- - "result.msg == '1 line(s) removed'"
-
-- name: stat the test after the middle line was removed
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test checksum matches after the middle line was removed
- assert:
- that:
- - "result.stat.checksum == '11695efa472be5c31c736bc43e055f8ac90eabdf'"
-
-- name: run a validation script that succeeds
- win_lineinfile: dest={{win_output_dir}}/test.txt state=absent regexp="^This is line 5$" validate="sort.exe %s"
- register: result
-
-- name: assert that the file validated after removing a line
- assert:
- that:
- - "result.changed == true"
- - "result.msg == '1 line(s) removed'"
-
-- name: stat the test after the validation succeeded
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test checksum matches after the validation succeeded
- assert:
- that:
- - "result.stat.checksum == '39c38a30aa6ac6af9ec41f54c7ed7683f1249347'"
-
-- name: run a validation script that fails
- win_lineinfile: dest={{win_output_dir}}/test.txt state=absent regexp="^This is line 1$" validate="sort.exe %s.foo"
- register: result
- ignore_errors: yes
-
-- name: assert that the validate failed
- assert:
- that:
- - "result.failed == true"
-
-- name: stat the test after the validation failed
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test checksum matches the previous after the validation failed
- assert:
- that:
- - "result.stat.checksum == '39c38a30aa6ac6af9ec41f54c7ed7683f1249347'"
-
-- name: use create=yes
- win_lineinfile: dest={{win_output_dir}}/new_test.txt create=yes insertbefore=BOF state=present line="This is a new file"
- register: result
-
-- name: assert that the new file was created
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: validate that the newly created file exists
- win_stat: path={{win_output_dir}}/new_test.txt
- register: result
- ignore_errors: yes
-
-- name: assert the newly created test checksum matches
- assert:
- that:
- - "result.stat.checksum == '84faac1183841c57434693752fc3debc91b9195d'"
-
-# Test EOF in cases where file has no newline at EOF
-- name: testnoeof deploy the file for lineinfile
- win_copy: src=testnoeof.txt dest={{win_output_dir}}/testnoeof.txt
- register: result
-
-- name: testnoeof insert a line at the end of the file
- win_lineinfile: dest={{win_output_dir}}/testnoeof.txt state=present line="New line at the end" insertafter="EOF"
- register: result
-
-- name: testempty assert that the line was inserted at the end of the file
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: testnoeof stat the no newline EOF test after the insert at the end
- win_stat: path={{win_output_dir}}/testnoeof.txt
- register: result
-
-- name: testnoeof assert test checksum matches after the insert at the end
- assert:
- that:
- - "result.stat.checksum == '229852b09f7e9921fbcbb0ee0166ba78f7f7f261'"
-
-- name: add multiple lines at the end of the file
- win_lineinfile: dest={{win_output_dir}}/test.txt state=present line="This is a line\r\nwith newline character" insertafter="EOF"
- register: result
-
-- name: assert that the multiple lines was inserted
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: stat file after adding multiple lines
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test checksum matches after inserting multiple lines
- assert:
- that:
- - "result.stat.checksum == '1401413cd4eac732be66cd6aceddd334c4240f86'"
-
-
-
-# Test EOF with empty file to make sure no unnecessary newline is added
-- name: testempty deploy the testempty file for lineinfile
- win_copy: src=testempty.txt dest={{win_output_dir}}/testempty.txt
- register: result
-
-- name: testempty insert a line at the end of the file
- win_lineinfile: dest={{win_output_dir}}/testempty.txt state=present line="New line at the end" insertafter="EOF"
- register: result
-
-- name: testempty assert that the line was inserted at the end of the file
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: testempty stat the test after the insert at the end
- win_stat: path={{win_output_dir}}/testempty.txt
- register: result
-
-- name: testempty assert test checksum matches after the insert at the end
- assert:
- that:
- - "result.stat.checksum == 'd3d34f11edda51be7ca5dcb0757cf3e1257c0bfe'"
-
-
-
-- name: replace a line with backrefs included in the line
- win_lineinfile: dest={{win_output_dir}}/test.txt state=present line="New $1 created with the backref" backrefs=yes regexp="^This is (line 4)$"
- register: result
-
-- name: assert that the line with backrefs was changed
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line replaced'"
-
-- name: stat the test after the backref line was replaced
- win_stat: path={{win_output_dir}}/test.txt
- register: result
-
-- name: assert test checksum matches after backref line was replaced
- assert:
- that:
- - "result.stat.checksum == 'e6ff42e926dac2274c93dff0b8a323e07ae09149'"
-
-###################################################################
-# issue 8535
-
-- name: create a new file for testing quoting issues
- win_copy: src=test_quoting.txt dest={{win_output_dir}}/test_quoting.txt
- register: result
-
-- name: assert the new file was created
- assert:
- that:
- - result.changed
-
-- name: use with_items to add code-like strings to the quoting txt file
- win_lineinfile: >
- dest={{win_output_dir}}/test_quoting.txt
- line="{{ item }}"
- insertbefore="BOF"
- with_items:
- - "'foo'"
- - "dotenv.load();"
- - "var dotenv = require('dotenv');"
- register: result
-
-- name: assert the quote test file was modified correctly
- assert:
- that:
- - result.results|length == 3
- - result.results[0].changed
- - result.results[0].item == "'foo'"
- - result.results[1].changed
- - result.results[1].item == "dotenv.load();"
- - result.results[2].changed
- - result.results[2].item == "var dotenv = require('dotenv');"
-
-- name: stat the quote test file
- win_stat: path={{win_output_dir}}/test_quoting.txt
- register: result
-
-- name: assert test checksum matches for quote test file
- assert:
- that:
- - "result.stat.checksum == 'f3bccdbdfa1d7176c497ef87d04957af40ab48d2'"
-
-- name: append a line into the quoted file with a single quote
- win_lineinfile: dest={{win_output_dir}}/test_quoting.txt line="import g'"
- register: result
-
-- name: assert that the quoted file was changed
- assert:
- that:
- - result.changed
-
-- name: stat the quote test file
- win_stat: path={{win_output_dir}}/test_quoting.txt
- register: result
-
-- name: assert test checksum matches adding line with single quote
- assert:
- that:
- - "result.stat.checksum == 'dabf4cbe471e1797d8dcfc773b6b638c524d5237'"
-
-- name: insert a line into the quoted file with many double quotation strings
- win_lineinfile: dest={{win_output_dir}}/test_quoting.txt line='"quote" and "unquote"'
- register: result
-
-- name: assert that the quoted file was changed
- assert:
- that:
- - result.changed
-
-- name: stat the quote test file
- win_stat: path={{win_output_dir}}/test_quoting.txt
- register: result
-
-- name: assert test checksum matches quoted line added
- assert:
- that:
- - "result.stat.checksum == '9dc1fc1ff19942e2936564102ad37134fa83b91d'"
-
-
-# Windows vs. Unix line separator test cases
-
-- name: Create windows test file with initial line
- win_lineinfile: dest={{win_output_dir}}/test_windows_sep.txt create=yes insertbefore=BOF state=present line="This is a new file"
- register: result
-
-- name: assert that the new file was created
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: validate that the newly created file exists
- win_stat: path={{win_output_dir}}/test_windows_sep.txt
- register: result
-
-- name: assert the newly created file checksum matches
- assert:
- that:
- - "result.stat.checksum == '84faac1183841c57434693752fc3debc91b9195d'"
-
-- name: Test appending to the file using the default (windows) line separator
- win_lineinfile: dest={{win_output_dir}}/test_windows_sep.txt insertbefore=EOF state=present line="This is the last line"
- register: result
-
-- name: assert that the new line was added
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: stat the file
- win_stat: path={{win_output_dir}}/test_windows_sep.txt
- register: result
-
-- name: assert the file checksum matches expected checksum
- assert:
- that:
- - "result.stat.checksum == '71a17ddd1d57ed7c7912e4fd11ecb2ead0b27033'"
-
-
-- name: Create unix test file with initial line
- win_lineinfile: dest={{win_output_dir}}/test_unix_sep.txt create=yes insertbefore=BOF state=present line="This is a new file"
- register: result
-
-- name: assert that the new file was created
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: validate that the newly created file exists
- win_stat: path={{win_output_dir}}/test_unix_sep.txt
- register: result
-
-- name: assert the newly created file checksum matches
- assert:
- that:
- - "result.stat.checksum == '84faac1183841c57434693752fc3debc91b9195d'"
-
-- name: Test appending to the file using unix line separator
- win_lineinfile: dest={{win_output_dir}}/test_unix_sep.txt insertbefore=EOF state=present line="This is the last line" newline="unix"
- register: result
-
-- name: assert that the new line was added
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
-
-- name: stat the file
- win_stat: path={{win_output_dir}}/test_unix_sep.txt
- register: result
-
-- name: assert the file checksum matches expected checksum
- assert:
- that:
- - "result.stat.checksum == 'f1f634a37ab1c73efb77a71a5ad2cc87b61b17ae'"
-
-
-# Encoding management test cases
-
-# Default (auto) encoding should use utf-8 with no BOM
-- name: Test create file without explicit encoding results in utf-8 without BOM
- win_lineinfile: dest={{win_output_dir}}/test_auto_utf8.txt create=yes insertbefore=BOF state=present line="This is a new utf-8 file"
- register: result
-
-- name: assert that the new file was created
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
- - "result.encoding == 'utf-8'"
-
-- name: validate that the newly created file exists
- win_stat: path={{win_output_dir}}/test_auto_utf8.txt
- register: result
-
-- name: assert the newly created file checksum matches
- assert:
- that:
- - "result.stat.checksum == 'b69fcbacca8291a4668f57fba91d7c022f1c3dc7'"
-
-- name: Test appending to the utf-8 without BOM file - should autodetect UTF-8 no BOM
- win_lineinfile: dest={{win_output_dir}}/test_auto_utf8.txt insertbefore=EOF state=present line="This is the last line"
- register: result
-
-- name: assert that the new line was added and encoding did not change
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
- - "result.encoding == 'utf-8'"
-
-- name: stat the file
- win_stat: path={{win_output_dir}}/test_auto_utf8.txt
- register: result
-
-- name: assert the file checksum matches
- assert:
- that:
- - "result.stat.checksum == '64d747f1ebf8c9d793dbfd27126e4152d39a3848'"
-
-
-# UTF-8 explicit (with BOM)
-- name: Test create file with explicit utf-8 encoding results in utf-8 with a BOM
- win_lineinfile: dest={{win_output_dir}}/test_utf8.txt create=yes encoding="utf-8" insertbefore=BOF state=present line="This is a new utf-8 file"
- register: result
-
-- name: assert that the new file was created
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
- - "result.encoding == 'utf-8'"
-
-- name: validate that the newly created file exists
- win_stat: path={{win_output_dir}}/test_utf8.txt
- register: result
-
-- name: assert the newly created file checksum matches
- assert:
- that:
- - "result.stat.checksum == 'd45344b2b3bf1cf90eae851b40612f5f37a88bbb'"
-
-- name: Test appending to the utf-8 with BOM file - should autodetect utf-8 with BOM encoding
- win_lineinfile: dest={{win_output_dir}}/test_utf8.txt insertbefore=EOF state=present line="This is the last line"
- register: result
-
-- name: assert that the new line was added and encoding did not change
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
- - "result.encoding == 'utf-8'"
-
-- name: stat the file
- win_stat: path={{win_output_dir}}/test_utf8.txt
- register: result
-
-- name: assert the file checksum matches
- assert:
- that:
- - "result.stat.checksum == '9b84254489f40f258871a4c6573cacc65895ee1a'"
-
-
-# UTF-16 explicit
-- name: Test create file with explicit utf-16 encoding
- win_lineinfile: dest={{win_output_dir}}/test_utf16.txt create=yes encoding="utf-16" insertbefore=BOF state=present line="This is a new utf-16 file"
- register: result
-
-- name: assert that the new file was created
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
- - "result.encoding == 'utf-16'"
-
-- name: validate that the newly created file exists
- win_stat: path={{win_output_dir}}/test_utf16.txt
- register: result
-
-- name: assert the newly created file checksum matches
- assert:
- that:
- - "result.stat.checksum == '785b0693cec13b60e2c232782adeda2f8a967434'"
-
-- name: Test appending to the utf-16 file - should autodetect utf-16 encoding
- win_lineinfile: dest={{win_output_dir}}/test_utf16.txt insertbefore=EOF state=present line="This is the last line"
- register: result
-
-- name: assert that the new line was added and encoding did not change
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
- - "result.encoding == 'utf-16'"
-
-- name: stat the file
- win_stat: path={{win_output_dir}}/test_utf16.txt
- register: result
-
-- name: assert the file checksum matches
- assert:
- that:
- - "result.stat.checksum == '70e4eb3ba795e1ba94d262db47e4fd17c64b2e73'"
-
-# UTF-32 explicit
-- name: Test create file with explicit utf-32 encoding
- win_lineinfile: dest={{win_output_dir}}/test_utf32.txt create=yes encoding="utf-32" insertbefore=BOF state=present line="This is a new utf-32 file"
- register: result
-
-- name: assert that the new file was created
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
- - "result.encoding == 'utf-32'"
-
-- name: validate that the newly created file exists
- win_stat: path={{win_output_dir}}/test_utf32.txt
- register: result
-
-- name: assert the newly created file checksum matches
- assert:
- that:
- - "result.stat.checksum == '7a6e3f3604c0def431aaa813173a4ddaa10fd1fb'"
-
-- name: Test appending to the utf-32 file - should autodetect utf-32 encoding
- win_lineinfile: dest={{win_output_dir}}/test_utf32.txt insertbefore=EOF state=present line="This is the last line"
- register: result
-
-- name: assert that the new line was added and encoding did not change
- assert:
- that:
- - "result.changed == true"
- - "result.msg == 'line added'"
- - "result.encoding == 'utf-32'"
-
-- name: stat the file
- win_stat: path={{win_output_dir}}/test_utf32.txt
- register: result
-
-- name: assert the file checksum matches
- assert:
- that:
- - "result.stat.checksum == '66a72e71f42c4775f4326da95cfe82c8830e5022'"
-
-#########################################################################
-# issue #33858
-# \r\n causes line break instead of printing literally which breaks paths.
-
-- name: create testing file
- win_copy:
- src: test_linebreak.txt
- dest: "{{win_output_dir}}/test_linebreak.txt"
-
-- name: stat the test file
- win_stat:
- path: "{{win_output_dir}}/test_linebreak.txt"
- register: result
-
-# (Get-FileHash -path C:\ansible\test\integration\targets\win_lineinfile\files\test_linebreak.txt -Algorithm sha1).hash.tolower()
-- name: check win_stat file result
- assert:
- that:
- - result.stat.exists
- - not result.stat.isdir
- - result.stat.checksum == 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
- - result is not failed
- - result is not changed
-
-- name: insert path c:\return\new to test file
- win_lineinfile:
- dest: "{{win_output_dir}}/test_linebreak.txt"
- line: c:\return\new
- register: result_literal
-
-- name: insert path "c:\return\new" to test file, will cause line breaks
- win_lineinfile:
- dest: "{{win_output_dir}}/test_linebreak.txt"
- line: "c:\return\new"
- register: result_expand
-
-- name: assert that the lines were inserted
- assert:
- that:
- - result_literal.changed == true
- - result_literal.msg == 'line added'
- - result_expand.changed == true
- - result_expand.msg == 'line added'
-
-- name: stat the test file
- win_stat:
- path: "{{win_output_dir}}/test_linebreak.txt"
- register: result
-
-- debug:
- var: result
- verbosity: 1
-
-# expect that the file looks like this:
-# c:\return\new
-# c:
-# eturn
-# ew #or c:eturnew on windows
-- name: assert that one line is literal and the other has breaks
- assert:
- that:
- - result.stat.checksum == 'd2dfd11bc70526ff13a91153c76a7ae5595a845b'
diff --git a/test/integration/targets/incidental_win_ping/aliases b/test/integration/targets/incidental_win_ping/aliases
deleted file mode 100644
index a5fc90dc..00000000
--- a/test/integration/targets/incidental_win_ping/aliases
+++ /dev/null
@@ -1,2 +0,0 @@
-shippable/windows/incidental
-windows
diff --git a/test/integration/targets/incidental_win_ping/library/win_ping_set_attr.ps1 b/test/integration/targets/incidental_win_ping/library/win_ping_set_attr.ps1
deleted file mode 100644
index f1704964..00000000
--- a/test/integration/targets/incidental_win_ping/library/win_ping_set_attr.ps1
+++ /dev/null
@@ -1,31 +0,0 @@
-#!powershell
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# POWERSHELL_COMMON
-
-$params = Parse-Args $args $true;
-
-$data = Get-Attr $params "data" "pong";
-
-$result = @{
- changed = $false
- ping = "pong"
-};
-
-# Test that Set-Attr will replace an existing attribute.
-Set-Attr $result "ping" $data
-
-Exit-Json $result;
diff --git a/test/integration/targets/incidental_win_ping/library/win_ping_strict_mode_error.ps1 b/test/integration/targets/incidental_win_ping/library/win_ping_strict_mode_error.ps1
deleted file mode 100644
index 508174af..00000000
--- a/test/integration/targets/incidental_win_ping/library/win_ping_strict_mode_error.ps1
+++ /dev/null
@@ -1,30 +0,0 @@
-#!powershell
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# POWERSHELL_COMMON
-
-$params = Parse-Args $args $true;
-
-$params.thisPropertyDoesNotExist
-
-$data = Get-Attr $params "data" "pong";
-
-$result = @{
- changed = $false
- ping = $data
-};
-
-Exit-Json $result;
diff --git a/test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 b/test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1
deleted file mode 100644
index d4c9f07a..00000000
--- a/test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1
+++ /dev/null
@@ -1,30 +0,0 @@
-#!powershell
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# POWERSHELL_COMMON
-
-$blah = 'I can't quote my strings correctly.'
-
-$params = Parse-Args $args $true;
-
-$data = Get-Attr $params "data" "pong";
-
-$result = @{
- changed = $false
- ping = $data
-};
-
-Exit-Json $result;
diff --git a/test/integration/targets/incidental_win_ping/library/win_ping_throw.ps1 b/test/integration/targets/incidental_win_ping/library/win_ping_throw.ps1
deleted file mode 100644
index 7306f4d2..00000000
--- a/test/integration/targets/incidental_win_ping/library/win_ping_throw.ps1
+++ /dev/null
@@ -1,30 +0,0 @@
-#!powershell
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# POWERSHELL_COMMON
-
-throw
-
-$params = Parse-Args $args $true;
-
-$data = Get-Attr $params "data" "pong";
-
-$result = @{
- changed = $false
- ping = $data
-};
-
-Exit-Json $result;
diff --git a/test/integration/targets/incidental_win_ping/library/win_ping_throw_string.ps1 b/test/integration/targets/incidental_win_ping/library/win_ping_throw_string.ps1
deleted file mode 100644
index 09e3b7cb..00000000
--- a/test/integration/targets/incidental_win_ping/library/win_ping_throw_string.ps1
+++ /dev/null
@@ -1,30 +0,0 @@
-#!powershell
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# POWERSHELL_COMMON
-
-throw "no ping for you"
-
-$params = Parse-Args $args $true;
-
-$data = Get-Attr $params "data" "pong";
-
-$result = @{
- changed = $false
- ping = $data
-};
-
-Exit-Json $result;
diff --git a/test/integration/targets/incidental_win_ping/tasks/main.yml b/test/integration/targets/incidental_win_ping/tasks/main.yml
deleted file mode 100644
index a7e6ba7f..00000000
--- a/test/integration/targets/incidental_win_ping/tasks/main.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-# test code for the win_ping module
-# (c) 2014, Chris Church <chris@ninemoreminutes.com>
-
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-- name: test win_ping
- action: win_ping
- register: win_ping_result
-
-- name: check win_ping result
- assert:
- that:
- - win_ping_result is not failed
- - win_ping_result is not changed
- - win_ping_result.ping == 'pong'
-
-- name: test win_ping with data
- win_ping:
- data: ☠
- register: win_ping_with_data_result
-
-- name: check win_ping result with data
- assert:
- that:
- - win_ping_with_data_result is not failed
- - win_ping_with_data_result is not changed
- - win_ping_with_data_result.ping == '☠'
-
-- name: test win_ping.ps1 with data as complex args
- # win_ping.ps1: # TODO: do we want to actually support this? no other tests that I can see...
- win_ping:
- data: bleep
- register: win_ping_ps1_result
-
-- name: check win_ping.ps1 result with data
- assert:
- that:
- - win_ping_ps1_result is not failed
- - win_ping_ps1_result is not changed
- - win_ping_ps1_result.ping == 'bleep'
-
-- name: test win_ping using data=crash so that it throws an exception
- win_ping:
- data: crash
- register: win_ping_crash_result
- ignore_errors: yes
-
-- name: check win_ping_crash result
- assert:
- that:
- - win_ping_crash_result is failed
- - win_ping_crash_result is not changed
- - 'win_ping_crash_result.msg == "Unhandled exception while executing module: boom"'
- - '"throw \"boom\"" in win_ping_crash_result.exception'
diff --git a/test/integration/targets/incidental_win_prepare_tests/aliases b/test/integration/targets/incidental_win_prepare_tests/aliases
deleted file mode 100644
index 136c05e0..00000000
--- a/test/integration/targets/incidental_win_prepare_tests/aliases
+++ /dev/null
@@ -1 +0,0 @@
-hidden
diff --git a/test/integration/targets/incidental_win_prepare_tests/meta/main.yml b/test/integration/targets/incidental_win_prepare_tests/meta/main.yml
deleted file mode 100644
index cf5427b6..00000000
--- a/test/integration/targets/incidental_win_prepare_tests/meta/main.yml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-
-allow_duplicates: yes
diff --git a/test/integration/targets/include_import/issue73657.yml b/test/integration/targets/include_import/issue73657.yml
new file mode 100644
index 00000000..b692ccb5
--- /dev/null
+++ b/test/integration/targets/include_import/issue73657.yml
@@ -0,0 +1,8 @@
+- hosts: localhost
+ gather_facts: no
+ tasks:
+ - block:
+ - include_tasks: issue73657_tasks.yml
+ rescue:
+ - debug:
+ msg: SHOULD_NOT_EXECUTE
diff --git a/test/integration/targets/include_import/issue73657_tasks.yml b/test/integration/targets/include_import/issue73657_tasks.yml
new file mode 100644
index 00000000..7247d769
--- /dev/null
+++ b/test/integration/targets/include_import/issue73657_tasks.yml
@@ -0,0 +1,2 @@
+- wrong.wrong.wrong:
+ parser: error
diff --git a/test/integration/targets/include_import/runme.sh b/test/integration/targets/include_import/runme.sh
index 7029ab6d..b60ecffb 100755
--- a/test/integration/targets/include_import/runme.sh
+++ b/test/integration/targets/include_import/runme.sh
@@ -135,3 +135,7 @@ cat out.txt
test "$(grep out.txt -ce 'In imported playbook')" = 2
test "$(grep out.txt -ce 'In imported tasks')" = 3
test "$(grep out.txt -ce 'In imported role')" = 3
+
+# https://github.com/ansible/ansible/issues/73657
+ansible-playbook issue73657.yml 2>&1 | tee issue73657.out
+test "$(grep -c 'SHOULD_NOT_EXECUTE' issue73657.out)" = 0
diff --git a/test/integration/targets/interpreter_discovery_python/tasks/main.yml b/test/integration/targets/interpreter_discovery_python/tasks/main.yml
index 770de0c5..7e9b2e87 100644
--- a/test/integration/targets/interpreter_discovery_python/tasks/main.yml
+++ b/test/integration/targets/interpreter_discovery_python/tasks/main.yml
@@ -11,6 +11,7 @@
set_fact:
distro: '{{ ansible_distribution | default("unknown") | lower }}'
distro_version: '{{ ansible_distribution_version | default("unknown") }}'
+ distro_major_version: '{{ ansible_distribution_major_version | default("unknown") }}'
os_family: '{{ ansible_os_family | default("unknown") }}'
- name: test that python discovery is working and that fact persistence makes it only run once
@@ -156,9 +157,11 @@
assert:
that:
# rhel 6/7
- - (auto_out.ansible_facts.discovered_interpreter_python == '/usr/bin/python' and distro_version is version('8','<')) or distro_version is version('8','>=')
- # rhel 8+
- - (auto_out.ansible_facts.discovered_interpreter_python == '/usr/libexec/platform-python' and distro_version is version('8','>=')) or distro_version is version('8','<')
+ - (auto_out.ansible_facts.discovered_interpreter_python == '/usr/bin/python' and distro_major_version is version('8','<')) or distro_major_version is version('8','>=')
+ # rhel 8
+ - (auto_out.ansible_facts.discovered_interpreter_python == '/usr/libexec/platform-python' and distro_major_version is version('8','==')) or distro_major_version is version('8','!=')
+ # rhel 9
+ - (auto_out.ansible_facts.discovered_interpreter_python == '/usr/bin/python3' and distro_major_version is version('9','==')) or distro_major_version is version('9','!=')
when: distro == 'redhat'
- name: ubuntu assertions
diff --git a/test/integration/targets/inventory/1/2/inventory.yml b/test/integration/targets/inventory/1/2/inventory.yml
index 5082cef2..b6c31ad8 100644
--- a/test/integration/targets/inventory/1/2/inventory.yml
+++ b/test/integration/targets/inventory/1/2/inventory.yml
@@ -1,3 +1,3 @@
-plugin: constructed
+plugin: ansible.builtin.constructed
groups:
webservers: inventory_hostname.startswith('web')
diff --git a/test/integration/targets/inventory/extra_vars_constructed.yml b/test/integration/targets/inventory/extra_vars_constructed.yml
index 66bee863..ee6f5fd6 100644
--- a/test/integration/targets/inventory/extra_vars_constructed.yml
+++ b/test/integration/targets/inventory/extra_vars_constructed.yml
@@ -1,4 +1,4 @@
-plugin: constructed
+plugin: ansible.builtin.constructed
strict: true
use_extra_vars: True
compose:
diff --git a/test/integration/targets/inventory_constructed/constructed.yml b/test/integration/targets/inventory_constructed/constructed.yml
index baeea323..be02858f 100644
--- a/test/integration/targets/inventory_constructed/constructed.yml
+++ b/test/integration/targets/inventory_constructed/constructed.yml
@@ -1,4 +1,4 @@
-plugin: constructed
+plugin: ansible.builtin.constructed
keyed_groups:
- key: hostvar0
- key: hostvar1
diff --git a/test/integration/targets/inventory_constructed/invs/2/constructed.yml b/test/integration/targets/inventory_constructed/invs/2/constructed.yml
index 7c62ef1d..ca26e2c9 100644
--- a/test/integration/targets/inventory_constructed/invs/2/constructed.yml
+++ b/test/integration/targets/inventory_constructed/invs/2/constructed.yml
@@ -1,4 +1,4 @@
-plugin: constructed
+plugin: ansible.builtin.constructed
use_vars_plugins: true
keyed_groups:
- key: iamdefined
diff --git a/test/integration/targets/inventory_constructed/keyed_group_default_value.yml b/test/integration/targets/inventory_constructed/keyed_group_default_value.yml
index e4d0a76b..d69e8ec5 100644
--- a/test/integration/targets/inventory_constructed/keyed_group_default_value.yml
+++ b/test/integration/targets/inventory_constructed/keyed_group_default_value.yml
@@ -1,4 +1,4 @@
-plugin: constructed
+plugin: ansible.builtin.constructed
keyed_groups:
- key: tags
prefix: tag
diff --git a/test/integration/targets/inventory_constructed/keyed_group_list_default_value.yml b/test/integration/targets/inventory_constructed/keyed_group_list_default_value.yml
index 1c2d00e0..4481db31 100644
--- a/test/integration/targets/inventory_constructed/keyed_group_list_default_value.yml
+++ b/test/integration/targets/inventory_constructed/keyed_group_list_default_value.yml
@@ -1,5 +1,5 @@
-plugin: constructed
+plugin: ansible.builtin.constructed
keyed_groups:
- key: roles
default_value: storage
- prefix: host \ No newline at end of file
+ prefix: host
diff --git a/test/integration/targets/inventory_constructed/keyed_group_str_default_value.yml b/test/integration/targets/inventory_constructed/keyed_group_str_default_value.yml
index ae3fd5ae..256d3309 100644
--- a/test/integration/targets/inventory_constructed/keyed_group_str_default_value.yml
+++ b/test/integration/targets/inventory_constructed/keyed_group_str_default_value.yml
@@ -1,5 +1,5 @@
-plugin: constructed
+plugin: ansible.builtin.constructed
keyed_groups:
- key: os
default_value: "fedora"
- prefix: host \ No newline at end of file
+ prefix: host
diff --git a/test/integration/targets/inventory_constructed/keyed_group_trailing_separator.yml b/test/integration/targets/inventory_constructed/keyed_group_trailing_separator.yml
index cbe57c60..d69899db 100644
--- a/test/integration/targets/inventory_constructed/keyed_group_trailing_separator.yml
+++ b/test/integration/targets/inventory_constructed/keyed_group_trailing_separator.yml
@@ -1,4 +1,4 @@
-plugin: constructed
+plugin: ansible.builtin.constructed
keyed_groups:
- key: tags
prefix: tag
diff --git a/test/integration/targets/inventory_constructed/no_leading_separator_constructed.yml b/test/integration/targets/inventory_constructed/no_leading_separator_constructed.yml
index 5f35de14..5ff8f933 100644
--- a/test/integration/targets/inventory_constructed/no_leading_separator_constructed.yml
+++ b/test/integration/targets/inventory_constructed/no_leading_separator_constructed.yml
@@ -1,4 +1,4 @@
-plugin: constructed
+plugin: ansible.builtin.constructed
keyed_groups:
- key: hostvar0
- key: hostvar1
diff --git a/test/integration/targets/iptables/aliases b/test/integration/targets/iptables/aliases
new file mode 100644
index 00000000..7d66ecf8
--- /dev/null
+++ b/test/integration/targets/iptables/aliases
@@ -0,0 +1,5 @@
+shippable/posix/group2
+skip/freebsd
+skip/osx
+skip/macos
+skip/docker
diff --git a/test/integration/targets/iptables/tasks/chain_management.yml b/test/integration/targets/iptables/tasks/chain_management.yml
new file mode 100644
index 00000000..03551228
--- /dev/null
+++ b/test/integration/targets/iptables/tasks/chain_management.yml
@@ -0,0 +1,71 @@
+# test code for the iptables module
+# (c) 2021, Éloi Rivard <eloi@yaal.coop>
+
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+---
+- name: get the state of the iptable rules
+ shell: "{{ iptables_bin }} -L"
+ become: true
+ register: result
+
+- name: assert the rule is absent
+ assert:
+ that:
+ - result is not failed
+ - '"FOOBAR-CHAIN" not in result.stdout'
+
+- name: create the foobar chain
+ become: true
+ iptables:
+ chain: FOOBAR-CHAIN
+ chain_management: true
+ state: present
+
+- name: get the state of the iptable rules after chain is created
+ become: true
+ shell: "{{ iptables_bin }} -L"
+ register: result
+
+- name: assert the rule is present
+ assert:
+ that:
+ - result is not failed
+ - '"FOOBAR-CHAIN" in result.stdout'
+
+- name: flush the foobar chain
+ become: true
+ iptables:
+ chain: FOOBAR-CHAIN
+ flush: true
+
+- name: delete the foobar chain
+ become: true
+ iptables:
+ chain: FOOBAR-CHAIN
+ chain_management: true
+ state: absent
+
+- name: get the state of the iptable rules after chain is deleted
+ become: true
+ shell: "{{ iptables_bin }} -L"
+ register: result
+
+- name: assert the rule is absent
+ assert:
+ that:
+ - result is not failed
+ - '"FOOBAR-CHAIN" not in result.stdout'
+ - '"FOOBAR-RULE" not in result.stdout'
diff --git a/test/integration/targets/incidental_win_prepare_tests/tasks/main.yml b/test/integration/targets/iptables/tasks/main.yml
index e87b614b..eb2674ac 100644
--- a/test/integration/targets/incidental_win_prepare_tests/tasks/main.yml
+++ b/test/integration/targets/iptables/tasks/main.yml
@@ -1,6 +1,5 @@
-# test code for the windows versions of copy, file and template module
-# originally
-# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
+# test code for the iptables module
+# (c) 2021, Éloi Rivard <eloi@yaal.coop>
# This file is part of Ansible
#
@@ -16,14 +15,22 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+---
+- name: Include distribution specific variables
+ include_vars: "{{ lookup('first_found', search) }}"
+ vars:
+ search:
+ files:
+ - '{{ ansible_distribution | lower }}.yml'
+ - '{{ ansible_os_family | lower }}.yml'
+ - '{{ ansible_system | lower }}.yml'
+ - default.yml
+ paths:
+ - vars
+- name: install dependencies for iptables test
+ package:
+ name: iptables
+ state: present
-- name: clean out the test directory
- win_file: name={{win_output_dir|mandatory}} state=absent
- tags:
- - prepare
-
-- name: create the test directory
- win_file: name={{win_output_dir}} state=directory
- tags:
- - prepare
+- import_tasks: chain_management.yml
diff --git a/test/integration/targets/iptables/vars/alpine.yml b/test/integration/targets/iptables/vars/alpine.yml
new file mode 100644
index 00000000..7bdd1a00
--- /dev/null
+++ b/test/integration/targets/iptables/vars/alpine.yml
@@ -0,0 +1,2 @@
+---
+iptables_bin: /sbin/iptables
diff --git a/test/integration/targets/iptables/vars/centos.yml b/test/integration/targets/iptables/vars/centos.yml
new file mode 100644
index 00000000..7bdd1a00
--- /dev/null
+++ b/test/integration/targets/iptables/vars/centos.yml
@@ -0,0 +1,2 @@
+---
+iptables_bin: /sbin/iptables
diff --git a/test/integration/targets/iptables/vars/default.yml b/test/integration/targets/iptables/vars/default.yml
new file mode 100644
index 00000000..0c5f8773
--- /dev/null
+++ b/test/integration/targets/iptables/vars/default.yml
@@ -0,0 +1,2 @@
+---
+iptables_bin: /usr/sbin/iptables
diff --git a/test/integration/targets/iptables/vars/fedora.yml b/test/integration/targets/iptables/vars/fedora.yml
new file mode 100644
index 00000000..7bdd1a00
--- /dev/null
+++ b/test/integration/targets/iptables/vars/fedora.yml
@@ -0,0 +1,2 @@
+---
+iptables_bin: /sbin/iptables
diff --git a/test/integration/targets/iptables/vars/redhat.yml b/test/integration/targets/iptables/vars/redhat.yml
new file mode 100644
index 00000000..7bdd1a00
--- /dev/null
+++ b/test/integration/targets/iptables/vars/redhat.yml
@@ -0,0 +1,2 @@
+---
+iptables_bin: /sbin/iptables
diff --git a/test/integration/targets/iptables/vars/suse.yml b/test/integration/targets/iptables/vars/suse.yml
new file mode 100644
index 00000000..7bdd1a00
--- /dev/null
+++ b/test/integration/targets/iptables/vars/suse.yml
@@ -0,0 +1,2 @@
+---
+iptables_bin: /sbin/iptables
diff --git a/test/integration/targets/jinja2_native_types/nested_undefined.yml b/test/integration/targets/jinja2_native_types/nested_undefined.yml
index c808ffb7..b60a871f 100644
--- a/test/integration/targets/jinja2_native_types/nested_undefined.yml
+++ b/test/integration/targets/jinja2_native_types/nested_undefined.yml
@@ -21,4 +21,3 @@
- assert:
that:
- "\"'nested_and_undefined' is undefined\" in result.msg"
- when: lookup('pipe', ansible_python_interpreter ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.10', '>=')
diff --git a/test/integration/targets/jinja2_native_types/runtests.yml b/test/integration/targets/jinja2_native_types/runtests.yml
index efcdb7a5..422ef57b 100644
--- a/test/integration/targets/jinja2_native_types/runtests.yml
+++ b/test/integration/targets/jinja2_native_types/runtests.yml
@@ -31,20 +31,10 @@
s_false: "False"
yaml_none: ~
tasks:
- - name: check jinja version
- command: "{{ ansible_python_interpreter }} -c 'import jinja2; print(jinja2.__version__)'"
- register: jinja2_version
-
- - name: make sure jinja is the right version
- set_fact:
- is_native: "{{ jinja2_version.stdout is version('2.10', '>=') }}"
-
- - block:
- - import_tasks: test_casting.yml
- - import_tasks: test_concatentation.yml
- - import_tasks: test_bool.yml
- - import_tasks: test_dunder.yml
- - import_tasks: test_types.yml
- - import_tasks: test_none.yml
- - import_tasks: test_template.yml
- when: is_native
+ - import_tasks: test_casting.yml
+ - import_tasks: test_concatentation.yml
+ - import_tasks: test_bool.yml
+ - import_tasks: test_dunder.yml
+ - import_tasks: test_types.yml
+ - import_tasks: test_none.yml
+ - import_tasks: test_template.yml
diff --git a/test/integration/targets/known_hosts/meta/main.yml b/test/integration/targets/known_hosts/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/known_hosts/meta/main.yml
+++ b/test/integration/targets/known_hosts/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/known_hosts/tasks/main.yml b/test/integration/targets/known_hosts/tasks/main.yml
index 4ea91c35..67f0e5a6 100644
--- a/test/integration/targets/known_hosts/tasks/main.yml
+++ b/test/integration/targets/known_hosts/tasks/main.yml
@@ -19,7 +19,7 @@
- name: copy an existing file in place
copy:
src: existing_known_hosts
- dest: "{{ output_dir }}/known_hosts"
+ dest: "{{ remote_tmp_dir }}/known_hosts"
# test addition
@@ -29,14 +29,14 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: diff
- name: assert that the diff looks as expected (the key was added at the end)
assert:
that:
- 'diff is changed'
- - 'diff.diff.before_header == diff.diff.after_header == output_dir|expanduser + "/known_hosts"'
+ - 'diff.diff.before_header == diff.diff.after_header == remote_tmp_dir|expanduser + "/known_hosts"'
- 'diff.diff.after.splitlines()[:-1] == diff.diff.before.splitlines()'
- 'diff.diff.after.splitlines()[-1] == example_org_rsa_key.strip()'
@@ -45,11 +45,11 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts
- name: assert that the key was added and ordering preserved
@@ -68,7 +68,7 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: check
- name: assert that no changes were expected
@@ -82,11 +82,11 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v2
- name: assert that no changes happened
@@ -104,13 +104,13 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: diff
- name: assert that the diff looks as expected (the key was removed)
assert:
that:
- - 'diff.diff.before_header == diff.diff.after_header == output_dir|expanduser + "/known_hosts"'
+ - 'diff.diff.before_header == diff.diff.after_header == remote_tmp_dir|expanduser + "/known_hosts"'
- 'diff.diff.before.splitlines()[-1] == example_org_rsa_key.strip()'
- 'diff.diff.after.splitlines() == diff.diff.before.splitlines()[:-1]'
@@ -119,11 +119,11 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v3
- name: assert that the key was removed and ordering preserved
@@ -142,7 +142,7 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: check
- name: assert that no changes were expected
@@ -156,11 +156,11 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v4
- name: assert that no changes happened
@@ -177,12 +177,12 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
hash_host: yes
register: result
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v5
- name: assert that the key was added and ordering preserved
@@ -201,12 +201,12 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
hash_host: yes
register: result
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v6
- name: assert that no changes happened
@@ -223,11 +223,11 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v7
- name: assert that the key was removed and ordering preserved
@@ -245,11 +245,11 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v8
- name: assert that no changes happened
@@ -267,10 +267,10 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v8
- name: assert the plaintext host is there
@@ -283,11 +283,11 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
hash_host: true
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v9
- name: assert the hashed host is there
@@ -301,10 +301,10 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v10
- name: assert the plaintext host is there
@@ -317,7 +317,7 @@
- name: copy an existing file in place
copy:
src: existing_known_hosts
- dest: "{{ output_dir }}/known_hosts"
+ dest: "{{ remote_tmp_dir }}/known_hosts"
# Test key changes
@@ -326,7 +326,7 @@
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
hash_host: true
- name: change the key of a hashed host
@@ -334,11 +334,11 @@
name: example.org
key: "{{ example_org_rsa_key.strip()[:-7] + 'RANDOM=' }}"
state: present
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
hash_host: true
- name: get the file content
- command: "cat {{output_dir}}/known_hosts"
+ command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v11
- name: assert the change took place and the key got modified
@@ -352,7 +352,7 @@
known_hosts:
name: example.org,acme.com
key: "{{ example_org_rsa_key }}"
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
ignore_errors: yes
register: result
@@ -366,7 +366,7 @@
known_hosts:
name: example.com
key: "{{ example_org_rsa_key }}"
- path: "{{output_dir}}/known_hosts"
+ path: "{{remote_tmp_dir}}/known_hosts"
ignore_errors: yes
register: result
diff --git a/test/integration/targets/lookup_env/runme.sh b/test/integration/targets/lookup_env/runme.sh
new file mode 100755
index 00000000..698d6bfa
--- /dev/null
+++ b/test/integration/targets/lookup_env/runme.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+set -ex
+
+unset USR
+# this should succeed and return 'nobody' as var is undefined
+ansible -m debug -a msg="{{ lookup('env', 'USR', default='nobody')}}" localhost |grep nobody
+# var is defined but empty, so should return empty
+USR='' ansible -m debug -a msg="{{ lookup('env', 'USR', default='nobody')}}" localhost |grep -v nobody
+
+# this should fail with undefined
+ansible -m debug -a msg="{{ lookup('env', 'USR', default=Undefined)}}" localhost && exit 1 || exit 0
diff --git a/test/integration/targets/lookup_url/aliases b/test/integration/targets/lookup_url/aliases
index 90ef161f..9911b8a3 100644
--- a/test/integration/targets/lookup_url/aliases
+++ b/test/integration/targets/lookup_url/aliases
@@ -1,3 +1,4 @@
destructive
shippable/posix/group1
needs/httptester
+skip/macos/12.0 # This test crashes Python due to https://wefearchange.org/2018/11/forkmacos.rst.html
diff --git a/test/integration/targets/loop-until/aliases b/test/integration/targets/loop-until/aliases
new file mode 100644
index 00000000..90ea9e12
--- /dev/null
+++ b/test/integration/targets/loop-until/aliases
@@ -0,0 +1,2 @@
+shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/loop-until/tasks/main.yml b/test/integration/targets/loop-until/tasks/main.yml
new file mode 100644
index 00000000..bb3799a3
--- /dev/null
+++ b/test/integration/targets/loop-until/tasks/main.yml
@@ -0,0 +1,160 @@
+# Test code for integration of until and loop options
+# Copyright: (c) 2018, Ansible Project
+
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
+- shell: '{{ ansible_python.executable }} -c "import tempfile; print(tempfile.mkstemp()[1])"'
+ register: tempfilepaths
+ # 0 to 3:
+ loop: "{{ range(0, 3 + 1) | list }}"
+
+- set_fact:
+ "until_tempfile_path_{{ idx }}": "{{ tmp_file.stdout }}"
+ until_tempfile_path_var_names: >
+ {{ [ 'until_tempfile_path_' + idx | string ] + until_tempfile_path_var_names | default([]) }}
+ loop: "{{ tempfilepaths.results }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file
+
+# `select` filter is only available since Jinja 2.7,
+# thus tests are failing under CentOS in CI
+#- set_fact:
+# until_tempfile_path_var_names: >
+# {{ vars | select('match', '^until_tempfile_path_') | list }}
+
+- name: loop and until with 6 retries
+ shell: echo "run" >> {{ lookup('vars', tmp_file_var) }} && wc -w < {{ lookup('vars', tmp_file_var) }} | tr -d ' '
+ register: runcount
+ until: runcount.stdout | int == idx + 3
+ retries: "{{ idx + 2 }}"
+ delay: 0.01
+ loop: "{{ until_tempfile_path_var_names }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file_var
+
+- debug: var=runcount
+
+- assert:
+ that: item.stdout | int == idx + 3
+ loop: "{{ runcount.results }}"
+ loop_control:
+ index_var: idx
+
+- &cleanup-tmp-files
+ name: Empty tmp files
+ copy:
+ content: ""
+ dest: "{{ lookup('vars', tmp_file_var) }}"
+ loop: "{{ until_tempfile_path_var_names }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file_var
+
+- name: loop with specified max retries
+ shell: echo "run" >> {{ lookup('vars', tmp_file_var) }}
+ until: 1==0
+ retries: 5
+ delay: 0.01
+ ignore_errors: true
+ loop: "{{ until_tempfile_path_var_names }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file_var
+
+- name: validate output
+ shell: wc -l < {{ lookup('vars', tmp_file_var) }}
+ register: runcount
+ loop: "{{ until_tempfile_path_var_names }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file_var
+
+- assert:
+ that: item.stdout | int == 6 # initial + 5 retries
+ loop: "{{ runcount.results }}"
+
+- *cleanup-tmp-files
+
+- name: Test failed_when impacting until
+ shell: echo "run" >> {{ lookup('vars', tmp_file_var) }}
+ register: failed_when_until
+ failed_when: True
+ until: failed_when_until is successful
+ retries: 3
+ delay: 0.5
+ ignore_errors: True
+ loop: "{{ until_tempfile_path_var_names }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file_var
+
+- name: Get attempts number
+ shell: wc -l < {{ lookup('vars', tmp_file_var) }}
+ register: runcount
+ loop: "{{ until_tempfile_path_var_names }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file_var
+
+- assert:
+ that: item.stdout | int == 3 + 1
+ loop: "{{ runcount.results }}"
+
+- *cleanup-tmp-files
+
+- name: Test changed_when impacting until
+ shell: echo "run" >> {{ lookup('vars', tmp_file_var) }}
+ register: changed_when_until
+ changed_when: False
+ until: changed_when_until is changed
+ retries: 3
+ delay: 0.5
+ ignore_errors: True
+ loop: "{{ until_tempfile_path_var_names }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file_var
+
+- name: Get attempts number
+ shell: wc -l < {{ lookup('vars', tmp_file_var) }}
+ register: runcount
+ loop: "{{ until_tempfile_path_var_names }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file_var
+
+- assert:
+ that: item.stdout | int == 3 + 1
+ loop: "{{ runcount.results }}"
+
+- *cleanup-tmp-files
+
+- name: Test access to attempts in changed_when/failed_when
+ shell: 'true'
+ register: changed_when_attempts
+ until: 1 == 0
+ retries: 5
+ delay: 0.5
+ failed_when: changed_when_attempts.attempts > 6
+ loop: "{{ runcount.results }}"
+
+- &wipe-out-tmp-files
+ file: path="{{ lookup('vars', tmp_file_var) }}" state=absent
+ loop: "{{ until_tempfile_path_var_names }}"
+ loop_control:
+ index_var: idx
+ loop_var: tmp_file_var
diff --git a/test/integration/targets/module_utils/aliases b/test/integration/targets/module_utils/aliases
index 769d265d..64d1b64a 100644
--- a/test/integration/targets/module_utils/aliases
+++ b/test/integration/targets/module_utils/aliases
@@ -1,4 +1,5 @@
shippable/posix/group3
needs/root
needs/target/setup_nobody
+needs/target/setup_remote_tmp_dir
context/target
diff --git a/test/integration/targets/module_utils/library/test_alias_deprecation.py b/test/integration/targets/module_utils/library/test_alias_deprecation.py
index 96410fc4..dc36abae 100644
--- a/test/integration/targets/module_utils/library/test_alias_deprecation.py
+++ b/test/integration/targets/module_utils/library/test_alias_deprecation.py
@@ -4,7 +4,8 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.facts import data
+# overridden
+from ansible.module_utils.ansible_release import data
results = {"data": data}
diff --git a/test/integration/targets/module_utils/library/test_override.py b/test/integration/targets/module_utils/library/test_override.py
index b4e21cdd..7f6e7a5f 100644
--- a/test/integration/targets/module_utils/library/test_override.py
+++ b/test/integration/targets/module_utils/library/test_override.py
@@ -3,7 +3,8 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.facts import data
+# overridden
+from ansible.module_utils.ansible_release import data
results = {"data": data}
diff --git a/test/integration/targets/module_utils/module_utils/ansible_release.py b/test/integration/targets/module_utils/module_utils/ansible_release.py
new file mode 100644
index 00000000..7d43bf87
--- /dev/null
+++ b/test/integration/targets/module_utils/module_utils/ansible_release.py
@@ -0,0 +1,4 @@
+# This file overrides the builtin ansible.module_utils.ansible_release file
+# to test that it can be overridden. Previously this was facts.py but caused issues
+# with dependencies that may need to execute a module that makes use of facts
+data = 'overridden ansible_release.py'
diff --git a/test/integration/targets/module_utils/module_utils/facts.py b/test/integration/targets/module_utils/module_utils/facts.py
deleted file mode 100644
index ba7cbb7b..00000000
--- a/test/integration/targets/module_utils/module_utils/facts.py
+++ /dev/null
@@ -1 +0,0 @@
-data = 'overridden facts.py'
diff --git a/test/integration/targets/module_utils/module_utils_envvar.yml b/test/integration/targets/module_utils/module_utils_envvar.yml
index 8d97e0eb..8c37940e 100644
--- a/test/integration/targets/module_utils/module_utils_envvar.yml
+++ b/test/integration/targets/module_utils/module_utils_envvar.yml
@@ -33,10 +33,10 @@
test_override:
register: result
- - name: Make sure the we used the local facts.py, not the one shipped with ansible
+ - name: Make sure that we used the local ansible_release.py, not the one shipped with ansible
assert:
that:
- - 'result["data"] == "overridden facts.py"'
+ - 'result["data"] == "overridden ansible_release.py"'
- name: Test that importing something from the module_utils in the env_vars works
test_env_override:
diff --git a/test/integration/targets/module_utils/module_utils_test.yml b/test/integration/targets/module_utils/module_utils_test.yml
index a6019cda..4e948bd6 100644
--- a/test/integration/targets/module_utils/module_utils_test.yml
+++ b/test/integration/targets/module_utils/module_utils_test.yml
@@ -33,10 +33,10 @@
test_override:
register: result
- - name: Make sure the we used the local facts.py, not the one shipped with ansible
+ - name: Make sure that we used the local ansible_release.py, not the one shipped with ansible
assert:
that:
- - result["data"] == "overridden facts.py"
+ - result["data"] == "overridden ansible_release.py"
- name: Test that importing a module that only exists inside of a submodule does not work
test_failure:
@@ -61,13 +61,16 @@
- result.deprecations[-1].version == '9.99'
- block:
+ - import_role:
+ name: setup_remote_tmp_dir
+
- name: Get a string with a \0 in it
command: echo -e 'hi\0foo'
register: string_with_null
- name: Use the null string as a module parameter
lineinfile:
- path: "{{ output_dir }}/nulltest"
+ path: "{{ remote_tmp_dir }}/nulltest"
line: "{{ string_with_null.stdout }}"
create: yes
ignore_errors: yes
@@ -75,7 +78,7 @@
- name: See if the file exists
stat:
- path: "{{ output_dir }}/nulltest"
+ path: "{{ remote_tmp_dir }}/nulltest"
register: nullstat
- assert:
@@ -94,7 +97,7 @@
- nullstat.stat.exists == nulltest is successful
always:
- file:
- path: "{{ output_dir }}/nulltest"
+ path: "{{ remote_tmp_dir }}/nulltest"
state: absent
- name: Test that date and datetime in module output works
diff --git a/test/integration/targets/module_utils/runme.sh b/test/integration/targets/module_utils/runme.sh
index 801734f9..b4ba1356 100755
--- a/test/integration/targets/module_utils/runme.sh
+++ b/test/integration/targets/module_utils/runme.sh
@@ -2,13 +2,15 @@
set -eux
-ANSIBLE_ROLES_PATH=../ ansible-playbook module_utils_basic_setcwd.yml -i ../../inventory "$@"
+export ANSIBLE_ROLES_PATH=../
+
+ansible-playbook module_utils_basic_setcwd.yml -i ../../inventory "$@"
# Keep the -vvvvv here. This acts as a test for testing that higher verbosity
# doesn't traceback with unicode in the custom module_utils directory path.
ansible-playbook module_utils_vvvvv.yml -i ../../inventory -vvvvv "$@"
-ansible-playbook module_utils_test.yml -i ../../inventory -e output_dir="$OUTPUT_DIR" -v "$@"
+ansible-playbook module_utils_test.yml -i ../../inventory -v "$@"
ANSIBLE_MODULE_UTILS=other_mu_dir ansible-playbook module_utils_envvar.yml -i ../../inventory -v "$@"
diff --git a/test/integration/targets/module_utils_Ansible.AccessToken/library/ansible_access_token_tests.ps1 b/test/integration/targets/module_utils_Ansible.AccessToken/library/ansible_access_token_tests.ps1
index 5e3a0af5..a1de2b4e 100644
--- a/test/integration/targets/module_utils_Ansible.AccessToken/library/ansible_access_token_tests.ps1
+++ b/test/integration/targets/module_utils_Ansible.AccessToken/library/ansible_access_token_tests.ps1
@@ -15,38 +15,41 @@ $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
$test_username = $module.Params.test_username
$test_password = $module.Params.test_password
-Function Assert-Equals {
+Function Assert-Equal {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $matched = $false
- if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
- $Actual.Count | Assert-Equals -Expected $Expected.Count
- for ($i = 0; $i -lt $Actual.Count; $i++) {
- $actual_value = $Actual[$i]
- $expected_value = $Expected[$i]
- Assert-Equals -Actual $actual_value -Expected $expected_value
+ process {
+ $matched = $false
+ if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
+ $Actual.Count | Assert-Equal -Expected $Expected.Count
+ for ($i = 0; $i -lt $Actual.Count; $i++) {
+ $actual_value = $Actual[$i]
+ $expected_value = $Expected[$i]
+ Assert-Equal -Actual $actual_value -Expected $expected_value
+ }
+ $matched = $true
}
- $matched = $true
- } else {
- $matched = $Actual -ceq $Expected
- }
-
- if (-not $matched) {
- if ($Actual -is [PSObject]) {
- $Actual = $Actual.ToString()
+ else {
+ $matched = $Actual -ceq $Expected
}
- $call_stack = (Get-PSCallStack)[1]
- $module.Result.test = $test
- $module.Result.actual = $Actual
- $module.Result.expected = $Expected
- $module.Result.line = $call_stack.ScriptLineNumber
- $module.Result.method = $call_stack.Position.Text
+ if (-not $matched) {
+ if ($Actual -is [PSObject]) {
+ $Actual = $Actual.ToString()
+ }
+
+ $call_stack = (Get-PSCallStack)[1]
+ $module.Result.test = $test
+ $module.Result.actual = $Actual
+ $module.Result.expected = $Expected
+ $module.Result.line = $call_stack.ScriptLineNumber
+ $module.Result.method = $call_stack.Position.Text
- $module.FailJson("AssertionError: actual != expected")
+ $module.FailJson("AssertionError: actual != expected")
+ }
}
}
@@ -58,15 +61,16 @@ $tests = [Ordered]@{
$h_token = [Ansible.AccessToken.TokenUtil]::OpenProcessToken($h_process, "Query")
try {
- $h_token.IsClosed | Assert-Equals -Expected $false
- $h_token.IsInvalid | Assert-Equals -Expected $false
+ $h_token.IsClosed | Assert-Equal -Expected $false
+ $h_token.IsInvalid | Assert-Equal -Expected $false
$actual_user = [Ansible.AccessToken.TokenUtil]::GetTokenUser($h_token)
- $actual_user | Assert-Equals -Expected $current_user
- } finally {
+ $actual_user | Assert-Equal -Expected $current_user
+ }
+ finally {
$h_token.Dispose()
}
- $h_token.IsClosed | Assert-Equals -Expected $true
+ $h_token.IsClosed | Assert-Equal -Expected $true
}
"Open process token of another process" = {
@@ -74,21 +78,24 @@ $tests = [Ordered]@{
try {
$h_process = [Ansible.AccessToken.TokenUtil]::OpenProcess($proc_info.Id, "QueryInformation", $false)
try {
- $h_process.IsClosed | Assert-Equals -Expected $false
- $h_process.IsInvalid | Assert-Equals -Expected $false
+ $h_process.IsClosed | Assert-Equal -Expected $false
+ $h_process.IsInvalid | Assert-Equal -Expected $false
$h_token = [Ansible.AccessToken.TokenUtil]::OpenProcessToken($h_process, "Query")
try {
$actual_user = [Ansible.AccessToken.TokenUtil]::GetTokenUser($h_token)
- $actual_user | Assert-Equals -Expected $current_user
- } finally {
+ $actual_user | Assert-Equal -Expected $current_user
+ }
+ finally {
$h_token.Dispose()
}
- } finally {
+ }
+ finally {
$h_process.Dispose()
}
- $h_process.IsClosed | Assert-Equals -Expected $true
- } finally {
+ $h_process.IsClosed | Assert-Equal -Expected $true
+ }
+ finally {
$proc_info | Stop-Process
}
}
@@ -98,11 +105,13 @@ $tests = [Ordered]@{
try {
$h_process = [Ansible.AccessToken.TokenUtil]::OpenProcess(4, "QueryInformation", $false)
$h_process.Dispose() # Incase this doesn't fail, make sure we still dispose of it
- } catch [Ansible.AccessToken.Win32Exception] {
+ }
+ catch [Ansible.AccessToken.Win32Exception] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "Failed to open process 4 with access QueryInformation (Access is denied, Win32ErrorCode 5 - 0x00000005)"
+ $msg = "Failed to open process 4 with access QueryInformation (Access is denied, Win32ErrorCode 5 - 0x00000005)"
+ $_.Exception.Message | Assert-Equal -Expected $msg
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Duplicate access token primary" = {
@@ -111,22 +120,24 @@ $tests = [Ordered]@{
try {
$dup_token = [Ansible.AccessToken.TokenUtil]::DuplicateToken($h_token, "Query", "Anonymous", "Primary")
try {
- $dup_token.IsClosed | Assert-Equals -Expected $false
- $dup_token.IsInvalid | Assert-Equals -Expected $false
+ $dup_token.IsClosed | Assert-Equal -Expected $false
+ $dup_token.IsInvalid | Assert-Equal -Expected $false
$actual_user = [Ansible.AccessToken.TokenUtil]::GetTokenUser($dup_token)
- $actual_user | Assert-Equals -Expected $current_user
+ $actual_user | Assert-Equal -Expected $current_user
$actual_stat = [Ansible.AccessToken.TokenUtil]::GetTokenStatistics($dup_token)
- $actual_stat.TokenType | Assert-Equals -Expected ([Ansible.AccessToken.TokenType]::Primary)
- $actual_stat.ImpersonationLevel | Assert-Equals -Expected ([Ansible.AccessToken.SecurityImpersonationLevel]::Anonymous)
- } finally {
+ $actual_stat.TokenType | Assert-Equal -Expected ([Ansible.AccessToken.TokenType]::Primary)
+ $actual_stat.ImpersonationLevel | Assert-Equal -Expected ([Ansible.AccessToken.SecurityImpersonationLevel]::Anonymous)
+ }
+ finally {
$dup_token.Dispose()
}
- $dup_token.IsClosed | Assert-Equals -Expected $true
- } finally {
+ $dup_token.IsClosed | Assert-Equal -Expected $true
+ }
+ finally {
$h_token.Dispose()
}
}
@@ -140,16 +151,18 @@ $tests = [Ordered]@{
try {
$actual_user = [Ansible.AccessToken.TokenUtil]::GetTokenUser($dup_token)
- $actual_user | Assert-Equals -Expected $current_user
+ $actual_user | Assert-Equal -Expected $current_user
$actual_stat = [Ansible.AccessToken.TokenUtil]::GetTokenStatistics($dup_token)
- $actual_stat.TokenType | Assert-Equals -Expected ([Ansible.AccessToken.TokenType]::Impersonation)
- $actual_stat.ImpersonationLevel | Assert-Equals -Expected ([Ansible.AccessToken.SecurityImpersonationLevel]"$_")
- } finally {
+ $actual_stat.TokenType | Assert-Equal -Expected ([Ansible.AccessToken.TokenType]::Impersonation)
+ $actual_stat.ImpersonationLevel | Assert-Equal -Expected ([Ansible.AccessToken.SecurityImpersonationLevel]"$_")
+ }
+ finally {
$dup_token.Dispose()
}
}
- } finally {
+ }
+ finally {
$h_token.Dispose()
}
}
@@ -162,25 +175,26 @@ $tests = [Ordered]@{
$tested = $false
foreach ($h_token in [Ansible.AccessToken.TokenUtil]::EnumerateUserTokens($system_sid, "Duplicate, Impersonate, Query")) {
$actual_user = [Ansible.AccessToken.TokenUtil]::GetTokenUser($h_token)
- $actual_user | Assert-Equals -Expected $system_sid
+ $actual_user | Assert-Equal -Expected $system_sid
[Ansible.AccessToken.TokenUtil]::ImpersonateToken($h_token)
try {
$current_sid = [System.Security.Principal.WindowsIdentity]::GetCurrent().User
- $current_sid | Assert-Equals -Expected $system_sid
- } finally {
+ $current_sid | Assert-Equal -Expected $system_sid
+ }
+ finally {
[Ansible.AccessToken.TokenUtil]::RevertToSelf()
}
$current_sid = [System.Security.Principal.WindowsIdentity]::GetCurrent().User
- $current_sid | Assert-Equals -Expected $current_user
+ $current_sid | Assert-Equal -Expected $current_user
# Will keep on looping for each SYSTEM token it can retrieve, we only want to test 1
$tested = $true
break
}
- $tested | Assert-Equals -Expected $true
+ $tested | Assert-Equal -Expected $true
}
"Get token privileges" = {
@@ -191,8 +205,8 @@ $tests = [Ordered]@{
$actual_privs = [Ansible.AccessToken.Tokenutil]::GetTokenPrivileges($h_token)
$actual_stat = [Ansible.AccessToken.TokenUtil]::GetTokenStatistics($h_token)
- $actual_privs.Count | Assert-Equals -Expected $priv_info.Count
- $actual_privs.Count | Assert-Equals -Expected $actual_stat.PrivilegeCount
+ $actual_privs.Count | Assert-Equal -Expected $priv_info.Count
+ $actual_privs.Count | Assert-Equal -Expected $actual_stat.PrivilegeCount
foreach ($info in $priv_info) {
$info_split = $info.Split(" ", [System.StringSplitOptions]::RemoveEmptyEntries)
@@ -200,14 +214,16 @@ $tests = [Ordered]@{
$priv_enabled = $info_split[-1] -eq "Enabled"
$actual_priv = $actual_privs | Where-Object { $_.Name -eq $priv_name }
- $actual_priv -eq $null | Assert-Equals -Expected $false
+ $actual_priv -eq $null | Assert-Equal -Expected $false
if ($priv_enabled) {
- $actual_priv.Attributes.HasFlag([Ansible.AccessToken.PrivilegeAttributes]::Enabled) | Assert-Equals -Expected $true
- } else {
- $actual_priv.Attributes.HasFlag([Ansible.AccessToken.PrivilegeAttributes]::Disabled) | Assert-Equals -Expected $true
+ $actual_priv.Attributes.HasFlag([Ansible.AccessToken.PrivilegeAttributes]::Enabled) | Assert-Equal -Expected $true
+ }
+ else {
+ $actual_priv.Attributes.HasFlag([Ansible.AccessToken.PrivilegeAttributes]::Disabled) | Assert-Equal -Expected $true
}
}
- } finally {
+ }
+ finally {
$h_token.Dispose()
}
}
@@ -219,25 +235,27 @@ $tests = [Ordered]@{
$actual_priv = [Ansible.AccessToken.Tokenutil]::GetTokenPrivileges($h_token)
$actual_stat = [Ansible.AccessToken.TokenUtil]::GetTokenStatistics($h_token)
- $actual_stat.TokenId.GetType().FullName | Assert-Equals -Expected "Ansible.AccessToken.Luid"
- $actual_stat.AuthenticationId.GetType().FullName | Assert-Equals -Expected "Ansible.AccessToken.Luid"
- $actual_stat.ExpirationTime.GetType().FullName | Assert-Equals -Expected "System.Int64"
+ $actual_stat.TokenId.GetType().FullName | Assert-Equal -Expected "Ansible.AccessToken.Luid"
+ $actual_stat.AuthenticationId.GetType().FullName | Assert-Equal -Expected "Ansible.AccessToken.Luid"
+ $actual_stat.ExpirationTime.GetType().FullName | Assert-Equal -Expected "System.Int64"
- $actual_stat.TokenType | Assert-Equals -Expected ([Ansible.AccessToken.TokenType]::Primary)
+ $actual_stat.TokenType | Assert-Equal -Expected ([Ansible.AccessToken.TokenType]::Primary)
$os_version = [Version](Get-Item -LiteralPath $env:SystemRoot\System32\kernel32.dll).VersionInfo.ProductVersion
if ($os_version -lt [Version]"6.1") {
# While the token is a primary token, Server 2008 reports the SecurityImpersonationLevel for a primary token as Impersonation
- $actual_stat.ImpersonationLevel | Assert-Equals -Expected ([Ansible.AccessToken.SecurityImpersonationLevel]::Impersonation)
- } else {
- $actual_stat.ImpersonationLevel | Assert-Equals -Expected ([Ansible.AccessToken.SecurityImpersonationLevel]::Anonymous)
+ $actual_stat.ImpersonationLevel | Assert-Equal -Expected ([Ansible.AccessToken.SecurityImpersonationLevel]::Impersonation)
+ }
+ else {
+ $actual_stat.ImpersonationLevel | Assert-Equal -Expected ([Ansible.AccessToken.SecurityImpersonationLevel]::Anonymous)
}
- $actual_stat.DynamicCharged.GetType().FullName | Assert-Equals -Expected "System.UInt32"
- $actual_stat.DynamicAvailable.GetType().FullName | Assert-Equals -Expected "System.UInt32"
- $actual_stat.GroupCount.GetType().FullName | Assert-Equals -Expected "System.UInt32"
- $actual_stat.PrivilegeCount | Assert-Equals -Expected $actual_priv.Count
- $actual_stat.ModifiedId.GetType().FullName | Assert-Equals -Expected "Ansible.AccessToken.Luid"
- } finally {
+ $actual_stat.DynamicCharged.GetType().FullName | Assert-Equal -Expected "System.UInt32"
+ $actual_stat.DynamicAvailable.GetType().FullName | Assert-Equal -Expected "System.UInt32"
+ $actual_stat.GroupCount.GetType().FullName | Assert-Equal -Expected "System.UInt32"
+ $actual_stat.PrivilegeCount | Assert-Equal -Expected $actual_priv.Count
+ $actual_stat.ModifiedId.GetType().FullName | Assert-Equal -Expected "Ansible.AccessToken.Luid"
+ }
+ finally {
$h_token.Dispose()
}
}
@@ -246,23 +264,25 @@ $tests = [Ordered]@{
$h_token = [Ansible.AccessToken.TokenUtil]::LogonUser($test_username, $null, $test_password, "Interactive", "Default")
try {
$actual_elevation_type = [Ansible.AccessToken.TokenUtil]::GetTokenElevationType($h_token)
- $actual_elevation_type | Assert-Equals -Expected ([Ansible.AccessToken.TokenElevationType]::Limited)
+ $actual_elevation_type | Assert-Equal -Expected ([Ansible.AccessToken.TokenElevationType]::Limited)
$actual_linked = [Ansible.AccessToken.TokenUtil]::GetTokenLinkedToken($h_token)
try {
- $actual_linked.IsClosed | Assert-Equals -Expected $false
- $actual_linked.IsInvalid | Assert-Equals -Expected $false
+ $actual_linked.IsClosed | Assert-Equal -Expected $false
+ $actual_linked.IsInvalid | Assert-Equal -Expected $false
$actual_elevation_type = [Ansible.AccessToken.TokenUtil]::GetTokenElevationType($actual_linked)
- $actual_elevation_type | Assert-Equals -Expected ([Ansible.AccessToken.TokenElevationType]::Full)
+ $actual_elevation_type | Assert-Equal -Expected ([Ansible.AccessToken.TokenElevationType]::Full)
$actual_stat = [Ansible.AccessToken.TokenUtil]::GetTokenStatistics($actual_linked)
- $actual_stat.TokenType | Assert-Equals -Expected ([Ansible.AccessToken.TokenType]::Impersonation)
- } finally {
+ $actual_stat.TokenType | Assert-Equal -Expected ([Ansible.AccessToken.TokenType]::Impersonation)
+ }
+ finally {
$actual_linked.Dispose()
}
- $actual_linked.IsClosed | Assert-Equals -Expected $true
- } finally {
+ $actual_linked.IsClosed | Assert-Equal -Expected $true
+ }
+ finally {
$h_token.Dispose()
}
}
@@ -286,29 +306,32 @@ $tests = [Ordered]@{
try {
$actual_linked = [Ansible.AccessToken.TokenUtil]::GetTokenLinkedToken($h_token)
try {
- $actual_linked.IsClosed | Assert-Equals -Expected $false
- $actual_linked.IsInvalid | Assert-Equals -Expected $false
+ $actual_linked.IsClosed | Assert-Equal -Expected $false
+ $actual_linked.IsInvalid | Assert-Equal -Expected $false
$actual_elevation_type = [Ansible.AccessToken.TokenUtil]::GetTokenElevationType($actual_linked)
- $actual_elevation_type | Assert-Equals -Expected ([Ansible.AccessToken.TokenElevationType]::Full)
+ $actual_elevation_type | Assert-Equal -Expected ([Ansible.AccessToken.TokenElevationType]::Full)
$actual_stat = [Ansible.AccessToken.TokenUtil]::GetTokenStatistics($actual_linked)
- $actual_stat.TokenType | Assert-Equals -Expected ([Ansible.AccessToken.TokenType]::Primary)
- } finally {
+ $actual_stat.TokenType | Assert-Equal -Expected ([Ansible.AccessToken.TokenType]::Primary)
+ }
+ finally {
$actual_linked.Dispose()
}
- $actual_linked.IsClosed | Assert-Equals -Expected $true
- } finally {
+ $actual_linked.IsClosed | Assert-Equal -Expected $true
+ }
+ finally {
[Ansible.AccessToken.TokenUtil]::RevertToSelf()
}
- } finally {
+ }
+ finally {
$h_token.Dispose()
}
$tested = $true
break
}
- $tested | Assert-Equals -Expected $true
+ $tested | Assert-Equal -Expected $true
}
"Failed to get token information" = {
@@ -318,13 +341,16 @@ $tests = [Ordered]@{
$failed = $false
try {
[Ansible.AccessToken.TokenUtil]::GetTokenUser($h_token)
- } catch [Ansible.AccessToken.Win32Exception] {
+ }
+ catch [Ansible.AccessToken.Win32Exception] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "GetTokenInformation(TokenUser) failed to get buffer length (Access is denied, Win32ErrorCode 5 - 0x00000005)"
- } finally {
+ $msg = "GetTokenInformation(TokenUser) failed to get buffer length (Access is denied, Win32ErrorCode 5 - 0x00000005)"
+ $_.Exception.Message | Assert-Equal -Expected $msg
+ }
+ finally {
$h_token.Dispose()
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Logon with valid credentials" = {
@@ -333,39 +359,42 @@ $tests = [Ordered]@{
$h_token = [Ansible.AccessToken.TokenUtil]::LogonUser($test_username, $null, $test_password, "Network", "Default")
try {
- $h_token.IsClosed | Assert-Equals -Expected $false
- $h_token.IsInvalid | Assert-Equals -Expected $false
+ $h_token.IsClosed | Assert-Equal -Expected $false
+ $h_token.IsInvalid | Assert-Equal -Expected $false
$actual_user = [Ansible.AccessToken.TokenUtil]::GetTokenUser($h_token)
- $actual_user | Assert-Equals -Expected $expected_sid
- } finally {
+ $actual_user | Assert-Equal -Expected $expected_sid
+ }
+ finally {
$h_token.Dispose()
}
- $h_token.IsClosed | Assert-Equals -Expected $true
+ $h_token.IsClosed | Assert-Equal -Expected $true
}
"Logon with invalid credentials" = {
$failed = $false
try {
[Ansible.AccessToken.TokenUtil]::LogonUser("fake-user", $null, "fake-pass", "Network", "Default")
- } catch [Ansible.AccessToken.Win32Exception] {
+ }
+ catch [Ansible.AccessToken.Win32Exception] {
$failed = $true
- $_.Exception.Message.Contains("Failed to logon fake-user") | Assert-Equals -Expected $true
- $_.Exception.Message.Contains("Win32ErrorCode 1326 - 0x0000052E)") | Assert-Equals -Expected $true
+ $_.Exception.Message.Contains("Failed to logon fake-user") | Assert-Equal -Expected $true
+ $_.Exception.Message.Contains("Win32ErrorCode 1326 - 0x0000052E)") | Assert-Equal -Expected $true
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Logon with invalid credential with domain account" = {
$failed = $false
try {
[Ansible.AccessToken.TokenUtil]::LogonUser("fake-user", "fake-domain", "fake-pass", "Network", "Default")
- } catch [Ansible.AccessToken.Win32Exception] {
+ }
+ catch [Ansible.AccessToken.Win32Exception] {
$failed = $true
- $_.Exception.Message.Contains("Failed to logon fake-domain\fake-user") | Assert-Equals -Expected $true
- $_.Exception.Message.Contains("Win32ErrorCode 1326 - 0x0000052E)") | Assert-Equals -Expected $true
+ $_.Exception.Message.Contains("Failed to logon fake-domain\fake-user") | Assert-Equal -Expected $true
+ $_.Exception.Message.Contains("Win32ErrorCode 1326 - 0x0000052E)") | Assert-Equal -Expected $true
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
}
diff --git a/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1 b/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
index 9278e386..cfa73c60 100644
--- a/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
+++ b/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
@@ -4,76 +4,85 @@
$module = [Ansible.Basic.AnsibleModule]::Create($args, @{})
-Function Assert-Equals {
+Function Assert-Equal {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $matched = $false
- if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
- $Actual.Count | Assert-Equals -Expected $Expected.Count
- for ($i = 0; $i -lt $Actual.Count; $i++) {
- $actual_value = $Actual[$i]
- $expected_value = $Expected[$i]
- Assert-Equals -Actual $actual_value -Expected $expected_value
+ process {
+ $matched = $false
+ if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
+ $Actual.Count | Assert-Equal -Expected $Expected.Count
+ for ($i = 0; $i -lt $Actual.Count; $i++) {
+ $actual_value = $Actual[$i]
+ $expected_value = $Expected[$i]
+ Assert-Equal -Actual $actual_value -Expected $expected_value
+ }
+ $matched = $true
}
- $matched = $true
- } else {
- $matched = $Actual -ceq $Expected
- }
-
- if (-not $matched) {
- if ($Actual -is [PSObject]) {
- $Actual = $Actual.ToString()
+ else {
+ $matched = $Actual -ceq $Expected
}
- $call_stack = (Get-PSCallStack)[1]
- $module.Result.failed = $true
- $module.Result.test = $test
- $module.Result.actual = $Actual
- $module.Result.expected = $Expected
- $module.Result.line = $call_stack.ScriptLineNumber
- $module.Result.method = $call_stack.Position.Text
- $module.Result.msg = "AssertionError: actual != expected"
+ if (-not $matched) {
+ if ($Actual -is [PSObject]) {
+ $Actual = $Actual.ToString()
+ }
- Exit-Module
+ $call_stack = (Get-PSCallStack)[1]
+ $module.Result.failed = $true
+ $module.Result.test = $test
+ $module.Result.actual = $Actual
+ $module.Result.expected = $Expected
+ $module.Result.line = $call_stack.ScriptLineNumber
+ $module.Result.method = $call_stack.Position.Text
+ $module.Result.msg = "AssertionError: actual != expected"
+
+ Exit-Module
+ }
}
}
-Function Assert-DictionaryEquals {
+Function Assert-DictionaryEqual {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $actual_keys = $Actual.Keys
- $expected_keys = $Expected.Keys
-
- $actual_keys.Count | Assert-Equals -Expected $expected_keys.Count
- foreach ($actual_entry in $Actual.GetEnumerator()) {
- $actual_key = $actual_entry.Key
- ($actual_key -cin $expected_keys) | Assert-Equals -Expected $true
- $actual_value = $actual_entry.Value
- $expected_value = $Expected.$actual_key
-
- if ($actual_value -is [System.Collections.IDictionary]) {
- $actual_value | Assert-DictionaryEquals -Expected $expected_value
- } elseif ($actual_value -is [System.Collections.ArrayList] -or $actual_value -is [Array]) {
- for ($i = 0; $i -lt $actual_value.Count; $i++) {
- $actual_entry = $actual_value[$i]
- $expected_entry = $expected_value[$i]
- if ($actual_entry -is [System.Collections.IDictionary]) {
- $actual_entry | Assert-DictionaryEquals -Expected $expected_entry
- } else {
- Assert-Equals -Actual $actual_entry -Expected $expected_entry
+
+ process {
+ $actual_keys = $Actual.Keys
+ $expected_keys = $Expected.Keys
+
+ $actual_keys.Count | Assert-Equal -Expected $expected_keys.Count
+ foreach ($actual_entry in $Actual.GetEnumerator()) {
+ $actual_key = $actual_entry.Key
+ ($actual_key -cin $expected_keys) | Assert-Equal -Expected $true
+ $actual_value = $actual_entry.Value
+ $expected_value = $Expected.$actual_key
+
+ if ($actual_value -is [System.Collections.IDictionary]) {
+ $actual_value | Assert-DictionaryEqual -Expected $expected_value
+ }
+ elseif ($actual_value -is [System.Collections.ArrayList] -or $actual_value -is [Array]) {
+ for ($i = 0; $i -lt $actual_value.Count; $i++) {
+ $actual_entry = $actual_value[$i]
+ $expected_entry = $expected_value[$i]
+ if ($actual_entry -is [System.Collections.IDictionary]) {
+ $actual_entry | Assert-DictionaryEqual -Expected $expected_entry
+ }
+ else {
+ Assert-Equal -Actual $actual_entry -Expected $expected_entry
+ }
}
}
- } else {
- Assert-Equals -Actual $actual_value -Expected $expected_value
+ else {
+ Assert-Equal -Actual $actual_value -Expected $expected_value
+ }
+ }
+ foreach ($expected_key in $expected_keys) {
+ ($expected_key -cin $actual_keys) | Assert-Equal -Expected $true
}
- }
- foreach ($expected_key in $expected_keys) {
- ($expected_key -cin $actual_keys) | Assert-Equals -Expected $true
}
}
@@ -104,28 +113,28 @@ $tests = @{
[System.IO.File]::WriteAllText($args_file, '{ "ANSIBLE_MODULE_ARGS": {} }')
$m = [Ansible.Basic.AnsibleModule]::Create(@($args_file), @{})
- $m.CheckMode | Assert-Equals -Expected $false
- $m.DebugMode | Assert-Equals -Expected $false
- $m.DiffMode | Assert-Equals -Expected $false
- $m.KeepRemoteFiles | Assert-Equals -Expected $false
- $m.ModuleName | Assert-Equals -Expected "undefined win module"
- $m.NoLog | Assert-Equals -Expected $false
- $m.Verbosity | Assert-Equals -Expected 0
- $m.AnsibleVersion | Assert-Equals -Expected $null
+ $m.CheckMode | Assert-Equal -Expected $false
+ $m.DebugMode | Assert-Equal -Expected $false
+ $m.DiffMode | Assert-Equal -Expected $false
+ $m.KeepRemoteFiles | Assert-Equal -Expected $false
+ $m.ModuleName | Assert-Equal -Expected "undefined win module"
+ $m.NoLog | Assert-Equal -Expected $false
+ $m.Verbosity | Assert-Equal -Expected 0
+ $m.AnsibleVersion | Assert-Equal -Expected $null
}
"Empty spec and no options - complex_args" = {
Set-Variable -Name complex_args -Scope Global -Value @{}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
- $m.CheckMode | Assert-Equals -Expected $false
- $m.DebugMode | Assert-Equals -Expected $false
- $m.DiffMode | Assert-Equals -Expected $false
- $m.KeepRemoteFiles | Assert-Equals -Expected $false
- $m.ModuleName | Assert-Equals -Expected "undefined win module"
- $m.NoLog | Assert-Equals -Expected $false
- $m.Verbosity | Assert-Equals -Expected 0
- $m.AnsibleVersion | Assert-Equals -Expected $null
+ $m.CheckMode | Assert-Equal -Expected $false
+ $m.DebugMode | Assert-Equal -Expected $false
+ $m.DiffMode | Assert-Equal -Expected $false
+ $m.KeepRemoteFiles | Assert-Equal -Expected $false
+ $m.ModuleName | Assert-Equal -Expected "undefined win module"
+ $m.NoLog | Assert-Equal -Expected $false
+ $m.Verbosity | Assert-Equal -Expected 0
+ $m.AnsibleVersion | Assert-Equal -Expected $null
}
"Internal param changes - args file" = {
@@ -152,16 +161,16 @@ $tests = @{
}
}
"@)
- $m = [Ansible.Basic.AnsibleModule]::Create(@($args_file), @{supports_check_mode=$true})
- $m.CheckMode | Assert-Equals -Expected $true
- $m.DebugMode | Assert-Equals -Expected $true
- $m.DiffMode | Assert-Equals -Expected $true
- $m.KeepRemoteFiles | Assert-Equals -Expected $true
- $m.ModuleName | Assert-Equals -Expected "ansible_basic_tests"
- $m.NoLog | Assert-Equals -Expected $true
- $m.Verbosity | Assert-Equals -Expected 3
- $m.AnsibleVersion | Assert-Equals -Expected "2.8.0"
- $m.Tmpdir | Assert-Equals -Expected $m_tmpdir
+ $m = [Ansible.Basic.AnsibleModule]::Create(@($args_file), @{supports_check_mode = $true })
+ $m.CheckMode | Assert-Equal -Expected $true
+ $m.DebugMode | Assert-Equal -Expected $true
+ $m.DiffMode | Assert-Equal -Expected $true
+ $m.KeepRemoteFiles | Assert-Equal -Expected $true
+ $m.ModuleName | Assert-Equal -Expected "ansible_basic_tests"
+ $m.NoLog | Assert-Equal -Expected $true
+ $m.Verbosity | Assert-Equal -Expected 3
+ $m.AnsibleVersion | Assert-Equal -Expected "2.8.0"
+ $m.Tmpdir | Assert-Equal -Expected $m_tmpdir
}
"Internal param changes - complex_args" = {
@@ -187,15 +196,15 @@ $tests = @{
supports_check_mode = $true
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- $m.CheckMode | Assert-Equals -Expected $true
- $m.DebugMode | Assert-Equals -Expected $true
- $m.DiffMode | Assert-Equals -Expected $true
- $m.KeepRemoteFiles | Assert-Equals -Expected $true
- $m.ModuleName | Assert-Equals -Expected "ansible_basic_tests"
- $m.NoLog | Assert-Equals -Expected $true
- $m.Verbosity | Assert-Equals -Expected 3
- $m.AnsibleVersion | Assert-Equals -Expected "2.8.0"
- $m.Tmpdir | Assert-Equals -Expected $m_tmpdir
+ $m.CheckMode | Assert-Equal -Expected $true
+ $m.DebugMode | Assert-Equal -Expected $true
+ $m.DiffMode | Assert-Equal -Expected $true
+ $m.KeepRemoteFiles | Assert-Equal -Expected $true
+ $m.ModuleName | Assert-Equal -Expected "ansible_basic_tests"
+ $m.NoLog | Assert-Equal -Expected $true
+ $m.Verbosity | Assert-Equal -Expected 3
+ $m.AnsibleVersion | Assert-Equal -Expected "2.8.0"
+ $m.Tmpdir | Assert-Equal -Expected $m_tmpdir
}
"Parse complex module options" = {
@@ -203,65 +212,65 @@ $tests = @{
options = @{
option_default = @{}
missing_option_default = @{}
- string_option = @{type = "str"}
- required_option = @{required = $true}
- missing_choices = @{choices = "a", "b"}
- choices = @{choices = "a", "b"}
- one_choice = @{choices = ,"b"}
- choice_with_default = @{choices = "a", "b"; default = "b"}
- alias_direct = @{aliases = ,"alias_direct1"}
- alias_as_alias = @{aliases = "alias_as_alias1", "alias_as_alias2"}
- bool_type = @{type = "bool"}
- bool_from_str = @{type = "bool"}
+ string_option = @{type = "str" }
+ required_option = @{required = $true }
+ missing_choices = @{choices = "a", "b" }
+ choices = @{choices = "a", "b" }
+ one_choice = @{choices = , "b" }
+ choice_with_default = @{choices = "a", "b"; default = "b" }
+ alias_direct = @{aliases = , "alias_direct1" }
+ alias_as_alias = @{aliases = "alias_as_alias1", "alias_as_alias2" }
+ bool_type = @{type = "bool" }
+ bool_from_str = @{type = "bool" }
dict_type = @{
type = "dict"
options = @{
- int_type = @{type = "int"}
- str_type = @{type = "str"; default = "str_sub_type"}
+ int_type = @{type = "int" }
+ str_type = @{type = "str"; default = "str_sub_type" }
}
}
dict_type_missing = @{
type = "dict"
options = @{
- int_type = @{type = "int"}
- str_type = @{type = "str"; default = "str_sub_type"}
+ int_type = @{type = "int" }
+ str_type = @{type = "str"; default = "str_sub_type" }
}
}
dict_type_defaults = @{
type = "dict"
apply_defaults = $true
options = @{
- int_type = @{type = "int"}
- str_type = @{type = "str"; default = "str_sub_type"}
+ int_type = @{type = "int" }
+ str_type = @{type = "str"; default = "str_sub_type" }
}
}
- dict_type_json = @{type = "dict"}
- dict_type_str = @{type = "dict"}
- float_type = @{type = "float"}
- int_type = @{type = "int"}
- json_type = @{type = "json"}
- json_type_dict = @{type = "json"}
- list_type = @{type = "list"}
- list_type_str = @{type = "list"}
- list_with_int = @{type = "list"; elements = "int"}
- list_type_single = @{type = "list"}
+ dict_type_json = @{type = "dict" }
+ dict_type_str = @{type = "dict" }
+ float_type = @{type = "float" }
+ int_type = @{type = "int" }
+ json_type = @{type = "json" }
+ json_type_dict = @{type = "json" }
+ list_type = @{type = "list" }
+ list_type_str = @{type = "list" }
+ list_with_int = @{type = "list"; elements = "int" }
+ list_type_single = @{type = "list" }
list_with_dict = @{
type = "list"
elements = "dict"
options = @{
- int_type = @{type = "int"}
- str_type = @{type = "str"; default = "str_sub_type"}
+ int_type = @{type = "int" }
+ str_type = @{type = "str"; default = "str_sub_type" }
}
}
- path_type = @{type = "path"}
- path_type_nt = @{type = "path"}
- path_type_missing = @{type = "path"}
- raw_type_str = @{type = "raw"}
- raw_type_int = @{type = "raw"}
- sid_type = @{type = "sid"}
- sid_from_name = @{type = "sid"}
- str_type = @{type = "str"}
- delegate_type = @{type = [Func[[Object], [UInt64]]]{ [System.UInt64]::Parse($args[0]) }}
+ path_type = @{type = "path" }
+ path_type_nt = @{type = "path" }
+ path_type_missing = @{type = "path" }
+ raw_type_str = @{type = "raw" }
+ raw_type_int = @{type = "raw" }
+ sid_type = @{type = "sid" }
+ sid_from_name = @{type = "sid" }
+ str_type = @{type = "str" }
+ delegate_type = @{type = [Func[[Object], [UInt64]]] { [System.UInt64]::Parse($args[0]) } }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
@@ -311,129 +320,131 @@ $tests = @{
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- $m.Params.option_default | Assert-Equals -Expected "1"
- $m.Params.option_default.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.missing_option_default | Assert-Equals -Expected $null
- $m.Params.string_option | Assert-Equals -Expected "1"
- $m.Params.string_option.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.required_option | Assert-Equals -Expected "required"
- $m.Params.required_option.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.missing_choices | Assert-Equals -Expected $null
- $m.Params.choices | Assert-Equals -Expected "a"
- $m.Params.choices.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.one_choice | Assert-Equals -Expected "b"
- $m.Params.one_choice.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.choice_with_default | Assert-Equals -Expected "b"
- $m.Params.choice_with_default.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.alias_direct | Assert-Equals -Expected "a"
- $m.Params.alias_direct.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.alias_as_alias | Assert-Equals -Expected "a"
- $m.Params.alias_as_alias.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.bool_type | Assert-Equals -Expected $true
- $m.Params.bool_type.GetType().ToString() | Assert-Equals -Expected "System.Boolean"
- $m.Params.bool_from_str | Assert-Equals -Expected $false
- $m.Params.bool_from_str.GetType().ToString() | Assert-Equals -Expected "System.Boolean"
- $m.Params.dict_type | Assert-DictionaryEquals -Expected @{int_type = 10; str_type = "str_sub_type"}
- $m.Params.dict_type.GetType().ToString() | Assert-Equals -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
- $m.Params.dict_type.int_type.GetType().ToString() | Assert-Equals -Expected "System.Int32"
- $m.Params.dict_type.str_type.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.dict_type_missing | Assert-Equals -Expected $null
- $m.Params.dict_type_defaults | Assert-DictionaryEquals -Expected @{int_type = $null; str_type = "str_sub_type"}
- $m.Params.dict_type_defaults.GetType().ToString() | Assert-Equals -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
- $m.Params.dict_type_defaults.str_type.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.dict_type_json | Assert-DictionaryEquals -Expected @{
+ $m.Params.option_default | Assert-Equal -Expected "1"
+ $m.Params.option_default.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.missing_option_default | Assert-Equal -Expected $null
+ $m.Params.string_option | Assert-Equal -Expected "1"
+ $m.Params.string_option.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.required_option | Assert-Equal -Expected "required"
+ $m.Params.required_option.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.missing_choices | Assert-Equal -Expected $null
+ $m.Params.choices | Assert-Equal -Expected "a"
+ $m.Params.choices.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.one_choice | Assert-Equal -Expected "b"
+ $m.Params.one_choice.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.choice_with_default | Assert-Equal -Expected "b"
+ $m.Params.choice_with_default.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.alias_direct | Assert-Equal -Expected "a"
+ $m.Params.alias_direct.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.alias_as_alias | Assert-Equal -Expected "a"
+ $m.Params.alias_as_alias.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.bool_type | Assert-Equal -Expected $true
+ $m.Params.bool_type.GetType().ToString() | Assert-Equal -Expected "System.Boolean"
+ $m.Params.bool_from_str | Assert-Equal -Expected $false
+ $m.Params.bool_from_str.GetType().ToString() | Assert-Equal -Expected "System.Boolean"
+ $m.Params.dict_type | Assert-DictionaryEqual -Expected @{int_type = 10; str_type = "str_sub_type" }
+ $m.Params.dict_type.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
+ $m.Params.dict_type.int_type.GetType().ToString() | Assert-Equal -Expected "System.Int32"
+ $m.Params.dict_type.str_type.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.dict_type_missing | Assert-Equal -Expected $null
+ $m.Params.dict_type_defaults | Assert-DictionaryEqual -Expected @{int_type = $null; str_type = "str_sub_type" }
+ $m.Params.dict_type_defaults.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
+ $m.Params.dict_type_defaults.str_type.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.dict_type_json | Assert-DictionaryEqual -Expected @{
a = "a"
b = 1
c = @("a", "b")
}
- $m.Params.dict_type_json.GetType().ToString() | Assert-Equals -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
- $m.Params.dict_type_json.a.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.dict_type_json.b.GetType().ToString() | Assert-Equals -Expected "System.Int32"
- $m.Params.dict_type_json.c.GetType().ToString() | Assert-Equals -Expected "System.Collections.ArrayList"
- $m.Params.dict_type_str | Assert-DictionaryEquals -Expected @{a = "a"; b = "b 2"; c = "c"}
- $m.Params.dict_type_str.GetType().ToString() | Assert-Equals -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
- $m.Params.dict_type_str.a.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.dict_type_str.b.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.dict_type_str.c.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.float_type | Assert-Equals -Expected ([System.Single]3.14159)
- $m.Params.float_type.GetType().ToString() | Assert-Equals -Expected "System.Single"
- $m.Params.int_type | Assert-Equals -Expected 0
- $m.Params.int_type.GetType().ToString() | Assert-Equals -Expected "System.Int32"
- $m.Params.json_type | Assert-Equals -Expected '{"a":"a","b":1,"c":["a","b"]}'
- $m.Params.json_type.GetType().ToString() | Assert-Equals -Expected "System.String"
- [Ansible.Basic.AnsibleModule]::FromJson($m.Params.json_type_dict) | Assert-DictionaryEquals -Expected ([Ansible.Basic.AnsibleModule]::FromJson('{"a":"a","b":1,"c":["a","b"]}'))
- $m.Params.json_type_dict.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.list_type.GetType().ToString() | Assert-Equals -Expected "System.Collections.Generic.List``1[System.Object]"
- $m.Params.list_type.Count | Assert-Equals -Expected 4
- $m.Params.list_type[0] | Assert-Equals -Expected "a"
- $m.Params.list_type[0].GetType().FullName | Assert-Equals -Expected "System.String"
- $m.Params.list_type[1] | Assert-Equals -Expected "b"
- $m.Params.list_type[1].GetType().FullName | Assert-Equals -Expected "System.String"
- $m.Params.list_type[2] | Assert-Equals -Expected 1
- $m.Params.list_type[2].GetType().FullName | Assert-Equals -Expected "System.Int32"
- $m.Params.list_type[3] | Assert-Equals -Expected 2
- $m.Params.list_type[3].GetType().FullName | Assert-Equals -Expected "System.Int32"
- $m.Params.list_type_str.GetType().ToString() | Assert-Equals -Expected "System.Collections.Generic.List``1[System.Object]"
- $m.Params.list_type_str.Count | Assert-Equals -Expected 4
- $m.Params.list_type_str[0] | Assert-Equals -Expected "a"
- $m.Params.list_type_str[0].GetType().FullName | Assert-Equals -Expected "System.String"
- $m.Params.list_type_str[1] | Assert-Equals -Expected "b"
- $m.Params.list_type_str[1].GetType().FullName | Assert-Equals -Expected "System.String"
- $m.Params.list_type_str[2] | Assert-Equals -Expected "1"
- $m.Params.list_type_str[2].GetType().FullName | Assert-Equals -Expected "System.String"
- $m.Params.list_type_str[3] | Assert-Equals -Expected "2"
- $m.Params.list_type_str[3].GetType().FullName | Assert-Equals -Expected "System.String"
- $m.Params.list_with_int.GetType().ToString() | Assert-Equals -Expected "System.Collections.Generic.List``1[System.Object]"
- $m.Params.list_with_int.Count | Assert-Equals -Expected 2
- $m.Params.list_with_int[0] | Assert-Equals -Expected 1
- $m.Params.list_with_int[0].GetType().FullName | Assert-Equals -Expected "System.Int32"
- $m.Params.list_with_int[1] | Assert-Equals -Expected 2
- $m.Params.list_with_int[1].GetType().FullName | Assert-Equals -Expected "System.Int32"
- $m.Params.list_type_single.GetType().ToString() | Assert-Equals -Expected "System.Collections.Generic.List``1[System.Object]"
- $m.Params.list_type_single.Count | Assert-Equals -Expected 1
- $m.Params.list_type_single[0] | Assert-Equals -Expected "single"
- $m.Params.list_type_single[0].GetType().FullName | Assert-Equals -Expected "System.String"
- $m.Params.list_with_dict.GetType().FullName.StartsWith("System.Collections.Generic.List``1[[System.Object") | Assert-Equals -Expected $true
- $m.Params.list_with_dict.Count | Assert-Equals -Expected 3
- $m.Params.list_with_dict[0].GetType().FullName.StartsWith("System.Collections.Generic.Dictionary``2[[System.String") | Assert-Equals -Expected $true
- $m.Params.list_with_dict[0] | Assert-DictionaryEquals -Expected @{int_type = 2; str_type = "dict entry"}
- $m.Params.list_with_dict[0].int_type.GetType().FullName.ToString() | Assert-Equals -Expected "System.Int32"
- $m.Params.list_with_dict[0].str_type.GetType().FullName.ToString() | Assert-Equals -Expected "System.String"
- $m.Params.list_with_dict[1].GetType().FullName.StartsWith("System.Collections.Generic.Dictionary``2[[System.String") | Assert-Equals -Expected $true
- $m.Params.list_with_dict[1] | Assert-DictionaryEquals -Expected @{int_type = 1; str_type = "str_sub_type"}
- $m.Params.list_with_dict[1].int_type.GetType().FullName.ToString() | Assert-Equals -Expected "System.Int32"
- $m.Params.list_with_dict[1].str_type.GetType().FullName.ToString() | Assert-Equals -Expected "System.String"
- $m.Params.list_with_dict[2].GetType().FullName.StartsWith("System.Collections.Generic.Dictionary``2[[System.String") | Assert-Equals -Expected $true
- $m.Params.list_with_dict[2] | Assert-DictionaryEquals -Expected @{int_type = $null; str_type = "str_sub_type"}
- $m.Params.list_with_dict[2].str_type.GetType().FullName.ToString() | Assert-Equals -Expected "System.String"
- $m.Params.path_type | Assert-Equals -Expected "$($env:SystemRoot)\System32"
- $m.Params.path_type.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.path_type_nt | Assert-Equals -Expected "\\?\%SystemRoot%\System32"
- $m.Params.path_type_nt.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.path_type_missing | Assert-Equals -Expected "T:\missing\path"
- $m.Params.path_type_missing.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.raw_type_str | Assert-Equals -Expected "str"
- $m.Params.raw_type_str.GetType().FullName | Assert-Equals -Expected "System.String"
- $m.Params.raw_type_int | Assert-Equals -Expected 1
- $m.Params.raw_type_int.GetType().FullName | Assert-Equals -Expected "System.Int32"
- $m.Params.sid_type | Assert-Equals -Expected (New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList "S-1-5-18")
- $m.Params.sid_type.GetType().ToString() | Assert-Equals -Expected "System.Security.Principal.SecurityIdentifier"
- $m.Params.sid_from_name | Assert-Equals -Expected (New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList "S-1-5-18")
- $m.Params.sid_from_name.GetType().ToString() | Assert-Equals -Expected "System.Security.Principal.SecurityIdentifier"
- $m.Params.str_type | Assert-Equals -Expected "str"
- $m.Params.str_type.GetType().ToString() | Assert-Equals -Expected "System.String"
- $m.Params.delegate_type | Assert-Equals -Expected 1234
- $m.Params.delegate_type.GetType().ToString() | Assert-Equals -Expected "System.UInt64"
+ $m.Params.dict_type_json.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
+ $m.Params.dict_type_json.a.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.dict_type_json.b.GetType().ToString() | Assert-Equal -Expected "System.Int32"
+ $m.Params.dict_type_json.c.GetType().ToString() | Assert-Equal -Expected "System.Collections.ArrayList"
+ $m.Params.dict_type_str | Assert-DictionaryEqual -Expected @{a = "a"; b = "b 2"; c = "c" }
+ $m.Params.dict_type_str.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
+ $m.Params.dict_type_str.a.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.dict_type_str.b.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.dict_type_str.c.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.float_type | Assert-Equal -Expected ([System.Single]3.14159)
+ $m.Params.float_type.GetType().ToString() | Assert-Equal -Expected "System.Single"
+ $m.Params.int_type | Assert-Equal -Expected 0
+ $m.Params.int_type.GetType().ToString() | Assert-Equal -Expected "System.Int32"
+ $m.Params.json_type | Assert-Equal -Expected '{"a":"a","b":1,"c":["a","b"]}'
+ $m.Params.json_type.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $jsonValue = ([Ansible.Basic.AnsibleModule]::FromJson('{"a":"a","b":1,"c":["a","b"]}'))
+ [Ansible.Basic.AnsibleModule]::FromJson($m.Params.json_type_dict) | Assert-DictionaryEqual -Expected $jsonValue
+ $m.Params.json_type_dict.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.list_type.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.List``1[System.Object]"
+ $m.Params.list_type.Count | Assert-Equal -Expected 4
+ $m.Params.list_type[0] | Assert-Equal -Expected "a"
+ $m.Params.list_type[0].GetType().FullName | Assert-Equal -Expected "System.String"
+ $m.Params.list_type[1] | Assert-Equal -Expected "b"
+ $m.Params.list_type[1].GetType().FullName | Assert-Equal -Expected "System.String"
+ $m.Params.list_type[2] | Assert-Equal -Expected 1
+ $m.Params.list_type[2].GetType().FullName | Assert-Equal -Expected "System.Int32"
+ $m.Params.list_type[3] | Assert-Equal -Expected 2
+ $m.Params.list_type[3].GetType().FullName | Assert-Equal -Expected "System.Int32"
+ $m.Params.list_type_str.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.List``1[System.Object]"
+ $m.Params.list_type_str.Count | Assert-Equal -Expected 4
+ $m.Params.list_type_str[0] | Assert-Equal -Expected "a"
+ $m.Params.list_type_str[0].GetType().FullName | Assert-Equal -Expected "System.String"
+ $m.Params.list_type_str[1] | Assert-Equal -Expected "b"
+ $m.Params.list_type_str[1].GetType().FullName | Assert-Equal -Expected "System.String"
+ $m.Params.list_type_str[2] | Assert-Equal -Expected "1"
+ $m.Params.list_type_str[2].GetType().FullName | Assert-Equal -Expected "System.String"
+ $m.Params.list_type_str[3] | Assert-Equal -Expected "2"
+ $m.Params.list_type_str[3].GetType().FullName | Assert-Equal -Expected "System.String"
+ $m.Params.list_with_int.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.List``1[System.Object]"
+ $m.Params.list_with_int.Count | Assert-Equal -Expected 2
+ $m.Params.list_with_int[0] | Assert-Equal -Expected 1
+ $m.Params.list_with_int[0].GetType().FullName | Assert-Equal -Expected "System.Int32"
+ $m.Params.list_with_int[1] | Assert-Equal -Expected 2
+ $m.Params.list_with_int[1].GetType().FullName | Assert-Equal -Expected "System.Int32"
+ $m.Params.list_type_single.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.List``1[System.Object]"
+ $m.Params.list_type_single.Count | Assert-Equal -Expected 1
+ $m.Params.list_type_single[0] | Assert-Equal -Expected "single"
+ $m.Params.list_type_single[0].GetType().FullName | Assert-Equal -Expected "System.String"
+ $m.Params.list_with_dict.GetType().FullName.StartsWith("System.Collections.Generic.List``1[[System.Object") | Assert-Equal -Expected $true
+ $m.Params.list_with_dict.Count | Assert-Equal -Expected 3
+ $m.Params.list_with_dict[0].GetType().FullName.StartsWith("System.Collections.Generic.Dictionary``2[[System.String") | Assert-Equal -Expected $true
+ $m.Params.list_with_dict[0] | Assert-DictionaryEqual -Expected @{int_type = 2; str_type = "dict entry" }
+ $m.Params.list_with_dict[0].int_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.Int32"
+ $m.Params.list_with_dict[0].str_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.list_with_dict[1].GetType().FullName.StartsWith("System.Collections.Generic.Dictionary``2[[System.String") | Assert-Equal -Expected $true
+ $m.Params.list_with_dict[1] | Assert-DictionaryEqual -Expected @{int_type = 1; str_type = "str_sub_type" }
+ $m.Params.list_with_dict[1].int_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.Int32"
+ $m.Params.list_with_dict[1].str_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.list_with_dict[2].GetType().FullName.StartsWith("System.Collections.Generic.Dictionary``2[[System.String") | Assert-Equal -Expected $true
+ $m.Params.list_with_dict[2] | Assert-DictionaryEqual -Expected @{int_type = $null; str_type = "str_sub_type" }
+ $m.Params.list_with_dict[2].str_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.path_type | Assert-Equal -Expected "$($env:SystemRoot)\System32"
+ $m.Params.path_type.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.path_type_nt | Assert-Equal -Expected "\\?\%SystemRoot%\System32"
+ $m.Params.path_type_nt.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.path_type_missing | Assert-Equal -Expected "T:\missing\path"
+ $m.Params.path_type_missing.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.raw_type_str | Assert-Equal -Expected "str"
+ $m.Params.raw_type_str.GetType().FullName | Assert-Equal -Expected "System.String"
+ $m.Params.raw_type_int | Assert-Equal -Expected 1
+ $m.Params.raw_type_int.GetType().FullName | Assert-Equal -Expected "System.Int32"
+ $m.Params.sid_type | Assert-Equal -Expected (New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList "S-1-5-18")
+ $m.Params.sid_type.GetType().ToString() | Assert-Equal -Expected "System.Security.Principal.SecurityIdentifier"
+ $m.Params.sid_from_name | Assert-Equal -Expected (New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList "S-1-5-18")
+ $m.Params.sid_from_name.GetType().ToString() | Assert-Equal -Expected "System.Security.Principal.SecurityIdentifier"
+ $m.Params.str_type | Assert-Equal -Expected "str"
+ $m.Params.str_type.GetType().ToString() | Assert-Equal -Expected "System.String"
+ $m.Params.delegate_type | Assert-Equal -Expected 1234
+ $m.Params.delegate_type.GetType().ToString() | Assert-Equal -Expected "System.UInt64"
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_module_args = @{
option_default = "1"
@@ -500,9 +511,9 @@ $tests = @{
str_type = "str"
delegate_type = 1234
}
- $actual.Keys.Count | Assert-Equals -Expected 2
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $expected_module_args}
+ $actual.Keys.Count | Assert-Equal -Expected 2
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $expected_module_args }
}
"Parse module args with list elements and delegate type" = {
@@ -510,7 +521,7 @@ $tests = @{
options = @{
list_delegate_type = @{
type = "list"
- elements = [Func[[Object], [UInt16]]]{ [System.UInt16]::Parse($args[0]) }
+ elements = [Func[[Object], [UInt16]]] { [System.UInt16]::Parse($args[0]) }
}
}
}
@@ -521,19 +532,20 @@ $tests = @{
)
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- $m.Params.list_delegate_type.GetType().Name | Assert-Equals -Expected 'List`1'
- $m.Params.list_delegate_type[0].GetType().FullName | Assert-Equals -Expected "System.UInt16"
- $m.Params.list_delegate_Type[1].GetType().FullName | Assert-Equals -Expected "System.UInt16"
+ $m.Params.list_delegate_type.GetType().Name | Assert-Equal -Expected 'List`1'
+ $m.Params.list_delegate_type[0].GetType().FullName | Assert-Equal -Expected "System.UInt16"
+ $m.Params.list_delegate_Type[1].GetType().FullName | Assert-Equal -Expected "System.UInt16"
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_module_args = @{
list_delegate_type = @(
@@ -541,9 +553,9 @@ $tests = @{
4321
)
}
- $actual.Keys.Count | Assert-Equals -Expected 2
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $expected_module_args}
+ $actual.Keys.Count | Assert-Equal -Expected 2
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $expected_module_args }
}
"Parse module args with case insensitive input" = {
@@ -559,21 +571,22 @@ $tests = @{
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
# Verifies the case of the params key is set to the module spec not actual input
- $m.Params.Keys | Assert-Equals -Expected @("option1")
- $m.Params.option1 | Assert-Equals -Expected 1
+ $m.Params.Keys | Assert-Equal -Expected @("option1")
+ $m.Params.option1 | Assert-Equal -Expected 1
# Verifies the type conversion happens even on a case insensitive match
- $m.Params.option1.GetType().FullName | Assert-Equals -Expected "System.Int32"
+ $m.Params.option1.GetType().FullName | Assert-Equal -Expected "System.Int32"
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_warnings = "Parameters for (win_test) was a case insensitive match: Option1. "
$expected_warnings += "Module options will become case sensitive in a future Ansible release. "
@@ -589,16 +602,16 @@ $tests = @{
# We have disabled the warning for now
#warnings = @($expected_warnings)
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"No log values" = {
$spec = @{
options = @{
- username = @{type = "str"}
- password = @{type = "str"; no_log = $true}
- password2 = @{type = "int"; no_log = $true}
- dict = @{type = "dict"}
+ username = @{type = "str" }
+ password = @{type = "str"; no_log = $true }
+ password2 = @{type = "int"; no_log = $true }
+ dict = @{type = "dict" }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
@@ -634,20 +647,21 @@ $tests = @{
$m.Result.data = $complex_args.dict
# verify params internally aren't masked
- $m.Params.username | Assert-Equals -Expected "user - pass - name"
- $m.Params.password | Assert-Equals -Expected "pass"
- $m.Params.password2 | Assert-Equals -Expected 1234
- $m.Params.dict.custom | Assert-Equals -Expected "pass"
+ $m.Params.username | Assert-Equal -Expected "user - pass - name"
+ $m.Params.password | Assert-Equal -Expected "pass"
+ $m.Params.password2 | Assert-Equal -Expected 1234
+ $m.Params.dict.custom | Assert-Equal -Expected "pass"
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
# verify no_log params are masked in invocation
$expected = @{
@@ -683,7 +697,7 @@ $tests = @{
changed = $false
data = $complex_args.dict
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
$expected_event = @'
test_no_log - Invoked with:
@@ -707,14 +721,14 @@ test_no_log - Invoked with:
password: VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
'@
$actual_event = (Get-EventLog -LogName Application -Source Ansible -Newest 1).Message
- $actual_event | Assert-DictionaryEquals -Expected $expected_event
+ $actual_event | Assert-DictionaryEqual -Expected $expected_event
}
"No log value with an empty string" = {
$spec = @{
options = @{
- password1 = @{type = "str"; no_log = $true}
- password2 = @{type = "str"; no_log = $true}
+ password1 = @{type = "str"; no_log = $true }
+ password2 = @{type = "str"; no_log = $true }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
@@ -726,18 +740,19 @@ test_no_log - Invoked with:
$m.Result.data = $complex_args.dict
# verify params internally aren't masked
- $m.Params.password1 | Assert-Equals -Expected ""
- $m.Params.password2 | Assert-Equals -Expected $null
+ $m.Params.password1 | Assert-Equal -Expected ""
+ $m.Params.password2 | Assert-Equal -Expected $null
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
invocation = @{
@@ -749,15 +764,15 @@ test_no_log - Invoked with:
changed = $false
data = $complex_args.dict
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Removed in version" = {
$spec = @{
options = @{
- removed1 = @{removed_in_version = "2.1"}
- removed2 = @{removed_in_version = "2.2"}
- removed3 = @{removed_in_version = "2.3"; removed_from_collection = "ansible.builtin"}
+ removed1 = @{removed_in_version = "2.1" }
+ removed2 = @{removed_in_version = "2.2" }
+ removed3 = @{removed_in_version = "2.3"; removed_from_collection = "ansible.builtin" }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
@@ -770,12 +785,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -799,15 +815,15 @@ test_no_log - Invoked with:
}
)
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Removed at date" = {
$spec = @{
options = @{
- removed1 = @{removed_at_date = [DateTime]"2020-03-10"}
- removed2 = @{removed_at_date = [DateTime]"2020-03-11"}
- removed3 = @{removed_at_date = [DateTime]"2020-06-07"; removed_from_collection = "ansible.builtin"}
+ removed1 = @{removed_at_date = [DateTime]"2020-03-10" }
+ removed2 = @{removed_at_date = [DateTime]"2020-03-11" }
+ removed3 = @{removed_at_date = [DateTime]"2020-06-07"; removed_from_collection = "ansible.builtin" }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
@@ -820,12 +836,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -849,29 +866,53 @@ test_no_log - Invoked with:
}
)
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Deprecated aliases" = {
$spec = @{
options = @{
- option1 = @{ type = "str"; aliases = "alias1"; deprecated_aliases = @(@{name = "alias1"; version = "2.10"}) }
- option2 = @{ type = "str"; aliases = "alias2"; deprecated_aliases = @(@{name = "alias2"; version = "2.11"}) }
+ option1 = @{ type = "str"; aliases = "alias1"; deprecated_aliases = @(@{name = "alias1"; version = "2.10" }) }
+ option2 = @{ type = "str"; aliases = "alias2"; deprecated_aliases = @(@{name = "alias2"; version = "2.11" }) }
option3 = @{
type = "dict"
options = @{
- option1 = @{ type = "str"; aliases = "alias1"; deprecated_aliases = @(@{name = "alias1"; version = "2.10"}) }
- option2 = @{ type = "str"; aliases = "alias2"; deprecated_aliases = @(@{name = "alias2"; version = "2.11"}) }
- option3 = @{ type = "str"; aliases = "alias3"; deprecated_aliases = @(@{name = "alias3"; version = "2.12"; collection_name = "ansible.builtin"}) }
- option4 = @{ type = "str"; aliases = "alias4"; deprecated_aliases = @(@{name = "alias4"; date = [DateTime]"2020-03-11"}) }
- option5 = @{ type = "str"; aliases = "alias5"; deprecated_aliases = @(@{name = "alias5"; date = [DateTime]"2020-03-09"}) }
- option6 = @{ type = "str"; aliases = "alias6"; deprecated_aliases = @(@{name = "alias6"; date = [DateTime]"2020-06-01"; collection_name = "ansible.builtin"}) }
+ option1 = @{ type = "str"; aliases = "alias1"; deprecated_aliases = @(@{name = "alias1"; version = "2.10" }) }
+ option2 = @{ type = "str"; aliases = "alias2"; deprecated_aliases = @(@{name = "alias2"; version = "2.11" }) }
+ option3 = @{
+ type = "str"
+ aliases = "alias3"
+ deprecated_aliases = @(
+ @{name = "alias3"; version = "2.12"; collection_name = "ansible.builtin" }
+ )
+ }
+ option4 = @{ type = "str"; aliases = "alias4"; deprecated_aliases = @(@{name = "alias4"; date = [DateTime]"2020-03-11" }) }
+ option5 = @{ type = "str"; aliases = "alias5"; deprecated_aliases = @(@{name = "alias5"; date = [DateTime]"2020-03-09" }) }
+ option6 = @{
+ type = "str"
+ aliases = "alias6"
+ deprecated_aliases = @(
+ @{name = "alias6"; date = [DateTime]"2020-06-01"; collection_name = "ansible.builtin" }
+ )
+ }
}
}
- option4 = @{ type = "str"; aliases = "alias4"; deprecated_aliases = @(@{name = "alias4"; date = [DateTime]"2020-03-10"}) }
- option5 = @{ type = "str"; aliases = "alias5"; deprecated_aliases = @(@{name = "alias5"; date = [DateTime]"2020-03-12"}) }
- option6 = @{ type = "str"; aliases = "alias6"; deprecated_aliases = @(@{name = "alias6"; version = "2.12"; collection_name = "ansible.builtin"}) }
- option7 = @{ type = "str"; aliases = "alias7"; deprecated_aliases = @(@{name = "alias7"; date = [DateTime]"2020-06-07"; collection_name = "ansible.builtin"}) }
+ option4 = @{ type = "str"; aliases = "alias4"; deprecated_aliases = @(@{name = "alias4"; date = [DateTime]"2020-03-10" }) }
+ option5 = @{ type = "str"; aliases = "alias5"; deprecated_aliases = @(@{name = "alias5"; date = [DateTime]"2020-03-12" }) }
+ option6 = @{
+ type = "str"
+ aliases = "alias6"
+ deprecated_aliases = @(
+ @{name = "alias6"; version = "2.12"; collection_name = "ansible.builtin" }
+ )
+ }
+ option7 = @{
+ type = "str"
+ aliases = "alias7"
+ deprecated_aliases = @(
+ @{name = "alias7"; date = [DateTime]"2020-06-07"; collection_name = "ansible.builtin" }
+ )
+ }
}
}
@@ -896,12 +937,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -974,15 +1016,15 @@ test_no_log - Invoked with:
}
)
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Required by - single value" = {
$spec = @{
options = @{
- option1 = @{type = "str"}
- option2 = @{type = "str"}
- option3 = @{type = "str"}
+ option1 = @{type = "str" }
+ option2 = @{type = "str" }
+ option3 = @{type = "str" }
}
required_by = @{
option1 = "option2"
@@ -998,12 +1040,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -1015,15 +1058,15 @@ test_no_log - Invoked with:
}
}
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Required by - multiple values" = {
$spec = @{
options = @{
- option1 = @{type = "str"}
- option2 = @{type = "str"}
- option3 = @{type = "str"}
+ option1 = @{type = "str" }
+ option2 = @{type = "str" }
+ option3 = @{type = "str" }
}
required_by = @{
option1 = "option2", "option3"
@@ -1040,12 +1083,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -1057,15 +1101,15 @@ test_no_log - Invoked with:
}
}
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Required by explicit null" = {
$spec = @{
options = @{
- option1 = @{type = "str"}
- option2 = @{type = "str"}
- option3 = @{type = "str"}
+ option1 = @{type = "str" }
+ option2 = @{type = "str" }
+ option3 = @{type = "str" }
}
required_by = @{
option1 = "option2"
@@ -1081,12 +1125,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -1098,15 +1143,15 @@ test_no_log - Invoked with:
}
}
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Required by failed - single value" = {
$spec = @{
options = @{
- option1 = @{type = "str"}
- option2 = @{type = "str"}
- option3 = @{type = "str"}
+ option1 = @{type = "str" }
+ option2 = @{type = "str" }
+ option3 = @{type = "str" }
}
required_by = @{
option1 = "option2"
@@ -1119,12 +1164,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -1136,15 +1182,15 @@ test_no_log - Invoked with:
}
msg = "missing parameter(s) required by 'option1': option2"
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Required by failed - multiple values" = {
$spec = @{
options = @{
- option1 = @{type = "str"}
- option2 = @{type = "str"}
- option3 = @{type = "str"}
+ option1 = @{type = "str" }
+ option2 = @{type = "str" }
+ option3 = @{type = "str" }
}
required_by = @{
option1 = "option2", "option3"
@@ -1157,12 +1203,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -1174,7 +1221,7 @@ test_no_log - Invoked with:
}
msg = "missing parameter(s) required by 'option1': option2, option3"
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Debug without debug set" = {
@@ -1184,7 +1231,7 @@ test_no_log - Invoked with:
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$m.Debug("debug message")
$actual_event = (Get-EventLog -LogName Application -Source Ansible -Newest 1).Message
- $actual_event | Assert-Equals -Expected "undefined win module - Invoked with:`r`n "
+ $actual_event | Assert-Equal -Expected "undefined win module - Invoked with:`r`n "
}
"Debug with debug set" = {
@@ -1194,7 +1241,7 @@ test_no_log - Invoked with:
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$m.Debug("debug message")
$actual_event = (Get-EventLog -LogName Application -Source Ansible -Newest 1).Message
- $actual_event | Assert-Equals -Expected "undefined win module - [DEBUG] debug message"
+ $actual_event | Assert-Equal -Expected "undefined win module - [DEBUG] debug message"
}
"Deprecate and warn with version" = {
@@ -1206,20 +1253,21 @@ test_no_log - Invoked with:
$m.Warn("warning")
$actual_warn_event = Get-EventLog -LogName Application -Source Ansible -Newest 1
- $actual_deprecate_event_1.Message | Assert-Equals -Expected "undefined win module - [DEPRECATION WARNING] message 2.7"
- $actual_deprecate_event_2.Message | Assert-Equals -Expected "undefined win module - [DEPRECATION WARNING] message w collection 2.8"
- $actual_warn_event.EntryType | Assert-Equals -Expected "Warning"
- $actual_warn_event.Message | Assert-Equals -Expected "undefined win module - [WARNING] warning"
+ $actual_deprecate_event_1.Message | Assert-Equal -Expected "undefined win module - [DEPRECATION WARNING] message 2.7"
+ $actual_deprecate_event_2.Message | Assert-Equal -Expected "undefined win module - [DEPRECATION WARNING] message w collection 2.8"
+ $actual_warn_event.EntryType | Assert-Equal -Expected "Warning"
+ $actual_warn_event.Message | Assert-Equal -Expected "undefined win module - [WARNING] warning"
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -1228,11 +1276,11 @@ test_no_log - Invoked with:
}
warnings = @("warning")
deprecations = @(
- @{msg = "message"; version = "2.7"; collection_name = $null},
- @{msg = "message w collection"; version = "2.8"; collection_name = "ansible.builtin"}
+ @{msg = "message"; version = "2.7"; collection_name = $null },
+ @{msg = "message w collection"; version = "2.8"; collection_name = "ansible.builtin" }
)
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Deprecate and warn with date" = {
@@ -1244,20 +1292,21 @@ test_no_log - Invoked with:
$m.Warn("warning")
$actual_warn_event = Get-EventLog -LogName Application -Source Ansible -Newest 1
- $actual_deprecate_event_1.Message | Assert-Equals -Expected "undefined win module - [DEPRECATION WARNING] message 2020-01-01"
- $actual_deprecate_event_2.Message | Assert-Equals -Expected "undefined win module - [DEPRECATION WARNING] message w collection 2020-01-02"
- $actual_warn_event.EntryType | Assert-Equals -Expected "Warning"
- $actual_warn_event.Message | Assert-Equals -Expected "undefined win module - [WARNING] warning"
+ $actual_deprecate_event_1.Message | Assert-Equal -Expected "undefined win module - [DEPRECATION WARNING] message 2020-01-01"
+ $actual_deprecate_event_2.Message | Assert-Equal -Expected "undefined win module - [DEPRECATION WARNING] message w collection 2020-01-02"
+ $actual_warn_event.EntryType | Assert-Equal -Expected "Warning"
+ $actual_warn_event.Message | Assert-Equal -Expected "undefined win module - [WARNING] warning"
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
@@ -1266,11 +1315,11 @@ test_no_log - Invoked with:
}
warnings = @("warning")
deprecations = @(
- @{msg = "message"; date = "2020-01-01"; collection_name = $null},
- @{msg = "message w collection"; date = "2020-01-02"; collection_name = "ansible.builtin"}
+ @{msg = "message"; date = "2020-01-01"; collection_name = $null },
+ @{msg = "message w collection"; date = "2020-01-02"; collection_name = "ansible.builtin" }
)
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"FailJson with message" = {
@@ -1279,12 +1328,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.FailJson("fail message")
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $failed
+ $failed | Assert-Equal -Expected $failed
$expected = @{
changed = $false
@@ -1294,7 +1344,7 @@ test_no_log - Invoked with:
failed = $true
msg = "fail message"
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"FailJson with Exception" = {
@@ -1302,19 +1352,21 @@ test_no_log - Invoked with:
try {
[System.IO.Path]::GetFullPath($null)
- } catch {
+ }
+ catch {
$excp = $_.Exception
}
$failed = $false
try {
$m.FailJson("fail message", $excp)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $failed
+ $failed | Assert-Equal -Expected $failed
$expected = @{
changed = $false
@@ -1324,7 +1376,7 @@ test_no_log - Invoked with:
failed = $true
msg = "fail message"
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"FailJson with ErrorRecord" = {
@@ -1332,19 +1384,21 @@ test_no_log - Invoked with:
try {
Get-Item -LiteralPath $null
- } catch {
+ }
+ catch {
$error_record = $_
}
$failed = $false
try {
$m.FailJson("fail message", $error_record)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $failed
+ $failed | Assert-Equal -Expected $failed
$expected = @{
changed = $false
@@ -1354,7 +1408,7 @@ test_no_log - Invoked with:
failed = $true
msg = "fail message"
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"FailJson with Exception and verbosity 3" = {
@@ -1365,25 +1419,28 @@ test_no_log - Invoked with:
try {
[System.IO.Path]::GetFullPath($null)
- } catch {
+ }
+ catch {
$excp = $_.Exception
}
$failed = $false
try {
$m.FailJson("fail message", $excp)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $failed
+ $failed | Assert-Equal -Expected $failed
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = @{}}
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected "fail message"
- $actual.exception.Contains('System.Management.Automation.MethodInvocationException: Exception calling "GetFullPath" with "1" argument(s)') | Assert-Equals -Expected $true
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{} }
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected "fail message"
+ $expected = 'System.Management.Automation.MethodInvocationException: Exception calling "GetFullPath" with "1" argument(s)'
+ $actual.exception.Contains($expected) | Assert-Equal -Expected $true
}
"FailJson with ErrorRecord and verbosity 3" = {
@@ -1394,43 +1451,46 @@ test_no_log - Invoked with:
try {
Get-Item -LiteralPath $null
- } catch {
+ }
+ catch {
$error_record = $_
}
$failed = $false
try {
$m.FailJson("fail message", $error_record)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $failed
+ $failed | Assert-Equal -Expected $failed
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = @{}}
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected "fail message"
- $actual.exception.Contains("Cannot bind argument to parameter 'LiteralPath' because it is null") | Assert-Equals -Expected $true
- $actual.exception.Contains("+ Get-Item -LiteralPath `$null") | Assert-Equals -Expected $true
- $actual.exception.Contains("ScriptStackTrace:") | Assert-Equals -Expected $true
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{} }
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected "fail message"
+ $actual.exception.Contains("Cannot bind argument to parameter 'LiteralPath' because it is null") | Assert-Equal -Expected $true
+ $actual.exception.Contains("+ Get-Item -LiteralPath `$null") | Assert-Equal -Expected $true
+ $actual.exception.Contains("ScriptStackTrace:") | Assert-Equal -Expected $true
}
"Diff entry without diff set" = {
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
- $m.Diff.before = @{a = "a"}
- $m.Diff.after = @{b = "b"}
+ $m.Diff.before = @{a = "a" }
+ $m.Diff.after = @{b = "b" }
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $failed
+ $failed | Assert-Equal -Expected $failed
$expected = @{
changed = $false
@@ -1438,7 +1498,7 @@ test_no_log - Invoked with:
module_args = @{}
}
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"Diff entry with diff set" = {
@@ -1446,18 +1506,19 @@ test_no_log - Invoked with:
_ansible_diff = $true
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
- $m.Diff.before = @{a = "a"}
- $m.Diff.after = @{b = "b"}
+ $m.Diff.before = @{a = "a" }
+ $m.Diff.after = @{b = "b" }
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $failed
+ $failed | Assert-Equal -Expected $failed
$expected = @{
changed = $false
@@ -1465,11 +1526,11 @@ test_no_log - Invoked with:
module_args = @{}
}
diff = @{
- before = @{a = "a"}
- after = @{b = "b"}
+ before = @{a = "a" }
+ after = @{b = "b" }
}
}
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
"ParseBool tests" = {
@@ -1508,8 +1569,8 @@ test_no_log - Invoked with:
foreach ($map in $mapping.GetEnumerator()) {
$expected = $map.Value
$actual = [Ansible.Basic.AnsibleModule]::ParseBool($map.Key)
- $actual | Assert-Equals -Expected $expected
- $actual.GetType().FullName | Assert-Equals -Expected "System.Boolean"
+ $actual | Assert-Equal -Expected $expected
+ $actual.GetType().FullName | Assert-Equal -Expected "System.Boolean"
}
$fail_bools = @(
@@ -1523,11 +1584,12 @@ test_no_log - Invoked with:
$failed = $false
try {
[Ansible.Basic.AnsibleModule]::ParseBool($fail_bool)
- } catch {
+ }
+ catch {
$failed = $true
- $_.Exception.Message.Contains("The value '$fail_bool' is not a valid boolean") | Assert-Equals -Expected $true
+ $_.Exception.Message.Contains("The value '$fail_bool' is not a valid boolean") | Assert-Equal -Expected $true
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
}
@@ -1538,9 +1600,10 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$expected = @{
invocation = @{
@@ -1553,9 +1616,9 @@ test_no_log - Invoked with:
msg = "Unsupported parameters for (undefined win module) module: _ansible_invalid. Supported parameters include: "
}
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
- $actual | Assert-DictionaryEquals -Expected $expected
+ $actual | Assert-DictionaryEqual -Expected $expected
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Module tmpdir with present remote tmp" = {
@@ -1577,29 +1640,30 @@ test_no_log - Invoked with:
_ansible_remote_tmp = $remote_tmp.ToString()
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
- (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equals -Expected $true
+ (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
$actual_tmpdir = $m.Tmpdir
$parent_tmpdir = Split-Path -Path $actual_tmpdir -Parent
$tmpdir_name = Split-Path -Path $actual_tmpdir -Leaf
- $parent_tmpdir | Assert-Equals -Expected $remote_tmp
- $tmpdir_name.StartSwith("ansible-moduletmp-") | Assert-Equals -Expected $true
- (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equals -Expected $true
- (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equals -Expected $true
+ $parent_tmpdir | Assert-Equal -Expected $remote_tmp
+ $tmpdir_name.StartSwith("ansible-moduletmp-") | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
$children = [System.IO.Directory]::EnumerateDirectories($remote_tmp)
- $children.Count | Assert-Equals -Expected 1
+ $children.Count | Assert-Equal -Expected 1
$actual_tmpdir_sd = (Get-Acl -Path $actual_tmpdir).GetSecurityDescriptorSddlForm("Access, Owner")
- $actual_tmpdir_sd | Assert-Equals -Expected $expected_sd
+ $actual_tmpdir_sd | Assert-Equal -Expected $expected_sd
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equals -Expected $false
- (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equals -Expected $true
- $output.warnings.Count | Assert-Equals -Expected 0
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false
+ (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
+ $output.warnings.Count | Assert-Equal -Expected 0
}
"Module tmpdir with missing remote_tmp" = {
@@ -1620,36 +1684,37 @@ test_no_log - Invoked with:
_ansible_remote_tmp = $remote_tmp.ToString()
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
- (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equals -Expected $false
+ (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $false
$actual_tmpdir = $m.Tmpdir
$parent_tmpdir = Split-Path -Path $actual_tmpdir -Parent
$tmpdir_name = Split-Path -Path $actual_tmpdir -Leaf
- $parent_tmpdir | Assert-Equals -Expected $remote_tmp
- $tmpdir_name.StartSwith("ansible-moduletmp-") | Assert-Equals -Expected $true
- (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equals -Expected $true
- (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equals -Expected $true
+ $parent_tmpdir | Assert-Equal -Expected $remote_tmp
+ $tmpdir_name.StartSwith("ansible-moduletmp-") | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
$children = [System.IO.Directory]::EnumerateDirectories($remote_tmp)
- $children.Count | Assert-Equals -Expected 1
+ $children.Count | Assert-Equal -Expected 1
$actual_remote_sd = (Get-Acl -Path $remote_tmp).GetSecurityDescriptorSddlForm("Access, Owner")
$actual_tmpdir_sd = (Get-Acl -Path $actual_tmpdir).GetSecurityDescriptorSddlForm("Access, Owner")
- $actual_remote_sd | Assert-Equals -Expected $expected_sd
- $actual_tmpdir_sd | Assert-Equals -Expected $expected_sd
+ $actual_remote_sd | Assert-Equal -Expected $expected_sd
+ $actual_tmpdir_sd | Assert-Equal -Expected $expected_sd
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equals -Expected $false
- (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equals -Expected $true
- $output.warnings.Count | Assert-Equals -Expected 1
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false
+ (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
+ $output.warnings.Count | Assert-Equal -Expected 1
$nt_account = $current_user.Translate([System.Security.Principal.NTAccount])
$actual_warning = "Module remote_tmp $remote_tmp did not exist and was created with FullControl to $nt_account, "
$actual_warning += "this may cause issues when running as another user. To avoid this, "
$actual_warning += "create the remote_tmp dir with the correct permissions manually"
- $actual_warning | Assert-Equals -Expected $output.warnings[0]
+ $actual_warning | Assert-Equal -Expected $output.warnings[0]
}
"Module tmp, keep remote files" = {
@@ -1665,19 +1730,20 @@ test_no_log - Invoked with:
$parent_tmpdir = Split-Path -Path $actual_tmpdir -Parent
$tmpdir_name = Split-Path -Path $actual_tmpdir -Leaf
- $parent_tmpdir | Assert-Equals -Expected $remote_tmp
- $tmpdir_name.StartSwith("ansible-moduletmp-") | Assert-Equals -Expected $true
- (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equals -Expected $true
- (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equals -Expected $true
+ $parent_tmpdir | Assert-Equal -Expected $remote_tmp
+ $tmpdir_name.StartSwith("ansible-moduletmp-") | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equals -Expected $true
- (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equals -Expected $true
- $output.warnings.Count | Assert-Equals -Expected 0
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
+ $output.warnings.Count | Assert-Equal -Expected 0
Remove-Item -LiteralPath $actual_tmpdir -Force -Recurse
}
@@ -1688,22 +1754,23 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: argument spec entry contains an invalid key 'invalid', valid keys: apply_defaults, "
$expected_msg += "aliases, choices, default, deprecated_aliases, elements, mutually_exclusive, no_log, options, "
$expected_msg += "removed_in_version, removed_at_date, removed_from_collection, required, required_by, required_if, "
$expected_msg += "required_one_of, required_together, supports_check_mode, type"
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- ("exception" -cin $actual.Keys) | Assert-Equals -Expected $true
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ ("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid argument spec key - nested" = {
@@ -1721,22 +1788,23 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: argument spec entry contains an invalid key 'invalid', valid keys: apply_defaults, "
$expected_msg += "aliases, choices, default, deprecated_aliases, elements, mutually_exclusive, no_log, options, "
$expected_msg += "removed_in_version, removed_at_date, removed_from_collection, required, required_by, required_if, "
$expected_msg += "required_one_of, required_together, supports_check_mode, type - found in option_key -> sub_option_key"
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- ("exception" -cin $actual.Keys) | Assert-Equals -Expected $true
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ ("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid argument spec value type" = {
@@ -1746,20 +1814,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: argument spec for 'apply_defaults' did not match expected "
$expected_msg += "type System.Boolean: actual type System.String"
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- ("exception" -cin $actual.Keys) | Assert-Equals -Expected $true
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ ("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid argument spec option type" = {
@@ -1773,20 +1842,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: type 'invalid type' is unsupported - found in option_key. "
$expected_msg += "Valid types are: bool, dict, float, int, json, list, path, raw, sid, str"
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- ("exception" -cin $actual.Keys) | Assert-Equals -Expected $true
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ ("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid argument spec option element type" = {
@@ -1801,20 +1871,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: elements 'invalid type' is unsupported - found in option_key. "
$expected_msg += "Valid types are: bool, dict, float, int, json, list, path, raw, sid, str"
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- ("exception" -cin $actual.Keys) | Assert-Equals -Expected $true
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ ("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid deprecated aliases entry - no version and date" = {
@@ -1822,9 +1893,9 @@ test_no_log - Invoked with:
options = @{
option_key = @{
type = "str"
- aliases = ,"alias_name"
+ aliases = , "alias_name"
deprecated_aliases = @(
- @{name = "alias_name"}
+ @{name = "alias_name" }
)
}
}
@@ -1833,19 +1904,20 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: One of version or date is required in a deprecated_aliases entry"
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- ("exception" -cin $actual.Keys) | Assert-Equals -Expected $true
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ ("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid deprecated aliases entry - no name (nested)" = {
@@ -1856,9 +1928,9 @@ test_no_log - Invoked with:
options = @{
sub_option_key = @{
type = "str"
- aliases = ,"alias_name"
+ aliases = , "alias_name"
deprecated_aliases = @(
- @{version = "2.10"}
+ @{version = "2.10" }
)
}
}
@@ -1875,12 +1947,13 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.ArgumentException] {
+ }
+ catch [System.ArgumentException] {
$failed = $true
$expected_msg = "name is required in a deprecated_aliases entry - found in option_key"
- $_.Exception.Message | Assert-Equals -Expected $expected_msg
+ $_.Exception.Message | Assert-Equal -Expected $expected_msg
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Invalid deprecated aliases entry - both version and date" = {
@@ -1888,7 +1961,7 @@ test_no_log - Invoked with:
options = @{
option_key = @{
type = "str"
- aliases = ,"alias_name"
+ aliases = , "alias_name"
deprecated_aliases = @(
@{
name = "alias_name"
@@ -1903,19 +1976,20 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: Only one of version or date is allowed in a deprecated_aliases entry"
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- ("exception" -cin $actual.Keys) | Assert-Equals -Expected $true
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ ("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid deprecated aliases entry - wrong date type" = {
@@ -1923,7 +1997,7 @@ test_no_log - Invoked with:
options = @{
option_key = @{
type = "str"
- aliases = ,"alias_name"
+ aliases = , "alias_name"
deprecated_aliases = @(
@{
name = "alias_name"
@@ -1937,19 +2011,20 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: A deprecated_aliases date must be a DateTime object"
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- ("exception" -cin $actual.Keys) | Assert-Equals -Expected $true
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ ("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Spec required and default set at the same time" = {
@@ -1965,19 +2040,20 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: required and default are mutually exclusive for option_key"
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- ("exception" -cin $actual.Keys) | Assert-Equals -Expected $true
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ ("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Unsupported options" = {
@@ -1997,21 +2073,22 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "Unsupported parameters for (undefined win module) module: another_key, invalid_key. "
$expected_msg += "Supported parameters include: option_key"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Check mode and module doesn't support check mode" = {
@@ -2030,20 +2107,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "remote module (undefined win module) does not support check mode"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.skipped | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = @{option_key = "abc"}}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.skipped | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = "abc" } }
}
"Check mode with suboption without supports_check_mode" = {
@@ -2065,7 +2143,7 @@ test_no_log - Invoked with:
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- $m.CheckMode | Assert-Equals -Expected $true
+ $m.CheckMode | Assert-Equal -Expected $true
}
"Type conversion error" = {
@@ -2083,21 +2161,22 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "argument for option_key is of type System.String and we were unable to convert to int: "
$expected_msg += "Input string was not in a correct format."
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Type conversion error - delegate" = {
@@ -2107,7 +2186,7 @@ test_no_log - Invoked with:
type = "dict"
options = @{
sub_option_key = @{
- type = [Func[[Object], [UInt64]]]{ [System.UInt64]::Parse($args[0]) }
+ type = [Func[[Object], [UInt64]]] { [System.UInt64]::Parse($args[0]) }
}
}
}
@@ -2122,22 +2201,23 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "argument for sub_option_key is of type System.String and we were unable to convert to delegate: "
$expected_msg += "Exception calling `"Parse`" with `"1`" argument(s): `"Input string was not in a correct format.`" "
$expected_msg += "found in option_key"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Numeric choices" = {
@@ -2156,12 +2236,13 @@ test_no_log - Invoked with:
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $output.Keys.Count | Assert-Equals -Expected 2
- $output.changed | Assert-Equals -Expected $false
- $output.invocation | Assert-DictionaryEquals -Expected @{module_args = @{option_key = 2}}
+ $output.Keys.Count | Assert-Equal -Expected 2
+ $output.changed | Assert-Equal -Expected $false
+ $output.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = 2 } }
}
"Case insensitive choice" = {
@@ -2179,17 +2260,18 @@ test_no_log - Invoked with:
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$expected_warning = "value of option_key was a case insensitive match of one of: abc, def. "
$expected_warning += "Checking of choices will be case sensitive in a future Ansible release. "
$expected_warning += "Case insensitive matches were: ABC"
- $output.invocation | Assert-DictionaryEquals -Expected @{module_args = @{option_key = "ABC"}}
+ $output.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = "ABC" } }
# We have disabled the warnings for now
- #$output.warnings.Count | Assert-Equals -Expected 1
- #$output.warnings[0] | Assert-Equals -Expected $expected_warning
+ #$output.warnings.Count | Assert-Equal -Expected 1
+ #$output.warnings[0] | Assert-Equal -Expected $expected_warning
}
"Case insensitive choice no_log" = {
@@ -2208,17 +2290,18 @@ test_no_log - Invoked with:
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$expected_warning = "value of option_key was a case insensitive match of one of: abc, def. "
$expected_warning += "Checking of choices will be case sensitive in a future Ansible release. "
$expected_warning += "Case insensitive matches were: VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
- $output.invocation | Assert-DictionaryEquals -Expected @{module_args = @{option_key = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"}}
+ $output.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" } }
# We have disabled the warnings for now
- #$output.warnings.Count | Assert-Equals -Expected 1
- #$output.warnings[0] | Assert-Equals -Expected $expected_warning
+ #$output.warnings.Count | Assert-Equal -Expected 1
+ #$output.warnings[0] | Assert-Equal -Expected $expected_warning
}
"Case insentitive choice as list" = {
@@ -2238,17 +2321,18 @@ test_no_log - Invoked with:
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$expected_warning = "value of option_key was a case insensitive match of one or more of: abc, def, ghi, JKL. "
$expected_warning += "Checking of choices will be case sensitive in a future Ansible release. "
$expected_warning += "Case insensitive matches were: AbC, jkl"
- $output.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $output.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
# We have disabled the warnings for now
- #$output.warnings.Count | Assert-Equals -Expected 1
- #$output.warnings[0] | Assert-Equals -Expected $expected_warning
+ #$output.warnings.Count | Assert-Equal -Expected 1
+ #$output.warnings[0] | Assert-Equal -Expected $expected_warning
}
"Invalid choice" = {
@@ -2266,20 +2350,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "value of option_key must be one of: a, b. Got no match for: c"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Invalid choice with no_log" = {
@@ -2298,20 +2383,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "value of option_key must be one of: a, b. Got no match for: ********"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = @{option_key = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"}}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" } }
}
"Invalid choice in list" = {
@@ -2330,20 +2416,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "value of option_key must be one or more of: a, b. Got no match for: c"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Mutually exclusive options" = {
@@ -2352,7 +2439,7 @@ test_no_log - Invoked with:
option1 = @{}
option2 = @{}
}
- mutually_exclusive = @(,@("option1", "option2"))
+ mutually_exclusive = @(, @("option1", "option2"))
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "a"
@@ -2362,27 +2449,28 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "parameters are mutually exclusive: option1, option2"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Missing required argument" = {
$spec = @{
options = @{
option1 = @{}
- option2 = @{required = $true}
+ option2 = @{required = $true }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
@@ -2392,20 +2480,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "missing required arguments: option2"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Missing required argument subspec - no value defined" = {
@@ -2426,16 +2515,17 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.Keys.Count | Assert-Equals -Expected 2
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 2
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Missing required argument subspec" = {
@@ -2461,20 +2551,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "missing required arguments: sub_option_key found in option_key"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required together not set" = {
@@ -2483,7 +2574,7 @@ test_no_log - Invoked with:
option1 = @{}
option2 = @{}
}
- required_together = @(,@("option1", "option2"))
+ required_together = @(, @("option1", "option2"))
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "abc"
@@ -2492,20 +2583,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "parameters are required together: option1, option2"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required together not set - subspec" = {
@@ -2517,11 +2609,11 @@ test_no_log - Invoked with:
option1 = @{}
option2 = @{}
}
- required_together = @(,@("option1", "option2"))
+ required_together = @(, @("option1", "option2"))
}
another_option = @{}
}
- required_together = @(,@("option_key", "another_option"))
+ required_together = @(, @("option_key", "another_option"))
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = @{
@@ -2533,20 +2625,21 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "parameters are required together: option1, option2 found in option_key"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required one of not set" = {
@@ -2565,58 +2658,60 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "one of the following is required: option2, option3"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if invalid entries" = {
$spec = @{
options = @{
- state = @{choices = "absent", "present"; default = "present"}
- path = @{type = "path"}
+ state = @{choices = "absent", "present"; default = "present" }
+ path = @{type = "path" }
}
- required_if = @(,@("state", "absent"))
+ required_if = @(, @("state", "absent"))
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "internal error: invalid required_if value count of 2, expecting 3 or 4 entries"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if no missing option" = {
$spec = @{
options = @{
- state = @{choices = "absent", "present"; default = "present"}
+ state = @{choices = "absent", "present"; default = "present" }
name = @{}
- path = @{type = "path"}
+ path = @{type = "path" }
}
- required_if = @(,@("state", "absent", @("name", "path")))
+ required_if = @(, @("state", "absent", @("name", "path")))
}
Set-Variable -Name complex_args -Scope Global -Value @{
name = "abc"
@@ -2626,26 +2721,27 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.Keys.Count | Assert-Equals -Expected 2
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 2
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if missing option" = {
$spec = @{
options = @{
- state = @{choices = "absent", "present"; default = "present"}
+ state = @{choices = "absent", "present"; default = "present" }
name = @{}
- path = @{type = "path"}
+ path = @{type = "path" }
}
- required_if = @(,@("state", "absent", @("name", "path")))
+ required_if = @(, @("state", "absent", @("name", "path")))
}
Set-Variable -Name complex_args -Scope Global -Value @{
state = "absent"
@@ -2655,30 +2751,31 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "state is absent but all of the following are missing: path"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if missing option and required one is set" = {
$spec = @{
options = @{
- state = @{choices = "absent", "present"; default = "present"}
+ state = @{choices = "absent", "present"; default = "present" }
name = @{}
- path = @{type = "path"}
+ path = @{type = "path" }
}
- required_if = @(,@("state", "absent", @("name", "path"), $true))
+ required_if = @(, @("state", "absent", @("name", "path"), $true))
}
Set-Variable -Name complex_args -Scope Global -Value @{
state = "absent"
@@ -2687,30 +2784,31 @@ test_no_log - Invoked with:
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$expected_msg = "state is absent but any of the following are missing: name, path"
- $actual.Keys.Count | Assert-Equals -Expected 4
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected $expected_msg
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 4
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected $expected_msg
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if missing option but one required set" = {
$spec = @{
options = @{
- state = @{choices = "absent", "present"; default = "present"}
+ state = @{choices = "absent", "present"; default = "present" }
name = @{}
- path = @{type = "path"}
+ path = @{type = "path" }
}
- required_if = @(,@("state", "absent", @("name", "path"), $true))
+ required_if = @(, @("state", "absent", @("name", "path"), $true))
}
Set-Variable -Name complex_args -Scope Global -Value @{
state = "absent"
@@ -2721,16 +2819,17 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.Keys.Count | Assert-Equals -Expected 2
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.Keys.Count | Assert-Equal -Expected 2
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"PS Object in return result" = {
@@ -2738,38 +2837,39 @@ test_no_log - Invoked with:
# JavaScriptSerializer struggles with PS Object like PSCustomObject due to circular references, this test makes
# sure we can handle these types of objects without bombing
- $m.Result.output = [PSCustomObject]@{a = "a"; b = "b"}
+ $m.Result.output = [PSCustomObject]@{a = "a"; b = "b" }
$failed = $true
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.Keys.Count | Assert-Equals -Expected 3
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = @{}}
- $actual.output | Assert-DictionaryEquals -Expected @{a = "a"; b = "b"}
+ $actual.Keys.Count | Assert-Equal -Expected 3
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{} }
+ $actual.output | Assert-DictionaryEqual -Expected @{a = "a"; b = "b" }
}
"String json array to object" = {
$input_json = '["abc", "def"]'
$actual = [Ansible.Basic.AnsibleModule]::FromJson($input_json)
- $actual -is [Array] | Assert-Equals -Expected $true
- $actual.Length | Assert-Equals -Expected 2
- $actual[0] | Assert-Equals -Expected "abc"
- $actual[1] | Assert-Equals -Expected "def"
+ $actual -is [Array] | Assert-Equal -Expected $true
+ $actual.Length | Assert-Equal -Expected 2
+ $actual[0] | Assert-Equal -Expected "abc"
+ $actual[1] | Assert-Equal -Expected "def"
}
"String json array of dictionaries to object" = {
$input_json = '[{"abc":"def"}]'
$actual = [Ansible.Basic.AnsibleModule]::FromJson($input_json)
- $actual -is [Array] | Assert-Equals -Expected $true
- $actual.Length | Assert-Equals -Expected 1
- $actual[0] | Assert-DictionaryEquals -Expected @{"abc" = "def"}
+ $actual -is [Array] | Assert-Equal -Expected $true
+ $actual.Length | Assert-Equal -Expected 1
+ $actual[0] | Assert-DictionaryEqual -Expected @{"abc" = "def" }
}
"Spec with fragments" = {
@@ -2793,15 +2893,16 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{module_args = $complex_args}
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Fragment spec that with a deprecated alias" = {
@@ -2811,13 +2912,13 @@ test_no_log - Invoked with:
aliases = @("alias1_spec")
type = "str"
deprecated_aliases = @(
- @{name = "alias1_spec"; version = "2.0"}
+ @{name = "alias1_spec"; version = "2.0" }
)
}
option2 = @{
aliases = @("alias2_spec")
deprecated_aliases = @(
- @{name = "alias2_spec"; version = "2.0"; collection_name = "ansible.builtin"}
+ @{name = "alias2_spec"; version = "2.0"; collection_name = "ansible.builtin" }
)
}
}
@@ -2831,7 +2932,7 @@ test_no_log - Invoked with:
option2 = @{
aliases = @("alias2")
deprecated_aliases = @(
- @{name = "alias2"; version = "2.0"; collection_name = "foo.bar"}
+ @{name = "alias2"; version = "2.0"; collection_name = "foo.bar" }
)
type = "str"
}
@@ -2847,22 +2948,23 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.deprecations.Count | Assert-Equals -Expected 2
- $actual.deprecations[0] | Assert-DictionaryEquals -Expected @{
+ $actual.deprecations.Count | Assert-Equal -Expected 2
+ $actual.deprecations[0] | Assert-DictionaryEqual -Expected @{
msg = "Alias 'alias1_spec' is deprecated. See the module docs for more information"; version = "2.0"; collection_name = $null
}
- $actual.deprecations[1] | Assert-DictionaryEquals -Expected @{
+ $actual.deprecations[1] | Assert-DictionaryEqual -Expected @{
msg = "Alias 'alias2' is deprecated. See the module docs for more information"; version = "2.0"; collection_name = "foo.bar"
}
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{
module_args = @{
option1 = "option1"
alias1_spec = "option1"
@@ -2879,7 +2981,7 @@ test_no_log - Invoked with:
option2 = @{ type = "str" }
}
mutually_exclusive = @(
- ,@('option1', 'option2')
+ , @('option1', 'option2')
)
}
$fragment1 = @{
@@ -2888,7 +2990,7 @@ test_no_log - Invoked with:
fragment1_2 = @{ type = "str" }
}
mutually_exclusive = @(
- ,@('fragment1_1', 'fragment1_2')
+ , @('fragment1_1', 'fragment1_2')
)
}
$fragment2 = @{
@@ -2907,17 +3009,18 @@ test_no_log - Invoked with:
$failed = $false
try {
[Ansible.Basic.AnsibleModule]::Create(@(), $spec, @($fragment1, $fragment2))
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.changed | Assert-Equals -Expected $false
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg | Assert-Equals -Expected "parameters are mutually exclusive: fragment1_1, fragment1_2"
- $actual.invocation | Assert-DictionaryEquals -Expected @{ module_args = $complex_args }
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg | Assert-Equal -Expected "parameters are mutually exclusive: fragment1_1, fragment1_2"
+ $actual.invocation | Assert-DictionaryEqual -Expected @{ module_args = $complex_args }
}
"Fragment spec with no_log" = {
@@ -2945,15 +3048,16 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.changed | Assert-Equals -Expected $false
- $actual.invocation | Assert-DictionaryEquals -Expected @{
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.invocation | Assert-DictionaryEqual -Expected @{
module_args = @{
option1 = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
alias = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
@@ -2979,15 +3083,16 @@ test_no_log - Invoked with:
$failed = $false
try {
[Ansible.Basic.AnsibleModule]::Create(@(), $spec, @($fragment))
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 1"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.failed | Assert-Equals -Expected $true
- $actual.msg.StartsWith("internal error: argument spec entry contains an invalid key 'invalid', valid keys: ") | Assert-Equals -Expected $true
+ $actual.failed | Assert-Equal -Expected $true
+ $actual.msg.StartsWith("internal error: argument spec entry contains an invalid key 'invalid', valid keys: ") | Assert-Equal -Expected $true
}
"Spec with different list types" = {
@@ -2996,25 +3101,25 @@ test_no_log - Invoked with:
# Single element of the same list type not in a list
option1 = @{
aliases = "alias1"
- deprecated_aliases = @{name="alias1";version="2.0";collection_name="foo.bar"}
+ deprecated_aliases = @{name = "alias1"; version = "2.0"; collection_name = "foo.bar" }
}
# Arrays
option2 = @{
- aliases = ,"alias2"
- deprecated_aliases = ,@{name="alias2";version="2.0";collection_name="foo.bar"}
+ aliases = , "alias2"
+ deprecated_aliases = , @{name = "alias2"; version = "2.0"; collection_name = "foo.bar" }
}
# ArrayList
option3 = @{
aliases = [System.Collections.ArrayList]@("alias3")
- deprecated_aliases = [System.Collections.ArrayList]@(@{name="alias3";version="2.0";collection_name="foo.bar"})
+ deprecated_aliases = [System.Collections.ArrayList]@(@{name = "alias3"; version = "2.0"; collection_name = "foo.bar" })
}
# Generic.List[Object]
option4 = @{
aliases = [System.Collections.Generic.List[Object]]@("alias4")
- deprecated_aliases = [System.Collections.Generic.List[Object]]@(@{name="alias4";version="2.0";collection_name="foo.bar"})
+ deprecated_aliases = [System.Collections.Generic.List[Object]]@(@{name = "alias4"; version = "2.0"; collection_name = "foo.bar" })
}
# Generic.List[T]
@@ -3024,7 +3129,7 @@ test_no_log - Invoked with:
}
}
}
- $spec.options.option5.deprecated_aliases.Add(@{name="alias5";version="2.0";collection_name="foo.bar"})
+ $spec.options.option5.deprecated_aliases.Add(@{name = "alias5"; version = "2.0"; collection_name = "foo.bar" })
Set-Variable -Name complex_args -Scope Global -Value @{
alias1 = "option1"
@@ -3038,21 +3143,22 @@ test_no_log - Invoked with:
$failed = $false
try {
$m.ExitJson()
- } catch [System.Management.Automation.RuntimeException] {
+ }
+ catch [System.Management.Automation.RuntimeException] {
$failed = $true
- $_.Exception.Message | Assert-Equals -Expected "exit: 0"
+ $_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
- $actual.changed | Assert-Equals -Expected $false
- $actual.deprecations.Count | Assert-Equals -Expected 5
+ $actual.changed | Assert-Equal -Expected $false
+ $actual.deprecations.Count | Assert-Equal -Expected 5
foreach ($dep in $actual.deprecations) {
- $dep.msg -like "Alias 'alias?' is deprecated. See the module docs for more information" | Assert-Equals -Expected $true
- $dep.version | Assert-Equals -Expected '2.0'
- $dep.collection_name | Assert-Equals -Expected 'foo.bar'
+ $dep.msg -like "Alias 'alias?' is deprecated. See the module docs for more information" | Assert-Equal -Expected $true
+ $dep.version | Assert-Equal -Expected '2.0'
+ $dep.collection_name | Assert-Equal -Expected 'foo.bar'
}
- $actual.invocation | Assert-DictionaryEquals -Expected @{
+ $actual.invocation | Assert-DictionaryEqual -Expected @{
module_args = @{
alias1 = "option1"
option1 = "option1"
@@ -3078,7 +3184,8 @@ try {
&$test_impl.Value
}
$module.Result.data = "success"
-} catch [System.Management.Automation.RuntimeException] {
+}
+catch [System.Management.Automation.RuntimeException] {
$module.Result.failed = $true
$module.Result.test = $test
$module.Result.line = $_.InvocationInfo.ScriptLineNumber
@@ -3088,7 +3195,8 @@ try {
# The exception was caused by an unexpected Exit call, log that on the output
$module.Result.output = (ConvertFrom-Json -InputObject $_.Exception.InnerException.Output)
$module.Result.msg = "Uncaught AnsibleModule exit in tests, see output"
- } else {
+ }
+ else {
# Unrelated exception
$module.Result.exception = $_.Exception.ToString()
$module.Result.msg = "Uncaught exception: $(($_ | Out-String).ToString())"
diff --git a/test/integration/targets/module_utils_Ansible.Become/library/ansible_become_tests.ps1 b/test/integration/targets/module_utils_Ansible.Become/library/ansible_become_tests.ps1
index 4d1f319b..6e363211 100644
--- a/test/integration/targets/module_utils_Ansible.Become/library/ansible_become_tests.ps1
+++ b/test/integration/targets/module_utils_Ansible.Become/library/ansible_become_tests.ps1
@@ -5,37 +5,40 @@
$module = [Ansible.Basic.AnsibleModule]::Create($args, @{})
-Function Assert-Equals {
+Function Assert-Equal {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $matched = $false
- if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
- $Actual.Count | Assert-Equals -Expected $Expected.Count
- for ($i = 0; $i -lt $Actual.Count; $i++) {
- $actual_value = $Actual[$i]
- $expected_value = $Expected[$i]
- Assert-Equals -Actual $actual_value -Expected $expected_value
+ process {
+ $matched = $false
+ if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
+ $Actual.Count | Assert-Equal -Expected $Expected.Count
+ for ($i = 0; $i -lt $Actual.Count; $i++) {
+ $actual_value = $Actual[$i]
+ $expected_value = $Expected[$i]
+ Assert-Equal -Actual $actual_value -Expected $expected_value
+ }
+ $matched = $true
}
- $matched = $true
- } else {
- $matched = $Actual -ceq $Expected
- }
-
- if (-not $matched) {
- if ($Actual -is [PSObject]) {
- $Actual = $Actual.ToString()
+ else {
+ $matched = $Actual -ceq $Expected
}
- $call_stack = (Get-PSCallStack)[1]
- $module.Result.test = $test
- $module.Result.actual = $Actual
- $module.Result.expected = $Expected
- $module.Result.line = $call_stack.ScriptLineNumber
- $module.Result.method = $call_stack.Position.Text
- $module.FailJson("AssertionError: actual != expected")
+ if (-not $matched) {
+ if ($Actual -is [PSObject]) {
+ $Actual = $Actual.ToString()
+ }
+
+ $call_stack = (Get-PSCallStack)[1]
+ $module.Result.test = $test
+ $module.Result.actual = $Actual
+ $module.Result.expected = $Expected
+ $module.Result.line = $call_stack.ScriptLineNumber
+ $module.Result.method = $call_stack.Position.Text
+ $module.FailJson("AssertionError: actual != expected")
+ }
}
}
@@ -437,101 +440,101 @@ $tests = @{
"Runas standard user" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, $become_pass,
"powershell.exe -NoProfile -ExecutionPolicy ByPass -File $tmp_script")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Interactive"
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.UserSid.Value | Assert-Equals -Expected $standard_user_sid
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $medium_integrity_sid
+ $stdout.LogonType | Assert-Equal -Expected "Interactive"
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.UserSid.Value | Assert-Equal -Expected $standard_user_sid
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $medium_integrity_sid
}
"Runas admin user" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass,
"powershell.exe -NoProfile -ExecutionPolicy ByPass -File $tmp_script")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Interactive"
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.UserSid.Value | Assert-Equals -Expected $admin_user_sid
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $high_integrity_sid
+ $stdout.LogonType | Assert-Equal -Expected "Interactive"
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.UserSid.Value | Assert-Equal -Expected $admin_user_sid
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $high_integrity_sid
}
"Runas SYSTEM" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("SYSTEM", $null,
"powershell.exe -NoProfile -ExecutionPolicy ByPass -File $tmp_script")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "System"
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.UserSid.Value | Assert-Equals -Expected "S-1-5-18"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $system_integrity_sid
+ $stdout.LogonType | Assert-Equal -Expected "System"
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.UserSid.Value | Assert-Equal -Expected "S-1-5-18"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $system_integrity_sid
$with_domain = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("NT AUTHORITY\System", $null, "whoami.exe")
- $with_domain.StandardOut | Assert-Equals -Expected "nt authority\system`r`n"
+ $with_domain.StandardOut | Assert-Equal -Expected "nt authority\system`r`n"
}
"Runas LocalService" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("LocalService", $null,
"powershell.exe -NoProfile -ExecutionPolicy ByPass -File $tmp_script")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Service"
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.UserSid.Value | Assert-Equals -Expected "S-1-5-19"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $system_integrity_sid
+ $stdout.LogonType | Assert-Equal -Expected "Service"
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.UserSid.Value | Assert-Equal -Expected "S-1-5-19"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $system_integrity_sid
$with_domain = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("NT AUTHORITY\LocalService", $null, "whoami.exe")
- $with_domain.StandardOut | Assert-Equals -Expected "nt authority\local service`r`n"
+ $with_domain.StandardOut | Assert-Equal -Expected "nt authority\local service`r`n"
}
"Runas NetworkService" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("NetworkService", $null,
"powershell.exe -NoProfile -ExecutionPolicy ByPass -File $tmp_script")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Service"
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.UserSid.Value | Assert-Equals -Expected "S-1-5-20"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $system_integrity_sid
+ $stdout.LogonType | Assert-Equal -Expected "Service"
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.UserSid.Value | Assert-Equal -Expected "S-1-5-20"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $system_integrity_sid
$with_domain = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("NT AUTHORITY\NetworkService", $null, "whoami.exe")
- $with_domain.StandardOut | Assert-Equals -Expected "nt authority\network service`r`n"
+ $with_domain.StandardOut | Assert-Equal -Expected "nt authority\network service`r`n"
}
"Runas without working dir set" = {
$expected = "$env:SystemRoot\system32`r`n"
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, $become_pass, 0, "Interactive", $null,
'powershell.exe $pwd.Path', $null, $null, "")
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"Runas with working dir set" = {
$expected = "$env:SystemRoot`r`n"
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, $become_pass, 0, "Interactive", $null,
'powershell.exe $pwd.Path', $env:SystemRoot, $null, "")
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"Runas without environment set" = {
$expected = "Windows_NT`r`n"
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, $become_pass, 0, "Interactive", $null,
'powershell.exe $env:TEST; $env:OS', $null, $null, "")
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"Runas with environment set" = {
@@ -541,52 +544,53 @@ $tests = @{
}
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass, 0, "Interactive", $null,
'cmd.exe /c set', $null, $env_vars, "")
- ("TEST=tesTing" -cin $actual.StandardOut.Split("`r`n")) | Assert-Equals -Expected $true
- ("TEST2=Testing 2" -cin $actual.StandardOut.Split("`r`n")) | Assert-Equals -Expected $true
- ("OS=Windows_NT" -cnotin $actual.StandardOut.Split("`r`n")) | Assert-Equals -Expected $true
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ ("TEST=tesTing" -cin $actual.StandardOut.Split("`r`n")) | Assert-Equal -Expected $true
+ ("TEST2=Testing 2" -cin $actual.StandardOut.Split("`r`n")) | Assert-Equal -Expected $true
+ ("OS=Windows_NT" -cnotin $actual.StandardOut.Split("`r`n")) | Assert-Equal -Expected $true
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"Runas with string stdin" = {
$expected = "input value`r`n`r`n"
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass, 0, "Interactive", $null,
'powershell.exe [System.Console]::In.ReadToEnd()', $null, $null, "input value")
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"Runas with string stdin and newline" = {
$expected = "input value`r`n`r`n"
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass, 0, "Interactive", $null,
'powershell.exe [System.Console]::In.ReadToEnd()', $null, $null, "input value`r`n")
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"Runas with byte stdin" = {
$expected = "input value`r`n"
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass, 0, "Interactive", $null,
'powershell.exe [System.Console]::In.ReadToEnd()', $null, $null, [System.Text.Encoding]::UTF8.GetBytes("input value"))
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"Missing executable" = {
$failed = $false
try {
[Ansible.Become.BecomeUtil]::CreateProcessAsUser("SYSTEM", $null, "fake.exe")
- } catch {
+ }
+ catch {
$failed = $true
- $_.Exception.InnerException.GetType().FullName | Assert-Equals -Expected "Ansible.Process.Win32Exception"
+ $_.Exception.InnerException.GetType().FullName | Assert-Equal -Expected "Ansible.Process.Win32Exception"
$expected = 'Exception calling "CreateProcessAsUser" with "3" argument(s): "CreateProcessWithTokenW() failed '
$expected += '(The system cannot find the file specified, Win32ErrorCode 2)"'
- $_.Exception.Message | Assert-Equals -Expected $expected
+ $_.Exception.Message | Assert-Equal -Expected $expected
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"CreateProcessAsUser with lpApplicationName" = {
@@ -594,112 +598,114 @@ $tests = @{
$full_path = "$($env:SystemRoot)\System32\WindowsPowerShell\v1.0\powershell.exe"
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("SYSTEM", $null, 0, "Interactive", $full_path,
"Write-Output 'abc'", $null, $null, "")
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("SYSTEM", $null, 0, "Interactive", $full_path,
"powershell.exe Write-Output 'abc'", $null, $null, "")
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcessAsUser with stderr" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("SYSTEM", $null, 0, "Interactive", $null,
"powershell.exe [System.Console]::Error.WriteLine('hi')", $null, $null, "")
- $actual.StandardOut | Assert-Equals -Expected ""
- $actual.StandardError | Assert-Equals -Expected "hi`r`n"
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected ""
+ $actual.StandardError | Assert-Equal -Expected "hi`r`n"
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcessAsUser with exit code" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("SYSTEM", $null, 0, "Interactive", $null,
"powershell.exe exit 10", $null, $null, "")
- $actual.StandardOut | Assert-Equals -Expected ""
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 10
+ $actual.StandardOut | Assert-Equal -Expected ""
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 10
}
"Local account with computer name" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("$env:COMPUTERNAME\$standard_user", $become_pass,
"powershell.exe -NoProfile -ExecutionPolicy ByPass -File $tmp_script")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Interactive"
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.UserSid.Value | Assert-Equals -Expected $standard_user_sid
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $medium_integrity_sid
+ $stdout.LogonType | Assert-Equal -Expected "Interactive"
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.UserSid.Value | Assert-Equal -Expected $standard_user_sid
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $medium_integrity_sid
}
"Local account with computer as period" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser(".\$standard_user", $become_pass,
"powershell.exe -NoProfile -ExecutionPolicy ByPass -File $tmp_script")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Interactive"
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.UserSid.Value | Assert-Equals -Expected $standard_user_sid
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $medium_integrity_sid
+ $stdout.LogonType | Assert-Equal -Expected "Interactive"
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.UserSid.Value | Assert-Equal -Expected $standard_user_sid
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $medium_integrity_sid
}
"Local account with invalid password" = {
$failed = $false
try {
[Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, "incorrect", "powershell.exe Write-Output abc")
- } catch {
+ }
+ catch {
$failed = $true
- $_.Exception.InnerException.GetType().FullName | Assert-Equals -Expected "Ansible.AccessToken.Win32Exception"
+ $_.Exception.InnerException.GetType().FullName | Assert-Equal -Expected "Ansible.AccessToken.Win32Exception"
# Server 2008 has a slightly different error msg, just assert we get the error 1326
- ($_.Exception.Message.Contains("Win32ErrorCode 1326")) | Assert-Equals -Expected $true
+ ($_.Exception.Message.Contains("Win32ErrorCode 1326")) | Assert-Equal -Expected $true
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Invalid account" = {
$failed = $false
try {
[Ansible.Become.BecomeUtil]::CreateProcessAsUser("incorrect", "incorrect", "powershell.exe Write-Output abc")
- } catch {
+ }
+ catch {
$failed = $true
- $_.Exception.InnerException.GetType().FullName | Assert-Equals -Expected "System.Security.Principal.IdentityNotMappedException"
+ $_.Exception.InnerException.GetType().FullName | Assert-Equal -Expected "System.Security.Principal.IdentityNotMappedException"
$expected = 'Exception calling "CreateProcessAsUser" with "3" argument(s): "Some or all '
$expected += 'identity references could not be translated."'
- $_.Exception.Message | Assert-Equals -Expected $expected
+ $_.Exception.Message | Assert-Equal -Expected $expected
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Interactive logon with standard" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, $become_pass, "WithProfile",
"Interactive", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Interactive"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $medium_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $standard_user_sid
+ $stdout.LogonType | Assert-Equal -Expected "Interactive"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $medium_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $standard_user_sid
}
"Batch logon with standard" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, $become_pass, "WithProfile",
"Batch", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Batch"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $medium_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $standard_user_sid
+ $stdout.LogonType | Assert-Equal -Expected "Batch"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $medium_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $standard_user_sid
}
"Network logon with standard" = {
@@ -709,15 +715,15 @@ $tests = @{
}
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, $become_pass, "WithProfile",
"Network", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Network"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $medium_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $standard_user_sid
+ $stdout.LogonType | Assert-Equal -Expected "Network"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $medium_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $standard_user_sid
}
"Network with cleartext logon with standard" = {
@@ -727,31 +733,31 @@ $tests = @{
}
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, $become_pass, "WithProfile",
"NetworkCleartext", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "NetworkCleartext"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $medium_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $standard_user_sid
+ $stdout.LogonType | Assert-Equal -Expected "NetworkCleartext"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $medium_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $standard_user_sid
}
"Logon without password with standard" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, [NullString]::Value, "WithProfile",
"Interactive", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
# Too unstable, there might be another process still lingering which causes become to steal instead of using
# S4U. Just don't check the type and source to verify we can become without a password
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- # $stdout.LogonType | Assert-Equals -Expected "Batch"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $medium_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- # $stdout.SourceName | Assert-Equals -Expected "ansible"
- $stdout.UserSid.Value | Assert-Equals -Expected $standard_user_sid
+ # $stdout.LogonType | Assert-Equal -Expected "Batch"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $medium_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ # $stdout.SourceName | Assert-Equal -Expected "ansible"
+ $stdout.UserSid.Value | Assert-Equal -Expected $standard_user_sid
}
"Logon without password and network type with standard" = {
@@ -761,45 +767,45 @@ $tests = @{
}
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($standard_user, [NullString]::Value, "WithProfile",
"Network", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
# Too unstable, there might be another process still lingering which causes become to steal instead of using
# S4U. Just don't check the type and source to verify we can become without a password
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- # $stdout.LogonType | Assert-Equals -Expected "Network"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $medium_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- # $stdout.SourceName | Assert-Equals -Expected "ansible"
- $stdout.UserSid.Value | Assert-Equals -Expected $standard_user_sid
+ # $stdout.LogonType | Assert-Equal -Expected "Network"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $medium_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ # $stdout.SourceName | Assert-Equal -Expected "ansible"
+ $stdout.UserSid.Value | Assert-Equal -Expected $standard_user_sid
}
"Interactive logon with admin" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass, "WithProfile",
"Interactive", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Interactive"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $high_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $admin_user_sid
+ $stdout.LogonType | Assert-Equal -Expected "Interactive"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $high_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $admin_user_sid
}
"Batch logon with admin" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass, "WithProfile",
"Batch", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Batch"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $high_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $admin_user_sid
+ $stdout.LogonType | Assert-Equal -Expected "Batch"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $high_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $admin_user_sid
}
"Network logon with admin" = {
@@ -809,15 +815,15 @@ $tests = @{
}
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass, "WithProfile",
"Network", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Network"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $high_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $admin_user_sid
+ $stdout.LogonType | Assert-Equal -Expected "Network"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $high_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $admin_user_sid
}
"Network with cleartext logon with admin" = {
@@ -827,15 +833,15 @@ $tests = @{
}
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass, "WithProfile",
"NetworkCleartext", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "NetworkCleartext"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $high_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $admin_user_sid
+ $stdout.LogonType | Assert-Equal -Expected "NetworkCleartext"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $high_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $admin_user_sid
}
"Fail to logon with null or empty password" = {
@@ -846,30 +852,31 @@ $tests = @{
# use [NullString]::Value instead if we want that behaviour. This just tests to see that an empty
# string won't go the S4U route.
[Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $null, "WithProfile",
- "Interactive", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- } catch {
+ "Interactive", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
+ }
+ catch {
$failed = $true
- $_.Exception.InnerException.GetType().FullName | Assert-Equals -Expected "Ansible.AccessToken.Win32Exception"
+ $_.Exception.InnerException.GetType().FullName | Assert-Equal -Expected "Ansible.AccessToken.Win32Exception"
# Server 2008 has a slightly different error msg, just assert we get the error 1326
- ($_.Exception.Message.Contains("Win32ErrorCode 1326")) | Assert-Equals -Expected $true
+ ($_.Exception.Message.Contains("Win32ErrorCode 1326")) | Assert-Equal -Expected $true
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Logon without password with admin" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, [NullString]::Value, "WithProfile",
"Interactive", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
# Too unstable, there might be another process still lingering which causes become to steal instead of using
# S4U. Just don't check the type and source to verify we can become without a password
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- # $stdout.LogonType | Assert-Equals -Expected "Batch"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $high_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- # $stdout.SourceName | Assert-Equals -Expected "ansible"
- $stdout.UserSid.Value | Assert-Equals -Expected $admin_user_sid
+ # $stdout.LogonType | Assert-Equal -Expected "Batch"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $high_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ # $stdout.SourceName | Assert-Equal -Expected "ansible"
+ $stdout.UserSid.Value | Assert-Equal -Expected $admin_user_sid
}
"Logon without password and network type with admin" = {
@@ -879,17 +886,17 @@ $tests = @{
}
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, [NullString]::Value, "WithProfile",
"Network", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
# Too unstable, there might be another process still lingering which causes become to steal instead of using
# S4U. Just don't check the type and source to verify we can become without a password
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- # $stdout.LogonType | Assert-Equals -Expected "Network"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $high_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $true
- # $stdout.SourceName | Assert-Equals -Expected "ansible"
- $stdout.UserSid.Value | Assert-Equals -Expected $admin_user_sid
+ # $stdout.LogonType | Assert-Equal -Expected "Network"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $high_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $true
+ # $stdout.SourceName | Assert-Equal -Expected "ansible"
+ $stdout.UserSid.Value | Assert-Equal -Expected $admin_user_sid
}
"Logon without profile with admin" = {
@@ -900,45 +907,45 @@ $tests = @{
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser($admin_user, $become_pass, 0,
"Interactive", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "Interactive"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $high_integrity_sid
- $stdout.ProfileLoaded | Assert-Equals -Expected $false
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $admin_user_sid
+ $stdout.LogonType | Assert-Equal -Expected "Interactive"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $high_integrity_sid
+ $stdout.ProfileLoaded | Assert-Equal -Expected $false
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $admin_user_sid
}
"Logon with network credentials and no profile" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("fakeuser", "fakepassword", "NetcredentialsOnly",
"NewCredentials", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "NewCredentials"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $current_user.MandatoryLabelSid.Value
+ $stdout.LogonType | Assert-Equal -Expected "NewCredentials"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $current_user.MandatoryLabelSid.Value
# while we didn't set WithProfile, the new process is based on the current process
- $stdout.ProfileLoaded | Assert-Equals -Expected $current_user.ProfileLoaded
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $current_user.UserSid.Value
+ $stdout.ProfileLoaded | Assert-Equal -Expected $current_user.ProfileLoaded
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $current_user.UserSid.Value
}
"Logon with network credentials and with profile" = {
$actual = [Ansible.Become.BecomeUtil]::CreateProcessAsUser("fakeuser", "fakepassword", "NetcredentialsOnly, WithProfile",
"NewCredentials", $null, "powershell.exe -NoProfile -", $tmp_dir, $null, $test_whoami + "`r`n")
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$stdout = ConvertFrom-Json -InputObject $actual.StandardOut
- $stdout.LogonType | Assert-Equals -Expected "NewCredentials"
- $stdout.MandatoryLabelSid.Value | Assert-Equals -Expected $current_user.MandatoryLabelSid.Value
- $stdout.ProfileLoaded | Assert-Equals -Expected $current_user.ProfileLoaded
- $stdout.SourceName | Assert-Equals -Expected "Advapi"
- $stdout.UserSid.Value | Assert-Equals -Expected $current_user.UserSid.Value
+ $stdout.LogonType | Assert-Equal -Expected "NewCredentials"
+ $stdout.MandatoryLabelSid.Value | Assert-Equal -Expected $current_user.MandatoryLabelSid.Value
+ $stdout.ProfileLoaded | Assert-Equal -Expected $current_user.ProfileLoaded
+ $stdout.SourceName | Assert-Equal -Expected "Advapi"
+ $stdout.UserSid.Value | Assert-Equal -Expected $current_user.UserSid.Value
}
}
@@ -965,7 +972,8 @@ try {
$user_obj = $adsi.Create("User", $user)
$user_obj.SetPassword($become_pass)
$user_obj.SetInfo()
- } else {
+ }
+ else {
$user_obj.SetPassword($become_pass)
}
$user_obj.RefreshCache()
@@ -973,13 +981,17 @@ try {
if ($user -eq $standard_user) {
$standard_user_sid = (New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList @($user_obj.ObjectSid.Value, 0)).Value
$group = [System.Security.Principal.WellKnownSidType]::BuiltinUsersSid
- } else {
+ }
+ else {
$admin_user_sid = (New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList @($user_obj.ObjectSid.Value, 0)).Value
$group = [System.Security.Principal.WellKnownSidType]::BuiltinAdministratorsSid
}
$group = (New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList $group, $null).Value
[string[]]$current_groups = $user_obj.Groups() | ForEach-Object {
- New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList @($_.GetType().InvokeMember("objectSID", "GetProperty", $null, $_, $null), 0)
+ New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList @(
+ $_.GetType().InvokeMember("objectSID", "GetProperty", $null, $_, $null),
+ 0
+ )
}
if ($current_groups -notcontains $group) {
$group_obj = $adsi.Children | Where-Object {
@@ -995,7 +1007,8 @@ try {
$test = $test_impl.Key
&$test_impl.Value
}
-} finally {
+}
+finally {
Remove-Item -LiteralPath $tmp_dir -Force -Recurse
foreach ($user in $standard_user, $admin_user) {
$user_obj = $adsi.Children | Where-Object { $_.SchemaClassName -eq "User" -and $_.Name -eq $user }
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps1
index d6b05691..d18c42d7 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.AddType/library/add_type_test.ps1
@@ -9,10 +9,13 @@ $result = @{
changed = $false
}
-Function Assert-Equals($actual, $expected) {
+Function Assert-Equal($actual, $expected) {
if ($actual -cne $expected) {
$call_stack = (Get-PSCallStack)[1]
- $error_msg = "AssertionError:`r`nActual: `"$actual`" != Expected: `"$expected`"`r`nLine: $($call_stack.ScriptLineNumber), Method: $($call_stack.Position.Text)"
+ $error_msg = -join @(
+ "AssertionError:`r`nActual: `"$actual`" != Expected: `"$expected`"`r`nLine: "
+ "$($call_stack.ScriptLineNumber), Method: $($call_stack.Position.Text)"
+ )
Fail-Json -obj $result -message $error_msg
}
}
@@ -34,15 +37,16 @@ namespace Namespace1
}
'@
$res = Add-CSharpType -References $code
-Assert-Equals -actual $res -expected $null
+Assert-Equal -actual $res -expected $null
$actual = [Namespace1.Class1]::GetString($false)
-Assert-Equals $actual -expected "Hello World"
+Assert-Equal $actual -expected "Hello World"
try {
[Namespace1.Class1]::GetString($true)
-} catch {
- Assert-Equals ($_.Exception.ToString().Contains("at Namespace1.Class1.GetString(Boolean error)`r`n")) -expected $true
+}
+catch {
+ Assert-Equal ($_.Exception.ToString().Contains("at Namespace1.Class1.GetString(Boolean error)`r`n")) -expected $true
}
$code_debug = @'
@@ -62,17 +66,18 @@ namespace Namespace2
}
'@
$res = Add-CSharpType -References $code_debug -IncludeDebugInfo
-Assert-Equals -actual $res -expected $null
+Assert-Equal -actual $res -expected $null
$actual = [Namespace2.Class2]::GetString($false)
-Assert-Equals $actual -expected "Hello World"
+Assert-Equal $actual -expected "Hello World"
try {
[Namespace2.Class2]::GetString($true)
-} catch {
+}
+catch {
$tmp_path = [System.IO.Path]::GetFullPath($env:TMP).ToLower()
- Assert-Equals ($_.Exception.ToString().ToLower().Contains("at namespace2.class2.getstring(boolean error) in $tmp_path")) -expected $true
- Assert-Equals ($_.Exception.ToString().Contains(".cs:line 10")) -expected $true
+ Assert-Equal ($_.Exception.ToString().ToLower().Contains("at namespace2.class2.getstring(boolean error) in $tmp_path")) -expected $true
+ Assert-Equal ($_.Exception.ToString().Contains(".cs:line 10")) -expected $true
}
$code_tmp = @'
@@ -93,19 +98,21 @@ namespace Namespace3
'@
$tmp_path = $env:USERPROFILE
$res = Add-CSharpType -References $code_tmp -IncludeDebugInfo -TempPath $tmp_path -PassThru
-Assert-Equals -actual $res.GetType().Name -expected "RuntimeAssembly"
-Assert-Equals -actual $res.Location -expected ""
-Assert-Equals -actual $res.GetTypes().Length -expected 1
-Assert-Equals -actual $res.GetTypes()[0].Name -expected "Class3"
+Assert-Equal -actual $res.GetType().Name -expected "RuntimeAssembly"
+Assert-Equal -actual $res.Location -expected ""
+Assert-Equal -actual $res.GetTypes().Length -expected 1
+Assert-Equal -actual $res.GetTypes()[0].Name -expected "Class3"
$actual = [Namespace3.Class3]::GetString($false)
-Assert-Equals $actual -expected "Hello World"
+Assert-Equal $actual -expected "Hello World"
try {
[Namespace3.Class3]::GetString($true)
-} catch {
- Assert-Equals ($_.Exception.ToString().ToLower().Contains("at namespace3.class3.getstring(boolean error) in $($tmp_path.ToLower())")) -expected $true
- Assert-Equals ($_.Exception.ToString().Contains(".cs:line 10")) -expected $true
+}
+catch {
+ $actual = $_.Exception.ToString().ToLower().Contains("at namespace3.class3.getstring(boolean error) in $($tmp_path.ToLower())")
+ Assert-Equal $actual -expected $true
+ Assert-Equal ($_.Exception.ToString().Contains(".cs:line 10")) -expected $true
}
$warning_code = @'
@@ -130,15 +137,17 @@ namespace Namespace4
$failed = $false
try {
Add-CSharpType -References $warning_code
-} catch {
+}
+catch {
$failed = $true
- Assert-Equals -actual ($_.Exception.Message.Contains("error CS0219: Warning as Error: The variable 'a' is assigned but its value is never used")) -expected $true
+ $actual = $_.Exception.Message.Contains("error CS0219: Warning as Error: The variable 'a' is assigned but its value is never used")
+ Assert-Equal -actual $actual -expected $true
}
-Assert-Equals -actual $failed -expected $true
+Assert-Equal -actual $failed -expected $true
Add-CSharpType -References $warning_code -IgnoreWarnings
$actual = [Namespace4.Class4]::GetString($true)
-Assert-Equals -actual $actual -expected "Hello World"
+Assert-Equal -actual $actual -expected "Hello World"
$reference_1 = @'
using System;
@@ -181,7 +190,7 @@ namespace Namespace6
Add-CSharpType -References $reference_1, $reference_2
$actual = [Namespace6.Class6]::GetString()
-Assert-Equals -actual $actual -expected "Hello World"
+Assert-Equal -actual $actual -expected "Hello World"
$ignored_warning = @'
using System;
@@ -202,7 +211,7 @@ namespace Namespace7
'@
Add-CSharpType -References $ignored_warning
$actual = [Namespace7.Class7]::GetString()
-Assert-Equals -actual $actual -expected "abc"
+Assert-Equal -actual $actual -expected "abc"
$defined_symbol = @'
using System;
@@ -225,7 +234,7 @@ namespace Namespace8
'@
Add-CSharpType -References $defined_symbol -CompileSymbols "SYMBOL1"
$actual = [Namespace8.Class8]::GetString()
-Assert-Equals -actual $actual -expected "symbol"
+Assert-Equal -actual $actual -expected "symbol"
$type_accelerator = @'
using System;
@@ -245,7 +254,7 @@ namespace Namespace9
'@
Add-CSharpType -Reference $type_accelerator
$actual = [AnsibleType]::GetString()
-Assert-Equals -actual $actual -expected "a"
+Assert-Equal -actual $actual -expected "a"
$missing_type_class = @'
using System;
@@ -266,11 +275,12 @@ namespace Namespace10
$failed = $false
try {
Add-CSharpType -Reference $missing_type_class
-} catch {
+}
+catch {
$failed = $true
- Assert-Equals -actual $_.Exception.Message -expected "Failed to find compiled class 'MissingClass' for custom TypeAccelerator."
+ Assert-Equal -actual $_.Exception.Message -expected "Failed to find compiled class 'MissingClass' for custom TypeAccelerator."
}
-Assert-Equals -actual $failed -expected $true
+Assert-Equal -actual $failed -expected $true
$arch_class = @'
using System;
@@ -293,7 +303,7 @@ namespace Namespace11
}
'@
Add-CSharpType -Reference $arch_class
-Assert-Equals -actual ([Namespace11.Class11]::GetIntPtrSize()) -expected ([System.IntPtr]::Size)
+Assert-Equal -actual ([Namespace11.Class11]::GetIntPtrSize()) -expected ([System.IntPtr]::Size)
$lib_set = @'
using System;
@@ -316,7 +326,7 @@ try {
finally {
Remove-Item -LiteralPath env:\LIB
}
-Assert-Equals -actual ([Namespace12.Class12]::GetString()) -expected "b"
+Assert-Equal -actual ([Namespace12.Class12]::GetString()) -expected "b"
$result.res = "success"
Exit-Json -obj $result
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.Backup/library/backup_file_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.Backup/library/backup_file_test.ps1
index 15527560..39beab78 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.Backup/library/backup_file_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.Backup/library/backup_file_test.ps1
@@ -8,37 +8,40 @@
$module = [Ansible.Basic.AnsibleModule]::Create($args, @{})
-Function Assert-Equals {
+Function Assert-Equal {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $matched = $false
- if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
- $Actual.Count | Assert-Equals -Expected $Expected.Count
- for ($i = 0; $i -lt $Actual.Count; $i++) {
- $actual_value = $Actual[$i]
- $expected_value = $Expected[$i]
- Assert-Equals -Actual $actual_value -Expected $expected_value
+ process {
+ $matched = $false
+ if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
+ $Actual.Count | Assert-Equal -Expected $Expected.Count
+ for ($i = 0; $i -lt $Actual.Count; $i++) {
+ $actual_value = $Actual[$i]
+ $expected_value = $Expected[$i]
+ Assert-Equal -Actual $actual_value -Expected $expected_value
+ }
+ $matched = $true
}
- $matched = $true
- } else {
- $matched = $Actual -ceq $Expected
- }
-
- if (-not $matched) {
- if ($Actual -is [PSObject]) {
- $Actual = $Actual.ToString()
+ else {
+ $matched = $Actual -ceq $Expected
}
- $call_stack = (Get-PSCallStack)[1]
- $module.Result.test = $test
- $module.Result.actual = $Actual
- $module.Result.expected = $Expected
- $module.Result.line = $call_stack.ScriptLineNumber
- $module.Result.method = $call_stack.Position.Text
- $module.FailJson("AssertionError: actual != expected")
+ if (-not $matched) {
+ if ($Actual -is [PSObject]) {
+ $Actual = $Actual.ToString()
+ }
+
+ $call_stack = (Get-PSCallStack)[1]
+ $module.Result.test = $test
+ $module.Result.actual = $Actual
+ $module.Result.expected = $Expected
+ $module.Result.line = $call_stack.ScriptLineNumber
+ $module.Result.method = $call_stack.Position.Text
+ $module.FailJson("AssertionError: actual != expected")
+ }
}
}
@@ -47,7 +50,7 @@ $tmp_dir = $module.Tmpdir
$tests = @{
"Test backup file with missing file" = {
$actual = Backup-File -path (Join-Path -Path $tmp_dir -ChildPath "missing")
- $actual | Assert-Equals -Expected $null
+ $actual | Assert-Equal -Expected $null
}
"Test backup file in check mode" = {
@@ -55,12 +58,12 @@ $tests = @{
Set-Content -LiteralPath $orig_file -Value "abc"
$actual = Backup-File -path $orig_file -WhatIf
- (Test-Path -LiteralPath $actual) | Assert-Equals -Expected $false
+ (Test-Path -LiteralPath $actual) | Assert-Equal -Expected $false
$parent_dir = Split-Path -LiteralPath $actual
$backup_file = Split-Path -Path $actual -Leaf
- $parent_dir | Assert-Equals -Expected $tmp_dir
- ($backup_file -match "^file-check\.txt\.$pid\.\d{8}-\d{6}\.bak$") | Assert-Equals -Expected $true
+ $parent_dir | Assert-Equal -Expected $tmp_dir
+ ($backup_file -match "^file-check\.txt\.$pid\.\d{8}-\d{6}\.bak$") | Assert-Equal -Expected $true
}
"Test backup file" = {
@@ -69,13 +72,13 @@ $tests = @{
Set-Content -LiteralPath $orig_file -Value $content
$actual = Backup-File -path $orig_file
- (Test-Path -LiteralPath $actual) | Assert-Equals -Expected $true
+ (Test-Path -LiteralPath $actual) | Assert-Equal -Expected $true
$parent_dir = Split-Path -LiteralPath $actual
$backup_file = Split-Path -Path $actual -Leaf
- $parent_dir | Assert-Equals -Expected $tmp_dir
- ($backup_file -match "^file\.txt\.$pid\.\d{8}-\d{6}\.bak$") | Assert-Equals -Expected $true
- (Get-Content -LiteralPath $actual -Raw) | Assert-Equals -Expected "$content`r`n"
+ $parent_dir | Assert-Equal -Expected $tmp_dir
+ ($backup_file -match "^file\.txt\.$pid\.\d{8}-\d{6}\.bak$") | Assert-Equal -Expected $true
+ (Get-Content -LiteralPath $actual -Raw) | Assert-Equal -Expected "$content`r`n"
}
}
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.CamelConversion/library/camel_conversion_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.CamelConversion/library/camel_conversion_test.ps1
index d3dc9d7b..bcb9558e 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.CamelConversion/library/camel_conversion_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.CamelConversion/library/camel_conversion_test.ps1
@@ -5,7 +5,7 @@
$ErrorActionPreference = 'Stop'
-Function Assert-Equals($actual, $expected) {
+Function Assert-Equal($actual, $expected) {
if ($actual -cne $expected) {
Fail-Json @{} "actual != expected`nActual: $actual`nExpected: $expected"
}
@@ -41,33 +41,40 @@ foreach ($entry in $output_dict.GetEnumerator()) {
$value = $entry.Value
if ($value -is [Hashtable]) {
- Assert-Equals -actual $key -expected "inner_hash_table"
+ Assert-Equal -actual $key -expected "inner_hash_table"
foreach ($inner_hash in $value.GetEnumerator()) {
- Assert-Equals -actual $inner_hash.Name -expected $inner_hash.Value
+ Assert-Equal -actual $inner_hash.Name -expected $inner_hash.Value
}
- } elseif ($value -is [Array] -or $value -is [System.Collections.ArrayList]) {
+ }
+ elseif ($value -is [Array] -or $value -is [System.Collections.ArrayList]) {
if ($key -eq "list_dict") {
foreach ($inner_list in $value) {
if ($inner_list -is [Hashtable]) {
foreach ($inner_list_hash in $inner_list.GetEnumerator()) {
- Assert-Equals -actual $inner_list_hash.Name -expected $inner_list_hash.Value
+ Assert-Equal -actual $inner_list_hash.Name -expected $inner_list_hash.Value
}
- } elseif ($inner_list -is [String]) {
+ }
+ elseif ($inner_list -is [String]) {
# this is not a string key so we need to keep it the same
- Assert-Equals -actual $inner_list -expected "stringTwo"
- } else {
- Assert-Equals -actual $inner_list -expected 0
+ Assert-Equal -actual $inner_list -expected "stringTwo"
+ }
+ else {
+ Assert-Equal -actual $inner_list -expected 0
}
}
- } elseif ($key -eq "empty_list") {
- Assert-Equals -actual $value.Count -expected 0
- } elseif ($key -eq "single_list") {
- Assert-Equals -actual $value.Count -expected 1
- } else {
+ }
+ elseif ($key -eq "empty_list") {
+ Assert-Equal -actual $value.Count -expected 0
+ }
+ elseif ($key -eq "single_list") {
+ Assert-Equal -actual $value.Count -expected 1
+ }
+ else {
Fail-Json -obj $result -message "invalid key found for list $key"
}
- } else {
- Assert-Equals -actual $key -expected $value
+ }
+ else {
+ Assert-Equal -actual $key -expected $value
}
}
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.CommandUtil/library/command_util_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.CommandUtil/library/command_util_test.ps1
index 6e644fe2..ebffae7f 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.CommandUtil/library/command_util_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.CommandUtil/library/command_util_test.ps1
@@ -16,7 +16,7 @@ $exe_directory = Split-Path -Path $exe -Parent
$exe_filename = Split-Path -Path $exe -Leaf
$test_name = $null
-Function Assert-Equals($actual, $expected) {
+Function Assert-Equal($actual, $expected) {
if ($actual -cne $expected) {
Fail-Json -obj $result -message "Test $test_name failed`nActual: '$actual' != Expected: '$expected'"
}
@@ -24,10 +24,10 @@ Function Assert-Equals($actual, $expected) {
$test_name = "full exe path"
$actual = Run-Command -command "`"$exe`" arg1 arg2 `"arg 3`""
-Assert-Equals -actual $actual.rc -expected 0
-Assert-Equals -actual $actual.stdout -expected "arg1`r`narg2`r`narg 3`r`n"
-Assert-Equals -actual $actual.stderr -expected ""
-Assert-Equals -actual $actual.executable -expected $exe
+Assert-Equal -actual $actual.rc -expected 0
+Assert-Equal -actual $actual.stdout -expected "arg1`r`narg2`r`narg 3`r`n"
+Assert-Equal -actual $actual.stderr -expected ""
+Assert-Equal -actual $actual.executable -expected $exe
$test_name = "exe in special char dir"
$tmp_dir = Join-Path -Path $env:TEMP -ChildPath "ansible .ÅÑŚÌβŁÈ [$!@^&test(;)]"
@@ -36,66 +36,70 @@ try {
$exe_special = Join-Path $tmp_dir -ChildPath "PrintArgv.exe"
Copy-Item -LiteralPath $exe -Destination $exe_special
$actual = Run-Command -command "`"$exe_special`" arg1 arg2 `"arg 3`""
-} finally {
+}
+finally {
Remove-Item -LiteralPath $tmp_dir -Force -Recurse
}
-Assert-Equals -actual $actual.rc -expected 0
-Assert-Equals -actual $actual.stdout -expected "arg1`r`narg2`r`narg 3`r`n"
-Assert-Equals -actual $actual.stderr -expected ""
-Assert-Equals -actual $actual.executable -expected $exe_special
+Assert-Equal -actual $actual.rc -expected 0
+Assert-Equal -actual $actual.stdout -expected "arg1`r`narg2`r`narg 3`r`n"
+Assert-Equal -actual $actual.stderr -expected ""
+Assert-Equal -actual $actual.executable -expected $exe_special
$test_name = "invalid exe path"
try {
$actual = Run-Command -command "C:\fakepath\$exe_filename arg1"
Fail-Json -obj $result -message "Test $test_name failed`nCommand should have thrown an exception"
-} catch {
- Assert-Equals -actual $_.Exception.Message -expected "Exception calling `"SearchPath`" with `"1`" argument(s): `"Could not find file 'C:\fakepath\$exe_filename'.`""
+}
+catch {
+ $expected = "Exception calling `"SearchPath`" with `"1`" argument(s): `"Could not find file 'C:\fakepath\$exe_filename'.`""
+ Assert-Equal -actual $_.Exception.Message -expected $expected
}
$test_name = "exe in current folder"
$actual = Run-Command -command "$exe_filename arg1" -working_directory $exe_directory
-Assert-Equals -actual $actual.rc -expected 0
-Assert-Equals -actual $actual.stdout -expected "arg1`r`n"
-Assert-Equals -actual $actual.stderr -expected ""
-Assert-Equals -actual $actual.executable -expected $exe
+Assert-Equal -actual $actual.rc -expected 0
+Assert-Equal -actual $actual.stdout -expected "arg1`r`n"
+Assert-Equal -actual $actual.stderr -expected ""
+Assert-Equal -actual $actual.executable -expected $exe
$test_name = "no working directory set"
$actual = Run-Command -command "cmd.exe /c cd"
-Assert-Equals -actual $actual.rc -expected 0
-Assert-Equals -actual $actual.stdout -expected "$($pwd.Path)`r`n"
-Assert-Equals -actual $actual.stderr -expected ""
-Assert-Equals -actual $actual.executable.ToUpper() -expected "$env:SystemRoot\System32\cmd.exe".ToUpper()
+Assert-Equal -actual $actual.rc -expected 0
+Assert-Equal -actual $actual.stdout -expected "$($pwd.Path)`r`n"
+Assert-Equal -actual $actual.stderr -expected ""
+Assert-Equal -actual $actual.executable.ToUpper() -expected "$env:SystemRoot\System32\cmd.exe".ToUpper()
$test_name = "working directory override"
$actual = Run-Command -command "cmd.exe /c cd" -working_directory $env:SystemRoot
-Assert-Equals -actual $actual.rc -expected 0
-Assert-Equals -actual $actual.stdout -expected "$env:SystemRoot`r`n"
-Assert-Equals -actual $actual.stderr -expected ""
-Assert-Equals -actual $actual.executable.ToUpper() -expected "$env:SystemRoot\System32\cmd.exe".ToUpper()
+Assert-Equal -actual $actual.rc -expected 0
+Assert-Equal -actual $actual.stdout -expected "$env:SystemRoot`r`n"
+Assert-Equal -actual $actual.stderr -expected ""
+Assert-Equal -actual $actual.executable.ToUpper() -expected "$env:SystemRoot\System32\cmd.exe".ToUpper()
$test_name = "working directory invalid path"
try {
$actual = Run-Command -command "doesn't matter" -working_directory "invalid path here"
Fail-Json -obj $result -message "Test $test_name failed`nCommand should have thrown an exception"
-} catch {
- Assert-Equals -actual $_.Exception.Message -expected "invalid working directory path 'invalid path here'"
+}
+catch {
+ Assert-Equal -actual $_.Exception.Message -expected "invalid working directory path 'invalid path here'"
}
$test_name = "invalid arguments"
$actual = Run-Command -command "ipconfig.exe /asdf"
-Assert-Equals -actual $actual.rc -expected 1
+Assert-Equal -actual $actual.rc -expected 1
$test_name = "test stdout and stderr streams"
$actual = Run-Command -command "cmd.exe /c echo stdout && echo stderr 1>&2"
-Assert-Equals -actual $actual.rc -expected 0
-Assert-Equals -actual $actual.stdout -expected "stdout `r`n"
-Assert-Equals -actual $actual.stderr -expected "stderr `r`n"
+Assert-Equal -actual $actual.rc -expected 0
+Assert-Equal -actual $actual.stdout -expected "stdout `r`n"
+Assert-Equal -actual $actual.stderr -expected "stderr `r`n"
$test_name = "Test UTF8 output from stdout stream"
$actual = Run-Command -command "powershell.exe -ExecutionPolicy ByPass -Command `"Write-Host '💩'`""
-Assert-Equals -actual $actual.rc -expected 0
-Assert-Equals -actual $actual.stdout -expected "💩`n"
-Assert-Equals -actual $actual.stderr -expected ""
+Assert-Equal -actual $actual.rc -expected 0
+Assert-Equal -actual $actual.stdout -expected "💩`n"
+Assert-Equal -actual $actual.stderr -expected ""
$test_name = "test default environment variable"
Set-Item -LiteralPath env:TESTENV -Value "test"
@@ -129,7 +133,7 @@ begin {
"@
$encoded_wrapper = [System.Convert]::ToBase64String([System.Text.Encoding]::Unicode.GetBytes($wrapper))
$actual = Run-Command -command "powershell.exe -ExecutionPolicy ByPass -EncodedCommand $encoded_wrapper" -stdin "Ansible"
-Assert-Equals -actual $actual.stdout -expected "Ansible`n"
+Assert-Equal -actual $actual.stdout -expected "Ansible`n"
$result.data = "success"
Exit-Json -obj $result
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.FileUtil/library/file_util_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.FileUtil/library/file_util_test.ps1
index ae3e68ec..c38f4e61 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.FileUtil/library/file_util_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.FileUtil/library/file_util_test.ps1
@@ -9,10 +9,13 @@ $result = @{
changed = $false
}
-Function Assert-Equals($actual, $expected) {
+Function Assert-Equal($actual, $expected) {
if ($actual -cne $expected) {
$call_stack = (Get-PSCallStack)[1]
- $error_msg = "AssertionError:`r`nActual: `"$actual`" != Expected: `"$expected`"`r`nLine: $($call_stack.ScriptLineNumber), Method: $($call_stack.Position.Text)"
+ $error_msg = -join @(
+ "AssertionError:`r`nActual: `"$actual`" != Expected: `"$expected`"`r`nLine: "
+ "$($call_stack.ScriptLineNumber), Method: $($call_stack.Position.Text)"
+ )
Fail-Json -obj $result -message $error_msg
}
}
@@ -22,7 +25,8 @@ Function Get-PagefilePath() {
$cs = Get-CimInstance -ClassName Win32_ComputerSystem
if ($cs.AutomaticManagedPagefile) {
$pagefile = "$($env:SystemRoot.Substring(0, 1)):\pagefile.sys"
- } else {
+ }
+ else {
$pf = Get-CimInstance -ClassName Win32_PageFileSetting
if ($null -ne $pf) {
$pagefile = $pf[0].Name
@@ -35,74 +39,76 @@ $pagefile = Get-PagefilePath
if ($pagefile) {
# Test-AnsiblePath Hidden system file
$actual = Test-AnsiblePath -Path $pagefile
- Assert-Equals -actual $actual -expected $true
+ Assert-Equal -actual $actual -expected $true
# Get-AnsibleItem file
$actual = Get-AnsibleItem -Path $pagefile
- Assert-Equals -actual $actual.FullName -expected $pagefile
- Assert-Equals -actual $actual.Attributes.HasFlag([System.IO.FileAttributes]::Directory) -expected $false
- Assert-Equals -actual $actual.Exists -expected $true
+ Assert-Equal -actual $actual.FullName -expected $pagefile
+ Assert-Equal -actual $actual.Attributes.HasFlag([System.IO.FileAttributes]::Directory) -expected $false
+ Assert-Equal -actual $actual.Exists -expected $true
}
# Test-AnsiblePath File that doesn't exist
$actual = Test-AnsiblePath -Path C:\fakefile
-Assert-Equals -actual $actual -expected $false
+Assert-Equal -actual $actual -expected $false
# Test-AnsiblePath Directory that doesn't exist
$actual = Test-AnsiblePath -Path C:\fakedirectory
-Assert-Equals -actual $actual -expected $false
+Assert-Equal -actual $actual -expected $false
# Test-AnsiblePath file in non-existant directory
$actual = Test-AnsiblePath -Path C:\fakedirectory\fakefile.txt
-Assert-Equals -actual $actual -expected $false
+Assert-Equal -actual $actual -expected $false
# Test-AnsiblePath Normal directory
$actual = Test-AnsiblePath -Path C:\Windows
-Assert-Equals -actual $actual -expected $true
+Assert-Equal -actual $actual -expected $true
# Test-AnsiblePath Normal file
$actual = Test-AnsiblePath -Path C:\Windows\System32\kernel32.dll
-Assert-Equals -actual $actual -expected $true
+Assert-Equal -actual $actual -expected $true
# Test-AnsiblePath fails with wildcard
$failed = $false
try {
Test-AnsiblePath -Path C:\Windows\*.exe
-} catch {
+}
+catch {
$failed = $true
- Assert-Equals -actual $_.Exception.Message -expected "Exception calling `"GetAttributes`" with `"1`" argument(s): `"Illegal characters in path.`""
+ Assert-Equal -actual $_.Exception.Message -expected "Exception calling `"GetAttributes`" with `"1`" argument(s): `"Illegal characters in path.`""
}
-Assert-Equals -actual $failed -expected $true
+Assert-Equal -actual $failed -expected $true
# Test-AnsiblePath on non file PS Provider object
$actual = Test-AnsiblePath -Path Cert:\LocalMachine\My
-Assert-Equals -actual $actual -expected $true
+Assert-Equal -actual $actual -expected $true
# Test-AnsiblePath on environment variable
$actual = Test-AnsiblePath -Path env:SystemDrive
-Assert-Equals -actual $actual -expected $true
+Assert-Equal -actual $actual -expected $true
# Test-AnsiblePath on environment variable that does not exist
$actual = Test-AnsiblePath -Path env:FakeEnvValue
-Assert-Equals -actual $actual -expected $false
+Assert-Equal -actual $actual -expected $false
# Get-AnsibleItem doesn't exist with -ErrorAction SilentlyContinue param
$actual = Get-AnsibleItem -Path C:\fakefile -ErrorAction SilentlyContinue
-Assert-Equals -actual $actual -expected $null
+Assert-Equal -actual $actual -expected $null
# Get-AnsibleItem directory
$actual = Get-AnsibleItem -Path C:\Windows
-Assert-Equals -actual $actual.FullName -expected C:\Windows
-Assert-Equals -actual $actual.Attributes.HasFlag([System.IO.FileAttributes]::Directory) -expected $true
-Assert-Equals -actual $actual.Exists -expected $true
+Assert-Equal -actual $actual.FullName -expected C:\Windows
+Assert-Equal -actual $actual.Attributes.HasFlag([System.IO.FileAttributes]::Directory) -expected $true
+Assert-Equal -actual $actual.Exists -expected $true
# ensure Get-AnsibleItem doesn't fail in a try/catch and -ErrorAction SilentlyContinue - stop's a trap from trapping it
try {
$actual = Get-AnsibleItem -Path C:\fakepath -ErrorAction SilentlyContinue
-} catch {
+}
+catch {
Fail-Json -obj $result -message "this should not fire"
}
-Assert-Equals -actual $actual -expected $null
+Assert-Equal -actual $actual -expected $null
$result.data = "success"
Exit-Json -obj $result
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.Legacy/library/testpath.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.Legacy/library/testpath.ps1
index 55cad70f..7a6ba0be 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.Legacy/library/testpath.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.Legacy/library/testpath.ps1
@@ -6,4 +6,4 @@ $params = Parse-Args $args
$path = Get-AnsibleParam -Obj $params -Name path -Type path
-Exit-Json @{ path=$path }
+Exit-Json @{ path = $path }
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.LinkUtil/library/symbolic_link_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.LinkUtil/library/symbolic_link_test.ps1
index 1decfe4f..de0bb8bb 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.LinkUtil/library/symbolic_link_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.LinkUtil/library/symbolic_link_test.ps1
@@ -25,7 +25,7 @@ New-Item -Path $folder_target -ItemType Directory | Out-Null
New-Item -Path $file_target -ItemType File | Out-Null
Set-Content -LiteralPath $file_target -Value "a"
-Function Assert-Equals($actual, $expected) {
+Function Assert-Equal($actual, $expected) {
if ($actual -ne $expected) {
Fail-Json @{} "actual != expected`nActual: $actual`nExpected: $expected"
}
@@ -48,105 +48,109 @@ Assert-True -expression ($null -eq $no_link_result) -message "did not return nul
try {
New-Link -link_path "$path\folder-hard" -link_target $folder_target -link_type "hard"
Assert-True -expression $false -message "creation of hard link should have failed if target was a directory"
-} catch {
- Assert-Equals -actual $_.Exception.Message -expected "cannot set the target for a hard link to a directory"
+}
+catch {
+ Assert-Equal -actual $_.Exception.Message -expected "cannot set the target for a hard link to a directory"
}
# fail to create a junction point pointed to a file
try {
New-Link -link_path "$path\junction-fail" -link_target $file_target -link_type "junction"
Assert-True -expression $false -message "creation of junction point should have failed if target was a file"
-} catch {
- Assert-Equals -actual $_.Exception.Message -expected "cannot set the target for a junction point to a file"
+}
+catch {
+ Assert-Equal -actual $_.Exception.Message -expected "cannot set the target for a junction point to a file"
}
# fail to create a symbolic link with non-existent target
try {
New-Link -link_path "$path\symlink-fail" -link_target "$path\fake-folder" -link_type "link"
Assert-True -expression $false -message "creation of symbolic link should have failed if target did not exist"
-} catch {
- Assert-Equals -actual $_.Exception.Message -expected "link_target '$path\fake-folder' does not exist, cannot create link"
+}
+catch {
+ Assert-Equal -actual $_.Exception.Message -expected "link_target '$path\fake-folder' does not exist, cannot create link"
}
# create recursive symlink
Run-Command -command "cmd.exe /c mklink /D symlink-rel folder" -working_directory $path | Out-Null
$rel_link_result = Get-Link -link_path "$path\symlink-rel"
-Assert-Equals -actual $rel_link_result.Type -expected "SymbolicLink"
-Assert-Equals -actual $rel_link_result.SubstituteName -expected "folder"
-Assert-Equals -actual $rel_link_result.PrintName -expected "folder"
-Assert-Equals -actual $rel_link_result.TargetPath -expected "folder"
-Assert-Equals -actual $rel_link_result.AbsolutePath -expected $folder_target
-Assert-Equals -actual $rel_link_result.HardTargets -expected $null
+Assert-Equal -actual $rel_link_result.Type -expected "SymbolicLink"
+Assert-Equal -actual $rel_link_result.SubstituteName -expected "folder"
+Assert-Equal -actual $rel_link_result.PrintName -expected "folder"
+Assert-Equal -actual $rel_link_result.TargetPath -expected "folder"
+Assert-Equal -actual $rel_link_result.AbsolutePath -expected $folder_target
+Assert-Equal -actual $rel_link_result.HardTargets -expected $null
# create a symbolic file test
New-Link -link_path $symlink_file_path -link_target $file_target -link_type "link"
$file_link_result = Get-Link -link_path $symlink_file_path
-Assert-Equals -actual $file_link_result.Type -expected "SymbolicLink"
-Assert-Equals -actual $file_link_result.SubstituteName -expected "\??\$file_target"
-Assert-Equals -actual $file_link_result.PrintName -expected $file_target
-Assert-Equals -actual $file_link_result.TargetPath -expected $file_target
-Assert-Equals -actual $file_link_result.AbsolutePath -expected $file_target
-Assert-Equals -actual $file_link_result.HardTargets -expected $null
+Assert-Equal -actual $file_link_result.Type -expected "SymbolicLink"
+Assert-Equal -actual $file_link_result.SubstituteName -expected "\??\$file_target"
+Assert-Equal -actual $file_link_result.PrintName -expected $file_target
+Assert-Equal -actual $file_link_result.TargetPath -expected $file_target
+Assert-Equal -actual $file_link_result.AbsolutePath -expected $file_target
+Assert-Equal -actual $file_link_result.HardTargets -expected $null
# create a symbolic link folder test
New-Link -link_path $symlink_folder_path -link_target $folder_target -link_type "link"
$folder_link_result = Get-Link -link_path $symlink_folder_path
-Assert-Equals -actual $folder_link_result.Type -expected "SymbolicLink"
-Assert-Equals -actual $folder_link_result.SubstituteName -expected "\??\$folder_target"
-Assert-Equals -actual $folder_link_result.PrintName -expected $folder_target
-Assert-Equals -actual $folder_link_result.TargetPath -expected $folder_target
-Assert-Equals -actual $folder_link_result.AbsolutePath -expected $folder_target
-Assert-Equals -actual $folder_link_result.HardTargets -expected $null
+Assert-Equal -actual $folder_link_result.Type -expected "SymbolicLink"
+Assert-Equal -actual $folder_link_result.SubstituteName -expected "\??\$folder_target"
+Assert-Equal -actual $folder_link_result.PrintName -expected $folder_target
+Assert-Equal -actual $folder_link_result.TargetPath -expected $folder_target
+Assert-Equal -actual $folder_link_result.AbsolutePath -expected $folder_target
+Assert-Equal -actual $folder_link_result.HardTargets -expected $null
# create a junction point test
New-Link -link_path $junction_point_path -link_target $folder_target -link_type "junction"
$junction_point_result = Get-Link -link_path $junction_point_path
-Assert-Equals -actual $junction_point_result.Type -expected "JunctionPoint"
-Assert-Equals -actual $junction_point_result.SubstituteName -expected "\??\$folder_target"
-Assert-Equals -actual $junction_point_result.PrintName -expected $folder_target
-Assert-Equals -actual $junction_point_result.TargetPath -expected $folder_target
-Assert-Equals -actual $junction_point_result.AbsolutePath -expected $folder_target
-Assert-Equals -actual $junction_point_result.HardTargets -expected $null
+Assert-Equal -actual $junction_point_result.Type -expected "JunctionPoint"
+Assert-Equal -actual $junction_point_result.SubstituteName -expected "\??\$folder_target"
+Assert-Equal -actual $junction_point_result.PrintName -expected $folder_target
+Assert-Equal -actual $junction_point_result.TargetPath -expected $folder_target
+Assert-Equal -actual $junction_point_result.AbsolutePath -expected $folder_target
+Assert-Equal -actual $junction_point_result.HardTargets -expected $null
# create a hard link test
New-Link -link_path $hardlink_path -link_target $file_target -link_type "hard"
$hardlink_result = Get-Link -link_path $hardlink_path
-Assert-Equals -actual $hardlink_result.Type -expected "HardLink"
-Assert-Equals -actual $hardlink_result.SubstituteName -expected $null
-Assert-Equals -actual $hardlink_result.PrintName -expected $null
-Assert-Equals -actual $hardlink_result.TargetPath -expected $null
-Assert-Equals -actual $hardlink_result.AbsolutePath -expected $null
+Assert-Equal -actual $hardlink_result.Type -expected "HardLink"
+Assert-Equal -actual $hardlink_result.SubstituteName -expected $null
+Assert-Equal -actual $hardlink_result.PrintName -expected $null
+Assert-Equal -actual $hardlink_result.TargetPath -expected $null
+Assert-Equal -actual $hardlink_result.AbsolutePath -expected $null
if ($hardlink_result.HardTargets[0] -ne $hardlink_path -and $hardlink_result.HardTargets[1] -ne $hardlink_path) {
Assert-True -expression $false -message "file $hardlink_path is not a target of the hard link"
}
if ($hardlink_result.HardTargets[0] -ne $file_target -and $hardlink_result.HardTargets[1] -ne $file_target) {
Assert-True -expression $false -message "file $file_target is not a target of the hard link"
}
-Assert-equals -actual (Get-Content -LiteralPath $hardlink_path -Raw) -expected (Get-Content -LiteralPath $file_target -Raw)
+Assert-Equal -actual (Get-Content -LiteralPath $hardlink_path -Raw) -expected (Get-Content -LiteralPath $file_target -Raw)
# create a new hard link and verify targets go to 3
New-Link -link_path $hardlink_path_2 -link_target $file_target -link_type "hard"
$hardlink_result_2 = Get-Link -link_path $hardlink_path
-Assert-True -expression ($hardlink_result_2.HardTargets.Count -eq 3) -message "did not return 3 targets for the hard link, actual $($hardlink_result_2.Targets.Count)"
+$expected = "did not return 3 targets for the hard link, actual $($hardlink_result_2.Targets.Count)"
+Assert-True -expression ($hardlink_result_2.HardTargets.Count -eq 3) -message $expected
# check if broken symbolic link still works
Remove-Item -LiteralPath $folder_target -Force | Out-Null
$broken_link_result = Get-Link -link_path $symlink_folder_path
-Assert-Equals -actual $broken_link_result.Type -expected "SymbolicLink"
-Assert-Equals -actual $broken_link_result.SubstituteName -expected "\??\$folder_target"
-Assert-Equals -actual $broken_link_result.PrintName -expected $folder_target
-Assert-Equals -actual $broken_link_result.TargetPath -expected $folder_target
-Assert-Equals -actual $broken_link_result.AbsolutePath -expected $folder_target
-Assert-Equals -actual $broken_link_result.HardTargets -expected $null
+Assert-Equal -actual $broken_link_result.Type -expected "SymbolicLink"
+Assert-Equal -actual $broken_link_result.SubstituteName -expected "\??\$folder_target"
+Assert-Equal -actual $broken_link_result.PrintName -expected $folder_target
+Assert-Equal -actual $broken_link_result.TargetPath -expected $folder_target
+Assert-Equal -actual $broken_link_result.AbsolutePath -expected $folder_target
+Assert-Equal -actual $broken_link_result.HardTargets -expected $null
# check if broken junction point still works
$broken_junction_result = Get-Link -link_path $junction_point_path
-Assert-Equals -actual $broken_junction_result.Type -expected "JunctionPoint"
-Assert-Equals -actual $broken_junction_result.SubstituteName -expected "\??\$folder_target"
-Assert-Equals -actual $broken_junction_result.PrintName -expected $folder_target
-Assert-Equals -actual $broken_junction_result.TargetPath -expected $folder_target
-Assert-Equals -actual $broken_junction_result.AbsolutePath -expected $folder_target
-Assert-Equals -actual $broken_junction_result.HardTargets -expected $null
+Assert-Equal -actual $broken_junction_result.Type -expected "JunctionPoint"
+Assert-Equal -actual $broken_junction_result.SubstituteName -expected "\??\$folder_target"
+Assert-Equal -actual $broken_junction_result.PrintName -expected $folder_target
+Assert-Equal -actual $broken_junction_result.TargetPath -expected $folder_target
+Assert-Equal -actual $broken_junction_result.AbsolutePath -expected $folder_target
+Assert-Equal -actual $broken_junction_result.HardTargets -expected $null
# delete file symbolic link
Remove-Link -link_path $symlink_file_path
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.PrivilegeUtil/library/privilege_util_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.PrivilegeUtil/library/privilege_util_test.ps1
index e1ca25da..414b80a9 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.PrivilegeUtil/library/privilege_util_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.PrivilegeUtil/library/privilege_util_test.ps1
@@ -5,7 +5,7 @@
$module = [Ansible.Basic.AnsibleModule]::Create($args, @{})
-Function Assert-Equals($actual, $expected) {
+Function Assert-Equal($actual, $expected) {
if ($actual -cne $expected) {
$call_stack = (Get-PSCallStack)[1]
$module.Result.actual = $actual
@@ -70,20 +70,21 @@ foreach ($privilege in $total_privileges) {
$expected = $actual_privileges.$privilege
}
$actual = Get-AnsiblePrivilege -Name $privilege
- Assert-Equals -actual $actual -expected $expected
+ Assert-Equal -actual $actual -expected $expected
}
# test c# GetAllPrivilegeInfo
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
-Assert-Equals -actual $actual.GetType().Name -expected 'Dictionary`2'
-Assert-Equals -actual $actual.Count -expected $actual_privileges.Count
+Assert-Equal -actual $actual.GetType().Name -expected 'Dictionary`2'
+Assert-Equal -actual $actual.Count -expected $actual_privileges.Count
foreach ($privilege in $total_privileges) {
if ($actual_privileges.ContainsKey($privilege)) {
$actual_value = $actual.$privilege
if ($actual_privileges.$privilege) {
- Assert-Equals -actual $actual_value.HasFlag([Ansible.Privilege.PrivilegeAttributes]::Enabled) -expected $true
- } else {
- Assert-Equals -actual $actual_value.HasFlag([Ansible.Privilege.PrivilegeAttributes]::Enabled) -expected $false
+ Assert-Equal -actual $actual_value.HasFlag([Ansible.Privilege.PrivilegeAttributes]::Enabled) -expected $true
+ }
+ else {
+ Assert-Equal -actual $actual_value.HasFlag([Ansible.Privilege.PrivilegeAttributes]::Enabled) -expected $false
}
}
}
@@ -93,19 +94,19 @@ Set-AnsiblePrivilege -Name SeUndockPrivilege -Value $false # ensure we start wi
Set-AnsiblePrivilege -Name SeUndockPrivilege -Value $true -WhatIf
$actual = Get-AnsiblePrivilege -Name SeUndockPrivilege
-Assert-Equals -actual $actual -expected $false
+Assert-Equal -actual $actual -expected $false
Set-AnsiblePrivilege -Name SeUndockPrivilege -Value $true
$actual = Get-AnsiblePrivilege -Name SeUndockPrivilege
-Assert-Equals -actual $actual -expected $true
+Assert-Equal -actual $actual -expected $true
Set-AnsiblePrivilege -Name SeUndockPrivilege -Value $false -WhatIf
$actual = Get-AnsiblePrivilege -Name SeUndockPrivilege
-Assert-Equals -actual $actual -expected $true
+Assert-Equal -actual $actual -expected $true
Set-AnsiblePrivilege -Name SeUndockPrivilege -Value $false
$actual = Get-AnsiblePrivilege -Name SeUndockPrivilege
-Assert-Equals -actual $actual -expected $false
+Assert-Equal -actual $actual -expected $false
$module.Result.data = "success"
$module.ExitJson()
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.SID/library/sid_utils_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.SID/library/sid_utils_test.ps1
index eb376c81..85bfbe11 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.SID/library/sid_utils_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.SID/library/sid_utils_test.ps1
@@ -6,7 +6,7 @@
$params = Parse-Args $args
$sid_account = Get-AnsibleParam -obj $params -name "sid_account" -type "str" -failifempty $true
-Function Assert-Equals($actual, $expected) {
+Function Assert-Equal($actual, $expected) {
if ($actual -ne $expected) {
Fail-Json @{} "actual != expected`nActual: $actual`nExpected: $expected"
}
@@ -39,10 +39,18 @@ $tests = @(
@{ sid = "S-1-1-0"; full_name = "Everyone"; names = @("Everyone") },
@{ sid = "S-1-5-18"; full_name = "NT AUTHORITY\SYSTEM"; names = @("NT AUTHORITY\SYSTEM", "SYSTEM") },
@{ sid = "S-1-5-20"; full_name = "NT AUTHORITY\NETWORK SERVICE"; names = @("NT AUTHORITY\NETWORK SERVICE", "NETWORK SERVICE") },
- @{ sid = "$($default_admin.SID)"; full_name = "$($default_admin.FullName)"; names = @("$env:COMPUTERNAME\$($default_admin.Name)", "$($default_admin.Name)", ".\$($default_admin.Name)") },
+ @{
+ sid = "$($default_admin.SID)"
+ full_name = "$($default_admin.FullName)"
+ names = @("$env:COMPUTERNAME\$($default_admin.Name)", "$($default_admin.Name)", ".\$($default_admin.Name)")
+ },
# Local Groups
- @{ sid = "$($default_admin_group.SID)"; full_name = "BUILTIN\$($default_admin_group.Name)"; names = @("BUILTIN\$($default_admin_group.Name)", "$($default_admin_group.Name)", ".\$($default_admin_group.Name)") }
+ @{
+ sid = "$($default_admin_group.SID)"
+ full_name = "BUILTIN\$($default_admin_group.Name)"
+ names = @("BUILTIN\$($default_admin_group.Name)", "$($default_admin_group.Name)", ".\$($default_admin_group.Name)")
+ }
)
# Add domain tests if the domain name has been set
@@ -70,12 +78,12 @@ foreach ($test in $tests) {
$actual_account_name = Convert-FromSID -sid $test.sid
# renamed admins may have an empty FullName; skip comparison in that case
if ($test.full_name) {
- Assert-Equals -actual $actual_account_name -expected $test.full_name
+ Assert-Equal -actual $actual_account_name -expected $test.full_name
}
foreach ($test_name in $test.names) {
$actual_sid = Convert-ToSID -account_name $test_name
- Assert-Equals -actual $actual_sid -expected $test.sid
+ Assert-Equal -actual $actual_sid -expected $test.sid
}
}
@@ -83,11 +91,11 @@ foreach ($test in $tests) {
# in the normal test suite
# Calling Convert-ToSID with a string like a SID should return that SID back
$actual = Convert-ToSID -account_name $sid_account
-Assert-Equals -actual $actual -expected $sid_account
+Assert-Equal -actual $actual -expected $sid_account
# Calling COnvert-ToSID with a string prefixed with .\ should return the SID
# for a user that is called that SID and not the SID passed in
$actual = Convert-ToSID -account_name ".\$sid_account"
-Assert-Equals -actual ($actual -ne $sid_account) -expected $true
+Assert-Equal -actual ($actual -ne $sid_account) -expected $true
Exit-Json @{ data = "success" }
diff --git a/test/integration/targets/module_utils_Ansible.ModuleUtils.WebRequest/library/web_request_test.ps1 b/test/integration/targets/module_utils_Ansible.ModuleUtils.WebRequest/library/web_request_test.ps1
index a483698c..c168b92c 100644
--- a/test/integration/targets/module_utils_Ansible.ModuleUtils.WebRequest/library/web_request_test.ps1
+++ b/test/integration/targets/module_utils_Ansible.ModuleUtils.WebRequest/library/web_request_test.ps1
@@ -13,45 +13,48 @@ $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
$httpbin_host = $module.Params.httpbin_host
-Function Assert-Equals {
+Function Assert-Equal {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $matched = $false
- if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array] -or $Actual -is [System.Collections.IList]) {
- $Actual.Count | Assert-Equals -Expected $Expected.Count
- for ($i = 0; $i -lt $Actual.Count; $i++) {
- $actualValue = $Actual[$i]
- $expectedValue = $Expected[$i]
- Assert-Equals -Actual $actualValue -Expected $expectedValue
- }
- $matched = $true
- } else {
- $matched = $Actual -ceq $Expected
- }
-
- if (-not $matched) {
- if ($Actual -is [PSObject]) {
- $Actual = $Actual.ToString()
+ process {
+ $matched = $false
+ if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array] -or $Actual -is [System.Collections.IList]) {
+ $Actual.Count | Assert-Equal -Expected $Expected.Count
+ for ($i = 0; $i -lt $Actual.Count; $i++) {
+ $actualValue = $Actual[$i]
+ $expectedValue = $Expected[$i]
+ Assert-Equal -Actual $actualValue -Expected $expectedValue
+ }
+ $matched = $true
}
+ else {
+ $matched = $Actual -ceq $Expected
+ }
+
+ if (-not $matched) {
+ if ($Actual -is [PSObject]) {
+ $Actual = $Actual.ToString()
+ }
- $call_stack = (Get-PSCallStack)[1]
- $module.Result.test = $test
- $module.Result.actual = $Actual
- $module.Result.expected = $Expected
- $module.Result.line = $call_stack.ScriptLineNumber
- $module.Result.method = $call_stack.Position.Text
+ $call_stack = (Get-PSCallStack)[1]
+ $module.Result.test = $test
+ $module.Result.actual = $Actual
+ $module.Result.expected = $Expected
+ $module.Result.line = $call_stack.ScriptLineNumber
+ $module.Result.method = $call_stack.Position.Text
- $module.FailJson("AssertionError: actual != expected")
+ $module.FailJson("AssertionError: actual != expected")
+ }
}
}
Function Convert-StreamToString {
[CmdletBinding()]
param (
- [Parameter(Mandatory=$true)]
+ [Parameter(Mandatory = $true)]
[System.IO.Stream]
$Stream
)
@@ -60,7 +63,8 @@ Function Convert-StreamToString {
try {
$Stream.CopyTo($ms)
[System.Text.Encoding]::UTF8.GetString($ms.ToArray())
- } finally {
+ }
+ finally {
$ms.Dispose()
}
}
@@ -69,50 +73,50 @@ $tests = [Ordered]@{
'GET request over http' = {
$r = Get-AnsibleWebRequest -Uri "http://$httpbin_host/get"
- $r.Method | Assert-Equals -Expected 'GET'
- $r.Timeout | Assert-Equals -Expected 30000
- $r.UseDefaultCredentials | Assert-Equals -Expected $false
- $r.Credentials | Assert-Equals -Expected $null
- $r.ClientCertificates.Count | Assert-Equals -Expected 0
- $r.Proxy.Credentials | Assert-Equals -Expected $null
- $r.UserAgent | Assert-Equals -Expected 'ansible-httpget'
+ $r.Method | Assert-Equal -Expected 'GET'
+ $r.Timeout | Assert-Equal -Expected 30000
+ $r.UseDefaultCredentials | Assert-Equal -Expected $false
+ $r.Credentials | Assert-Equal -Expected $null
+ $r.ClientCertificates.Count | Assert-Equal -Expected 0
+ $r.Proxy.Credentials | Assert-Equal -Expected $null
+ $r.UserAgent | Assert-Equal -Expected 'ansible-httpget'
$actual = Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.StatusCode | Assert-Equal -Expected 200
Convert-StreamToString -Stream $Stream
} | ConvertFrom-Json
- $actual.headers.'User-Agent' | Assert-Equals -Expected 'ansible-httpget'
- $actual.headers.'Host' | Assert-Equals -Expected $httpbin_host
+ $actual.headers.'User-Agent' | Assert-Equal -Expected 'ansible-httpget'
+ $actual.headers.'Host' | Assert-Equal -Expected $httpbin_host
- $module.Result.msg | Assert-Equals -Expected 'OK'
- $module.Result.status_code | Assert-Equals -Expected 200
- $module.Result.ContainsKey('elapsed') | Assert-Equals -Expected $true
+ $module.Result.msg | Assert-Equal -Expected 'OK'
+ $module.Result.status_code | Assert-Equal -Expected 200
+ $module.Result.ContainsKey('elapsed') | Assert-Equal -Expected $true
}
'GET request over https' = {
# url is an alias for the -Uri parameter.
$r = Get-AnsibleWebRequest -url "https://$httpbin_host/get"
- $r.Method | Assert-Equals -Expected 'GET'
- $r.Timeout | Assert-Equals -Expected 30000
- $r.UseDefaultCredentials | Assert-Equals -Expected $false
- $r.Credentials | Assert-Equals -Expected $null
- $r.ClientCertificates.Count | Assert-Equals -Expected 0
- $r.Proxy.Credentials | Assert-Equals -Expected $null
- $r.UserAgent | Assert-Equals -Expected 'ansible-httpget'
+ $r.Method | Assert-Equal -Expected 'GET'
+ $r.Timeout | Assert-Equal -Expected 30000
+ $r.UseDefaultCredentials | Assert-Equal -Expected $false
+ $r.Credentials | Assert-Equal -Expected $null
+ $r.ClientCertificates.Count | Assert-Equal -Expected 0
+ $r.Proxy.Credentials | Assert-Equal -Expected $null
+ $r.UserAgent | Assert-Equal -Expected 'ansible-httpget'
$actual = Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.StatusCode | Assert-Equal -Expected 200
Convert-StreamToString -Stream $Stream
} | ConvertFrom-Json
- $actual.headers.'User-Agent' | Assert-Equals -Expected 'ansible-httpget'
- $actual.headers.'Host' | Assert-Equals -Expected $httpbin_host
+ $actual.headers.'User-Agent' | Assert-Equal -Expected 'ansible-httpget'
+ $actual.headers.'Host' | Assert-Equal -Expected $httpbin_host
}
'POST request' = {
@@ -125,14 +129,14 @@ $tests = [Ordered]@{
}
$r = Get-AnsibleWebRequest @getParams
- $r.Method | Assert-Equals -Expected 'POST'
- $r.Timeout | Assert-Equals -Expected 30000
- $r.UseDefaultCredentials | Assert-Equals -Expected $false
- $r.Credentials | Assert-Equals -Expected $null
- $r.ClientCertificates.Count | Assert-Equals -Expected 0
- $r.Proxy.Credentials | Assert-Equals -Expected $null
- $r.ContentType | Assert-Equals -Expected 'application/json'
- $r.UserAgent | Assert-Equals -Expected 'ansible-httpget'
+ $r.Method | Assert-Equal -Expected 'POST'
+ $r.Timeout | Assert-Equal -Expected 30000
+ $r.UseDefaultCredentials | Assert-Equal -Expected $false
+ $r.Credentials | Assert-Equal -Expected $null
+ $r.ClientCertificates.Count | Assert-Equal -Expected 0
+ $r.Proxy.Credentials | Assert-Equal -Expected $null
+ $r.ContentType | Assert-Equal -Expected 'application/json'
+ $r.UserAgent | Assert-Equal -Expected 'ansible-httpget'
$body = New-Object -TypeName System.IO.MemoryStream -ArgumentList @(,
([System.Text.Encoding]::UTF8.GetBytes('{"foo":"bar"}'))
@@ -140,13 +144,13 @@ $tests = [Ordered]@{
$actual = Invoke-WithWebRequest -Module $module -Request $r -Body $body -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.StatusCode | Assert-Equal -Expected 200
Convert-StreamToString -Stream $Stream
} | ConvertFrom-Json
- $actual.headers.'User-Agent' | Assert-Equals -Expected 'ansible-httpget'
- $actual.headers.'Host' | Assert-Equals -Expected $httpbin_host
- $actual.data | Assert-Equals -Expected '{"foo":"bar"}'
+ $actual.headers.'User-Agent' | Assert-Equal -Expected 'ansible-httpget'
+ $actual.headers.'Host' | Assert-Equal -Expected $httpbin_host
+ $actual.data | Assert-Equal -Expected '{"foo":"bar"}'
}
'Safe redirection of GET' = {
@@ -155,8 +159,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected "http://$httpbin_host/get"
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.ResponseUri | Assert-Equal -Expected "http://$httpbin_host/get"
+ $Response.StatusCode | Assert-Equal -Expected 200
}
}
@@ -166,8 +170,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected "http://$httpbin_host/get"
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.ResponseUri | Assert-Equal -Expected "http://$httpbin_host/get"
+ $Response.StatusCode | Assert-Equal -Expected 200
}
}
@@ -181,8 +185,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected $r.RequestUri
- $Response.StatusCode | Assert-Equals -Expected 302
+ $Response.ResponseUri | Assert-Equal -Expected $r.RequestUri
+ $Response.StatusCode | Assert-Equal -Expected 302
}
}
@@ -196,8 +200,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected $r.RequestUri
- $Response.StatusCode | Assert-Equals -Expected 302
+ $Response.ResponseUri | Assert-Equal -Expected $r.RequestUri
+ $Response.StatusCode | Assert-Equal -Expected 302
}
}
@@ -212,8 +216,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected $r.RequestUri
- $Response.StatusCode | Assert-Equals -Expected 302
+ $Response.ResponseUri | Assert-Equal -Expected $r.RequestUri
+ $Response.StatusCode | Assert-Equal -Expected 302
}
}
@@ -228,8 +232,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected $r.RequestUri
- $Response.StatusCode | Assert-Equals -Expected 302
+ $Response.ResponseUri | Assert-Equal -Expected $r.RequestUri
+ $Response.StatusCode | Assert-Equal -Expected 302
}
}
@@ -243,8 +247,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected "http://$httpbin_host/get"
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.ResponseUri | Assert-Equal -Expected "http://$httpbin_host/get"
+ $Response.StatusCode | Assert-Equal -Expected 200
}
}
@@ -259,8 +263,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected "http://$httpbin_host/get"
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.ResponseUri | Assert-Equal -Expected "http://$httpbin_host/get"
+ $Response.StatusCode | Assert-Equal -Expected 200
}
}
@@ -275,8 +279,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected "https://$httpbin_host/put"
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.ResponseUri | Assert-Equal -Expected "https://$httpbin_host/put"
+ $Response.StatusCode | Assert-Equal -Expected 200
}
}
@@ -290,8 +294,8 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -IgnoreBadResponse -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected "https://$httpbin_host/relative-redirect/1"
- $Response.StatusCode | Assert-Equals -Expected 302
+ $Response.ResponseUri | Assert-Equal -Expected "https://$httpbin_host/relative-redirect/1"
+ $Response.StatusCode | Assert-Equal -Expected 302
}
}
@@ -305,12 +309,13 @@ $tests = [Ordered]@{
$failed = $false
try {
$null = Invoke-WithWebRequest -Module $module -Request $r -Script {}
- } catch {
- $_.Exception.GetType().Name | Assert-Equals -Expected 'WebException'
- $_.Exception.Message | Assert-Equals -Expected 'Too many automatic redirections were attempted.'
+ }
+ catch {
+ $_.Exception.GetType().Name | Assert-Equal -Expected 'WebException'
+ $_.Exception.Message | Assert-Equal -Expected 'Too many automatic redirections were attempted.'
$failed = $true
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
'Basic auth as Credential' = {
@@ -324,7 +329,7 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -IgnoreBadResponse -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.StatusCode | Assert-Equal -Expected 200
}
}
@@ -340,7 +345,7 @@ $tests = [Ordered]@{
Invoke-WithWebRequest -Module $module -Request $r -IgnoreBadResponse -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.StatusCode | Assert-Equal -Expected 200
}
}
@@ -359,13 +364,13 @@ $tests = [Ordered]@{
$actual = Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.StatusCode | Assert-Equals -Expected 200
+ $Response.StatusCode | Assert-Equal -Expected 200
Convert-StreamToString -Stream $Stream
} | ConvertFrom-Json
- $actual.headers.'Testheader' | Assert-Equals -Expected 'test-header'
- $actual.headers.'testingheader' | Assert-Equals -Expected 'testing_header'
- $actual.Headers.'User-Agent' | Assert-Equals -Expected 'test-agent'
+ $actual.headers.'Testheader' | Assert-Equal -Expected 'test-header'
+ $actual.headers.'testingheader' | Assert-Equal -Expected 'testing_header'
+ $actual.Headers.'User-Agent' | Assert-Equal -Expected 'test-agent'
}
'Request with timeout' = {
@@ -378,12 +383,13 @@ $tests = [Ordered]@{
$failed = $false
try {
$null = Invoke-WithWebRequest -Module $module -Request $r -Script {}
- } catch {
+ }
+ catch {
$failed = $true
- $_.Exception.GetType().Name | Assert-Equals -Expected WebException
- $_.Exception.Message | Assert-Equals -Expected 'The operation has timed out'
+ $_.Exception.GetType().Name | Assert-Equal -Expected WebException
+ $_.Exception.Message | Assert-Equal -Expected 'The operation has timed out'
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
'Request with file URI' = {
@@ -395,12 +401,12 @@ $tests = [Ordered]@{
$actual = Invoke-WithWebRequest -Module $module -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ContentLength | Assert-Equals -Expected 6
+ $Response.ContentLength | Assert-Equal -Expected 6
Convert-StreamToString -Stream $Stream
}
- $actual | Assert-Equals -Expected "test`r`n"
- $module.Result.msg | Assert-Equals -Expected "OK"
- $module.Result.status_code | Assert-Equals -Expected 200
+ $actual | Assert-Equal -Expected "test`r`n"
+ $module.Result.msg | Assert-Equal -Expected "OK"
+ $module.Result.status_code | Assert-Equal -Expected 200
}
'Web request based on module options' = {
@@ -419,9 +425,9 @@ $tests = [Ordered]@{
$spec = @{
options = @{
url = @{ type = 'str'; required = $true }
- test = @{ type = 'str'; choices = 'abc', 'def'}
+ test = @{ type = 'str'; choices = 'abc', 'def' }
}
- mutually_exclusive = @(,@('url', 'test'))
+ mutually_exclusive = @(, @('url', 'test'))
}
$testModule = [Ansible.Basic.AnsibleModule]::Create(@(), $spec, @(Get-AnsibleWebRequestSpec))
@@ -430,10 +436,10 @@ $tests = [Ordered]@{
$actual = Invoke-WithWebRequest -Module $testModule -Request $r -Script {
Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
- $Response.ResponseUri | Assert-Equals -Expected "https://$httpbin_host/get"
+ $Response.ResponseUri | Assert-Equal -Expected "https://$httpbin_host/get"
Convert-StreamToString -Stream $Stream
} | ConvertFrom-Json
- $actual.headers.'User-Agent' | Assert-Equals -Expected 'actual-agent'
+ $actual.headers.'User-Agent' | Assert-Equal -Expected 'actual-agent'
}
'Web request with default proxy' = {
@@ -442,7 +448,7 @@ $tests = [Ordered]@{
}
$r = Get-AnsibleWebRequest @params
- $null -ne $r.Proxy | Assert-Equals -Expected $true
+ $null -ne $r.Proxy | Assert-Equal -Expected $true
}
'Web request with no proxy' = {
@@ -452,7 +458,7 @@ $tests = [Ordered]@{
}
$r = Get-AnsibleWebRequest @params
- $null -eq $r.Proxy | Assert-Equals -Expected $true
+ $null -eq $r.Proxy | Assert-Equal -Expected $true
}
}
diff --git a/test/integration/targets/module_utils_Ansible.Privilege/library/ansible_privilege_tests.ps1 b/test/integration/targets/module_utils_Ansible.Privilege/library/ansible_privilege_tests.ps1
index 7c76036a..58ee9c11 100644
--- a/test/integration/targets/module_utils_Ansible.Privilege/library/ansible_privilege_tests.ps1
+++ b/test/integration/targets/module_utils_Ansible.Privilege/library/ansible_privilege_tests.ps1
@@ -5,143 +5,82 @@
$module = [Ansible.Basic.AnsibleModule]::Create($args, @{})
-Function Assert-Equals {
+Function Assert-Equal {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $matched = $false
- if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
- $Actual.Count | Assert-Equals -Expected $Expected.Count
- for ($i = 0; $i -lt $Actual.Count; $i++) {
- $actual_value = $Actual[$i]
- $expected_value = $Expected[$i]
- Assert-Equals -Actual $actual_value -Expected $expected_value
+ process {
+ $matched = $false
+ if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
+ $Actual.Count | Assert-Equal -Expected $Expected.Count
+ for ($i = 0; $i -lt $Actual.Count; $i++) {
+ $actual_value = $Actual[$i]
+ $expected_value = $Expected[$i]
+ Assert-Equal -Actual $actual_value -Expected $expected_value
+ }
+ $matched = $true
}
- $matched = $true
- } else {
- $matched = $Actual -ceq $Expected
- }
-
- if (-not $matched) {
- if ($Actual -is [PSObject]) {
- $Actual = $Actual.ToString()
+ else {
+ $matched = $Actual -ceq $Expected
}
- $call_stack = (Get-PSCallStack)[1]
- $module.Result.test = $test
- $module.Result.actual = $Actual
- $module.Result.expected = $Expected
- $module.Result.line = $call_stack.ScriptLineNumber
- $module.Result.method = $call_stack.Position.Text
- $module.FailJson("AssertionError: actual != expected")
- }
-}
-
-Function Assert-DictionaryEquals {
- param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
- )
- $actual_keys = $Actual.Keys
- $expected_keys = $Expected.Keys
-
- $actual_keys.Count | Assert-Equals -Expected $expected_keys.Count
- foreach ($actual_entry in $Actual.GetEnumerator()) {
- $actual_key = $actual_entry.Key
- ($actual_key -cin $expected_keys) | Assert-Equals -Expected $true
- $actual_value = $actual_entry.Value
- $expected_value = $Expected.$actual_key
-
- if ($actual_value -is [System.Collections.IDictionary]) {
- $actual_value | Assert-DictionaryEquals -Expected $expected_value
- } elseif ($actual_value -is [System.Collections.ArrayList]) {
- for ($i = 0; $i -lt $actual_value.Count; $i++) {
- $actual_entry = $actual_value[$i]
- $expected_entry = $expected_value[$i]
- if ($actual_entry -is [System.Collections.IDictionary]) {
- $actual_entry | Assert-DictionaryEquals -Expected $expected_entry
- } else {
- Assert-Equals -Actual $actual_entry -Expected $expected_entry
- }
+ if (-not $matched) {
+ if ($Actual -is [PSObject]) {
+ $Actual = $Actual.ToString()
}
- } else {
- Assert-Equals -Actual $actual_value -Expected $expected_value
+
+ $call_stack = (Get-PSCallStack)[1]
+ $module.Result.test = $test
+ $module.Result.actual = $Actual
+ $module.Result.expected = $Expected
+ $module.Result.line = $call_stack.ScriptLineNumber
+ $module.Result.method = $call_stack.Position.Text
+ $module.FailJson("AssertionError: actual != expected")
}
}
- foreach ($expected_key in $expected_keys) {
- ($expected_key -cin $actual_keys) | Assert-Equals -Expected $true
- }
}
-Function Assert-Equals {
+Function Assert-DictionaryEqual {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $matched = $false
- if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
- $Actual.Count | Assert-Equals -Expected $Expected.Count
- for ($i = 0; $i -lt $Actual.Count; $i++) {
- $actual_value = $Actual[$i]
- $expected_value = $Expected[$i]
- Assert-Equals -Actual $actual_value -Expected $expected_value
- }
- $matched = $true
- } else {
- $matched = $Actual -ceq $Expected
- }
+ process {
+ $actual_keys = $Actual.Keys
+ $expected_keys = $Expected.Keys
- if (-not $matched) {
- if ($Actual -is [PSObject]) {
- $Actual = $Actual.ToString()
- }
-
- $call_stack = (Get-PSCallStack)[1]
- $module.Result.test = $test
- $module.Result.actual = $Actual
- $module.Result.expected = $Expected
- $module.Result.line = $call_stack.ScriptLineNumber
- $module.Result.method = $call_stack.Position.Text
- $module.FailJson("AssertionError: actual != expected")
- }
-}
+ $actual_keys.Count | Assert-Equal -Expected $expected_keys.Count
+ foreach ($actual_entry in $Actual.GetEnumerator()) {
+ $actual_key = $actual_entry.Key
+ ($actual_key -cin $expected_keys) | Assert-Equal -Expected $true
+ $actual_value = $actual_entry.Value
+ $expected_value = $Expected.$actual_key
-Function Assert-DictionaryEquals {
- param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
- )
- $actual_keys = $Actual.Keys
- $expected_keys = $Expected.Keys
-
- $actual_keys.Count | Assert-Equals -Expected $expected_keys.Count
- foreach ($actual_entry in $Actual.GetEnumerator()) {
- $actual_key = $actual_entry.Key
- ($actual_key -cin $expected_keys) | Assert-Equals -Expected $true
- $actual_value = $actual_entry.Value
- $expected_value = $Expected.$actual_key
-
- if ($actual_value -is [System.Collections.IDictionary]) {
- $actual_value | Assert-DictionaryEquals -Expected $expected_value
- } elseif ($actual_value -is [System.Collections.ArrayList]) {
- for ($i = 0; $i -lt $actual_value.Count; $i++) {
- $actual_entry = $actual_value[$i]
- $expected_entry = $expected_value[$i]
- if ($actual_entry -is [System.Collections.IDictionary]) {
- $actual_entry | Assert-DictionaryEquals -Expected $expected_entry
- } else {
- Assert-Equals -Actual $actual_entry -Expected $expected_entry
+ if ($actual_value -is [System.Collections.IDictionary]) {
+ $actual_value | Assert-DictionaryEqual -Expected $expected_value
+ }
+ elseif ($actual_value -is [System.Collections.ArrayList]) {
+ for ($i = 0; $i -lt $actual_value.Count; $i++) {
+ $actual_entry = $actual_value[$i]
+ $expected_entry = $expected_value[$i]
+ if ($actual_entry -is [System.Collections.IDictionary]) {
+ $actual_entry | Assert-DictionaryEqual -Expected $expected_entry
+ }
+ else {
+ Assert-Equal -Actual $actual_entry -Expected $expected_entry
+ }
}
}
- } else {
- Assert-Equals -Actual $actual_value -Expected $expected_value
+ else {
+ Assert-Equal -Actual $actual_value -Expected $expected_value
+ }
+ }
+ foreach ($expected_key in $expected_keys) {
+ ($expected_key -cin $actual_keys) | Assert-Equal -Expected $true
}
- }
- foreach ($expected_key in $expected_keys) {
- ($expected_key -cin $actual_keys) | Assert-Equals -Expected $true
}
}
@@ -150,12 +89,12 @@ $process = [Ansible.Privilege.PrivilegeUtil]::GetCurrentProcess()
$tests = @{
"Check valid privilege name" = {
$actual = [Ansible.Privilege.PrivilegeUtil]::CheckPrivilegeName("SeTcbPrivilege")
- $actual | Assert-Equals -Expected $true
+ $actual | Assert-Equal -Expected $true
}
"Check invalid privilege name" = {
$actual = [Ansible.Privilege.PrivilegeUtil]::CheckPrivilegeName("SeFake")
- $actual | Assert-Equals -Expected $false
+ $actual | Assert-Equal -Expected $false
}
"Disable a privilege" = {
@@ -163,14 +102,14 @@ $tests = @{
[Ansible.Privilege.PrivilegeUtil]::EnablePrivilege($process, "SeTimeZonePrivilege") > $null
$actual = [Ansible.Privilege.PrivilegeUtil]::DisablePrivilege($process, "SeTimeZonePrivilege")
- $actual.GetType().Name | Assert-Equals -Expected 'Dictionary`2'
- $actual.Count | Assert-Equals -Expected 1
- $actual.SeTimeZonePrivilege | Assert-Equals -Expected $true
+ $actual.GetType().Name | Assert-Equal -Expected 'Dictionary`2'
+ $actual.Count | Assert-Equal -Expected 1
+ $actual.SeTimeZonePrivilege | Assert-Equal -Expected $true
# Disable again
$actual = [Ansible.Privilege.PrivilegeUtil]::DisablePrivilege($process, "SeTimeZonePrivilege")
- $actual.GetType().Name | Assert-Equals -Expected 'Dictionary`2'
- $actual.Count | Assert-Equals -Expected 0
+ $actual.GetType().Name | Assert-Equal -Expected 'Dictionary`2'
+ $actual.Count | Assert-Equal -Expected 0
}
"Enable a privilege" = {
@@ -178,139 +117,154 @@ $tests = @{
[Ansible.Privilege.PrivilegeUtil]::DisablePrivilege($process, "SeTimeZonePrivilege") > $null
$actual = [Ansible.Privilege.PrivilegeUtil]::EnablePrivilege($process, "SeTimeZonePrivilege")
- $actual.GetType().Name | Assert-Equals -Expected 'Dictionary`2'
- $actual.Count | Assert-Equals -Expected 1
- $actual.SeTimeZonePrivilege | Assert-Equals -Expected $false
+ $actual.GetType().Name | Assert-Equal -Expected 'Dictionary`2'
+ $actual.Count | Assert-Equal -Expected 1
+ $actual.SeTimeZonePrivilege | Assert-Equal -Expected $false
# Disable again
$actual = [Ansible.Privilege.PrivilegeUtil]::EnablePrivilege($process, "SeTimeZonePrivilege")
- $actual.GetType().Name | Assert-Equals -Expected 'Dictionary`2'
- $actual.Count | Assert-Equals -Expected 0
+ $actual.GetType().Name | Assert-Equal -Expected 'Dictionary`2'
+ $actual.Count | Assert-Equal -Expected 0
}
"Disable and revert privileges" = {
$current_state = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
$previous_state = [Ansible.Privilege.PrivilegeUtil]::DisableAllPrivileges($process)
- $previous_state.GetType().Name | Assert-Equals -Expected 'Dictionary`2'
+ $previous_state.GetType().Name | Assert-Equal -Expected 'Dictionary`2'
foreach ($previous_state_entry in $previous_state.GetEnumerator()) {
- $previous_state_entry.Value | Assert-Equals -Expected $true
+ $previous_state_entry.Value | Assert-Equal -Expected $true
}
# Disable again
$previous_state2 = [Ansible.Privilege.PrivilegeUtil]::DisableAllPrivileges($process)
- $previous_state2.Count | Assert-Equals -Expected 0
+ $previous_state2.Count | Assert-Equal -Expected 0
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
foreach ($actual_entry in $actual.GetEnumerator()) {
- $actual_entry.Value -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
+ $actual_entry.Value -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
}
[Ansible.Privilege.PrivilegeUtil]::SetTokenPrivileges($process, $previous_state) > $null
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $actual | Assert-DictionaryEquals -Expected $current_state
+ $actual | Assert-DictionaryEqual -Expected $current_state
}
"Remove a privilege" = {
[Ansible.Privilege.PrivilegeUtil]::RemovePrivilege($process, "SeUndockPrivilege") > $null
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $actual.ContainsKey("SeUndockPrivilege") | Assert-Equals -Expected $false
+ $actual.ContainsKey("SeUndockPrivilege") | Assert-Equal -Expected $false
}
"Test Enabler" = {
# Disable privilege at the start
$new_state = @{
- SeTimeZonePrivilege = $false
- SeShutdownPrivilege = $false
- SeIncreaseWorkingSetPrivilege = $false
+ SeTimeZonePrivilege = $false
+ SeShutdownPrivilege = $false
+ SeIncreaseWorkingSetPrivilege = $false
}
[Ansible.Privilege.PrivilegeUtil]::SetTokenPrivileges($process, $new_state) > $null
$check_state = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $check_state.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
- $check_state.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
- $check_state.SeIncreaseWorkingSetPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
+ $check_state.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
+ $check_state.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
+ $check_state.SeIncreaseWorkingSetPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
# Check that strict = false won't validate privileges not held but activates the ones we want
$enabler = New-Object -TypeName Ansible.Privilege.PrivilegeEnabler -ArgumentList $false, "SeTimeZonePrivilege", "SeShutdownPrivilege", "SeTcbPrivilege"
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
- $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
- $actual.SeIncreaseWorkingSetPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
- $actual.ContainsKey("SeTcbPrivilege") | Assert-Equals -Expected $false
+ $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled |
+ Assert-Equal -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
+ $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled |
+ Assert-Equal -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
+ $actual.SeIncreaseWorkingSetPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
+ $actual.ContainsKey("SeTcbPrivilege") | Assert-Equal -Expected $false
# Now verify a no-op enabler will not rever back to disabled
$enabler2 = New-Object -TypeName Ansible.Privilege.PrivilegeEnabler -ArgumentList $false, "SeTimeZonePrivilege", "SeShutdownPrivilege", "SeTcbPrivilege"
$enabler2.Dispose()
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
- $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
+ $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled |
+ Assert-Equal -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
+ $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled |
+ Assert-Equal -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
# Verify that when disposing the object the privileges are reverted
$enabler.Dispose()
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
- $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
+ $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
+ $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
}
"Test Enabler strict" = {
# Disable privilege at the start
$new_state = @{
- SeTimeZonePrivilege = $false
- SeShutdownPrivilege = $false
- SeIncreaseWorkingSetPrivilege = $false
+ SeTimeZonePrivilege = $false
+ SeShutdownPrivilege = $false
+ SeIncreaseWorkingSetPrivilege = $false
}
[Ansible.Privilege.PrivilegeUtil]::SetTokenPrivileges($process, $new_state) > $null
$check_state = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $check_state.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
- $check_state.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
- $check_state.SeIncreaseWorkingSetPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
+ $check_state.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
+ $check_state.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
+ $check_state.SeIncreaseWorkingSetPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
# Check that strict = false won't validate privileges not held but activates the ones we want
$enabler = New-Object -TypeName Ansible.Privilege.PrivilegeEnabler -ArgumentList $true, "SeTimeZonePrivilege", "SeShutdownPrivilege"
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
- $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
- $actual.SeIncreaseWorkingSetPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
+ $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled |
+ Assert-Equal -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
+ $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled |
+ Assert-Equal -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
+ $actual.SeIncreaseWorkingSetPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
# Now verify a no-op enabler will not rever back to disabled
$enabler2 = New-Object -TypeName Ansible.Privilege.PrivilegeEnabler -ArgumentList $true, "SeTimeZonePrivilege", "SeShutdownPrivilege"
$enabler2.Dispose()
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
- $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
+ $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled |
+ Assert-Equal -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
+ $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled |
+ Assert-Equal -Expected ([Ansible.Privilege.PrivilegeAttributes]::Enabled)
# Verify that when disposing the object the privileges are reverted
$enabler.Dispose()
$actual = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
- $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
+ $actual.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
+ $actual.SeShutdownPrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
}
"Test Enabler invalid privilege" = {
$failed = $false
try {
New-Object -TypeName Ansible.Privilege.PrivilegeEnabler -ArgumentList $false, "SeTimeZonePrivilege", "SeFake"
- } catch {
+ }
+ catch {
$failed = $true
- $_.Exception.InnerException.Message | Assert-Equals -Expected "Failed to enable privilege(s) SeTimeZonePrivilege, SeFake (A specified privilege does not exist, Win32ErrorCode 1313)"
+ $expected = "Failed to enable privilege(s) SeTimeZonePrivilege, SeFake (A specified privilege does not exist, Win32ErrorCode 1313)"
+ $_.Exception.InnerException.Message | Assert-Equal -Expected $expected
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Test Enabler strict failure" = {
# Start disabled
[Ansible.Privilege.PrivilegeUtil]::DisablePrivilege($process, "SeTimeZonePrivilege") > $null
$check_state = [Ansible.Privilege.PrivilegeUtil]::GetAllPrivilegeInfo($process)
- $check_state.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equals -Expected 0
+ $check_state.SeTimeZonePrivilege -band [Ansible.Privilege.PrivilegeAttributes]::Enabled | Assert-Equal -Expected 0
$failed = $false
try {
New-Object -TypeName Ansible.Privilege.PrivilegeEnabler -ArgumentList $true, "SeTimeZonePrivilege", "SeTcbPrivilege"
- } catch {
+ }
+ catch {
$failed = $true
- $_.Exception.InnerException.Message | Assert-Equals -Expected "Failed to enable privilege(s) SeTimeZonePrivilege, SeTcbPrivilege (Not all privileges or groups referenced are assigned to the caller, Win32ErrorCode 1300)"
+ $expected = -join @(
+ "Failed to enable privilege(s) SeTimeZonePrivilege, SeTcbPrivilege "
+ "(Not all privileges or groups referenced are assigned to the caller, Win32ErrorCode 1300)"
+ )
+ $_.Exception.InnerException.Message | Assert-Equal -Expected $expected
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
}
diff --git a/test/integration/targets/module_utils_Ansible.Process/library/ansible_process_tests.ps1 b/test/integration/targets/module_utils_Ansible.Process/library/ansible_process_tests.ps1
index d906dfc5..bca7eb1e 100644
--- a/test/integration/targets/module_utils_Ansible.Process/library/ansible_process_tests.ps1
+++ b/test/integration/targets/module_utils_Ansible.Process/library/ansible_process_tests.ps1
@@ -5,37 +5,40 @@
$module = [Ansible.Basic.AnsibleModule]::Create($args, @{})
-Function Assert-Equals {
+Function Assert-Equal {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $matched = $false
- if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
- $Actual.Count | Assert-Equals -Expected $Expected.Count
- for ($i = 0; $i -lt $Actual.Count; $i++) {
- $actual_value = $Actual[$i]
- $expected_value = $Expected[$i]
- Assert-Equals -Actual $actual_value -Expected $expected_value
+ process {
+ $matched = $false
+ if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
+ $Actual.Count | Assert-Equal -Expected $Expected.Count
+ for ($i = 0; $i -lt $Actual.Count; $i++) {
+ $actual_value = $Actual[$i]
+ $expected_value = $Expected[$i]
+ Assert-Equal -Actual $actual_value -Expected $expected_value
+ }
+ $matched = $true
}
- $matched = $true
- } else {
- $matched = $Actual -ceq $Expected
- }
-
- if (-not $matched) {
- if ($Actual -is [PSObject]) {
- $Actual = $Actual.ToString()
+ else {
+ $matched = $Actual -ceq $Expected
}
- $call_stack = (Get-PSCallStack)[1]
- $module.Result.test = $test
- $module.Result.actual = $Actual
- $module.Result.expected = $Expected
- $module.Result.line = $call_stack.ScriptLineNumber
- $module.Result.method = $call_stack.Position.Text
- $module.FailJson("AssertionError: actual != expected")
+ if (-not $matched) {
+ if ($Actual -is [PSObject]) {
+ $Actual = $Actual.ToString()
+ }
+
+ $call_stack = (Get-PSCallStack)[1]
+ $module.Result.test = $test
+ $module.Result.actual = $Actual
+ $module.Result.expected = $Expected
+ $module.Result.line = $call_stack.ScriptLineNumber
+ $module.Result.method = $call_stack.Position.Text
+ $module.FailJson("AssertionError: actual != expected")
+ }
}
}
@@ -43,116 +46,118 @@ $tests = @{
"ParseCommandLine empty string" = {
$expected = @((Get-Process -Id $pid).Path)
$actual = [Ansible.Process.ProcessUtil]::ParseCommandLine("")
- Assert-Equals -Actual $actual -Expected $expected
+ Assert-Equal -Actual $actual -Expected $expected
}
"ParseCommandLine single argument" = {
$expected = @("powershell.exe")
$actual = [Ansible.Process.ProcessUtil]::ParseCommandLine("powershell.exe")
- Assert-Equals -Actual $actual -Expected $expected
+ Assert-Equal -Actual $actual -Expected $expected
}
"ParseCommandLine multiple arguments" = {
$expected = @("powershell.exe", "-File", "C:\temp\script.ps1")
$actual = [Ansible.Process.ProcessUtil]::ParseCommandLine("powershell.exe -File C:\temp\script.ps1")
- Assert-Equals -Actual $actual -Expected $expected
+ Assert-Equal -Actual $actual -Expected $expected
}
"ParseCommandLine comples arguments" = {
$expected = @('abc', 'd', 'ef gh', 'i\j', 'k"l', 'm\n op', 'ADDLOCAL=qr, s', 'tuv\', 'w''x', 'yz')
$actual = [Ansible.Process.ProcessUtil]::ParseCommandLine('abc d "ef gh" i\j k\"l m\\"n op" ADDLOCAL="qr, s" tuv\ w''x yz')
- Assert-Equals -Actual $actual -Expected $expected
+ Assert-Equal -Actual $actual -Expected $expected
}
"SearchPath normal" = {
$expected = "$($env:SystemRoot)\System32\WindowsPowerShell\v1.0\powershell.exe"
$actual = [Ansible.Process.ProcessUtil]::SearchPath("powershell.exe")
- $actual | Assert-Equals -Expected $expected
+ $actual | Assert-Equal -Expected $expected
}
"SearchPath missing" = {
$failed = $false
try {
[Ansible.Process.ProcessUtil]::SearchPath("fake.exe")
- } catch {
+ }
+ catch {
$failed = $true
- $_.Exception.InnerException.GetType().FullName | Assert-Equals -Expected "System.IO.FileNotFoundException"
+ $_.Exception.InnerException.GetType().FullName | Assert-Equal -Expected "System.IO.FileNotFoundException"
$expected = 'Exception calling "SearchPath" with "1" argument(s): "Could not find file ''fake.exe''."'
- $_.Exception.Message | Assert-Equals -Expected $expected
+ $_.Exception.Message | Assert-Equal -Expected $expected
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"CreateProcess basic" = {
$actual = [Ansible.Process.ProcessUtil]::CreateProcess("whoami.exe")
- $actual.GetType().FullName | Assert-Equals -Expected "Ansible.Process.Result"
- $actual.StandardOut | Assert-Equals -Expected "$(&whoami.exe)`r`n"
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.GetType().FullName | Assert-Equal -Expected "Ansible.Process.Result"
+ $actual.StandardOut | Assert-Equal -Expected "$(&whoami.exe)`r`n"
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess stderr" = {
$actual = [Ansible.Process.ProcessUtil]::CreateProcess("powershell.exe [System.Console]::Error.WriteLine('hi')")
- $actual.StandardOut | Assert-Equals -Expected ""
- $actual.StandardError | Assert-Equals -Expected "hi`r`n"
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected ""
+ $actual.StandardError | Assert-Equal -Expected "hi`r`n"
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess exit code" = {
$actual = [Ansible.Process.ProcessUtil]::CreateProcess("powershell.exe exit 10")
- $actual.StandardOut | Assert-Equals -Expected ""
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 10
+ $actual.StandardOut | Assert-Equal -Expected ""
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 10
}
"CreateProcess bad executable" = {
$failed = $false
try {
[Ansible.Process.ProcessUtil]::CreateProcess("fake.exe")
- } catch {
+ }
+ catch {
$failed = $true
- $_.Exception.InnerException.GetType().FullName | Assert-Equals -Expected "Ansible.Process.Win32Exception"
+ $_.Exception.InnerException.GetType().FullName | Assert-Equal -Expected "Ansible.Process.Win32Exception"
$expected = 'Exception calling "CreateProcess" with "1" argument(s): "CreateProcessW() failed '
$expected += '(The system cannot find the file specified, Win32ErrorCode 2)"'
- $_.Exception.Message | Assert-Equals -Expected $expected
+ $_.Exception.Message | Assert-Equal -Expected $expected
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"CreateProcess with unicode" = {
$actual = [Ansible.Process.ProcessUtil]::CreateProcess("cmd.exe /c echo 💩 café")
- $actual.StandardOut | Assert-Equals -Expected "💩 café`r`n"
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected "💩 café`r`n"
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, "cmd.exe /c echo 💩 café", $null, $null)
- $actual.StandardOut | Assert-Equals -Expected "💩 café`r`n"
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected "💩 café`r`n"
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess without working dir" = {
$expected = $pwd.Path + "`r`n"
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, 'powershell.exe $pwd.Path', $null, $null)
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess with working dir" = {
$expected = "C:\Windows`r`n"
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, 'powershell.exe $pwd.Path', "C:\Windows", $null)
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess without environment" = {
$expected = "$($env:USERNAME)`r`n"
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, 'powershell.exe $env:TEST; $env:USERNAME', $null, $null)
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess with environment" = {
@@ -161,69 +166,70 @@ $tests = @{
TEST2 = "Testing 2"
}
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, 'cmd.exe /c set', $null, $env_vars)
- ("TEST=tesTing" -cin $actual.StandardOut.Split("`r`n")) | Assert-Equals -Expected $true
- ("TEST2=Testing 2" -cin $actual.StandardOut.Split("`r`n")) | Assert-Equals -Expected $true
- ("USERNAME=$($env:USERNAME)" -cnotin $actual.StandardOut.Split("`r`n")) | Assert-Equals -Expected $true
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ ("TEST=tesTing" -cin $actual.StandardOut.Split("`r`n")) | Assert-Equal -Expected $true
+ ("TEST2=Testing 2" -cin $actual.StandardOut.Split("`r`n")) | Assert-Equal -Expected $true
+ ("USERNAME=$($env:USERNAME)" -cnotin $actual.StandardOut.Split("`r`n")) | Assert-Equal -Expected $true
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess with string stdin" = {
$expected = "input value`r`n`r`n"
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, 'powershell.exe [System.Console]::In.ReadToEnd()',
$null, $null, "input value")
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess with string stdin and newline" = {
$expected = "input value`r`n`r`n"
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, 'powershell.exe [System.Console]::In.ReadToEnd()',
$null, $null, "input value`r`n")
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess with byte stdin" = {
$expected = "input value`r`n"
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, 'powershell.exe [System.Console]::In.ReadToEnd()',
$null, $null, [System.Text.Encoding]::UTF8.GetBytes("input value"))
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess with byte stdin and newline" = {
$expected = "input value`r`n`r`n"
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, 'powershell.exe [System.Console]::In.ReadToEnd()',
$null, $null, [System.Text.Encoding]::UTF8.GetBytes("input value`r`n"))
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess with lpApplicationName" = {
$expected = "abc`r`n"
$full_path = "$($env:SystemRoot)\System32\WindowsPowerShell\v1.0\powershell.exe"
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($full_path, "Write-Output 'abc'", $null, $null)
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($full_path, "powershell.exe Write-Output 'abc'", $null, $null)
- $actual.StandardOut | Assert-Equals -Expected $expected
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected $expected
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
"CreateProcess with unicode and us-ascii encoding" = {
- $poop = [System.Char]::ConvertFromUtf32(0xE05A) # Coverage breaks due to script parsing encoding issues with unicode chars, just use the code point instead
+ # Coverage breaks due to script parsing encoding issues with unicode chars, just use the code point instead
+ $poop = [System.Char]::ConvertFromUtf32(0xE05A)
$actual = [Ansible.Process.ProcessUtil]::CreateProcess($null, "cmd.exe /c echo $poop café", $null, $null, '', 'us-ascii')
- $actual.StandardOut | Assert-Equals -Expected "??? caf??`r`n"
- $actual.StandardError | Assert-Equals -Expected ""
- $actual.ExitCode | Assert-Equals -Expected 0
+ $actual.StandardOut | Assert-Equal -Expected "??? caf??`r`n"
+ $actual.StandardError | Assert-Equal -Expected ""
+ $actual.ExitCode | Assert-Equal -Expected 0
}
}
diff --git a/test/integration/targets/module_utils_Ansible.Service/library/ansible_service_tests.ps1 b/test/integration/targets/module_utils_Ansible.Service/library/ansible_service_tests.ps1
index 6c8f729b..dab42d4c 100644
--- a/test/integration/targets/module_utils_Ansible.Service/library/ansible_service_tests.ps1
+++ b/test/integration/targets/module_utils_Ansible.Service/library/ansible_service_tests.ps1
@@ -9,49 +9,52 @@ $module = [Ansible.Basic.AnsibleModule]::Create($args, @{})
$path = "$env:SystemRoot\System32\svchost.exe"
-Function Assert-Equals {
+Function Assert-Equal {
param(
- [Parameter(Mandatory=$true, ValueFromPipeline=$true)][AllowNull()]$Actual,
- [Parameter(Mandatory=$true, Position=0)][AllowNull()]$Expected
+ [Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
+ [Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
- $matched = $false
- if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array] -or $Actual -is [System.Collections.IList]) {
- $Actual.Count | Assert-Equals -Expected $Expected.Count
- for ($i = 0; $i -lt $Actual.Count; $i++) {
- $actualValue = $Actual[$i]
- $expectedValue = $Expected[$i]
- Assert-Equals -Actual $actualValue -Expected $expectedValue
+ process {
+ $matched = $false
+ if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array] -or $Actual -is [System.Collections.IList]) {
+ $Actual.Count | Assert-Equal -Expected $Expected.Count
+ for ($i = 0; $i -lt $Actual.Count; $i++) {
+ $actualValue = $Actual[$i]
+ $expectedValue = $Expected[$i]
+ Assert-Equal -Actual $actualValue -Expected $expectedValue
+ }
+ $matched = $true
}
- $matched = $true
- } else {
- $matched = $Actual -ceq $Expected
- }
-
- if (-not $matched) {
- if ($Actual -is [PSObject]) {
- $Actual = $Actual.ToString()
+ else {
+ $matched = $Actual -ceq $Expected
}
- $call_stack = (Get-PSCallStack)[1]
- $module.Result.test = $test
- $module.Result.actual = $Actual
- $module.Result.expected = $Expected
- $module.Result.line = $call_stack.ScriptLineNumber
- $module.Result.method = $call_stack.Position.Text
+ if (-not $matched) {
+ if ($Actual -is [PSObject]) {
+ $Actual = $Actual.ToString()
+ }
+
+ $call_stack = (Get-PSCallStack)[1]
+ $module.Result.test = $test
+ $module.Result.actual = $Actual
+ $module.Result.expected = $Expected
+ $module.Result.line = $call_stack.ScriptLineNumber
+ $module.Result.method = $call_stack.Position.Text
- $module.FailJson("AssertionError: actual != expected")
+ $module.FailJson("AssertionError: actual != expected")
+ }
}
}
Function Invoke-Sc {
[CmdletBinding()]
param (
- [Parameter(Mandatory=$true)]
+ [Parameter(Mandatory = $true)]
[String]
$Action,
- [Parameter(Mandatory=$true)]
+ [Parameter(Mandatory = $true)]
[String]
$Name,
@@ -66,7 +69,8 @@ Function Invoke-Sc {
$commandArgs.Add("$($arg.Key)=")
$commandArgs.Add($arg.Value)
}
- } else {
+ }
+ else {
foreach ($arg in $Arguments) {
$commandArgs.Add($arg)
}
@@ -118,7 +122,8 @@ Function Invoke-Sc {
}
$v = $lineSplit[1].Trim()
- } else {
+ }
+ else {
$k = $currentKey
$v = $line
}
@@ -126,18 +131,21 @@ Function Invoke-Sc {
if ($qtriggerSection.Count -gt 0) {
if ($k -eq 'DATA') {
$qtriggerSection.Data.Add($v)
- } else {
+ }
+ else {
$qtriggerSection.Type = $k
$qtriggerSection.SubType = $v
$qtriggerSection.Data = [System.Collections.Generic.List[String]]@()
}
- } else {
+ }
+ else {
if ($info.ContainsKey($k)) {
if ($info[$k] -isnot [System.Collections.Generic.List[String]]) {
$info[$k] = [System.Collections.Generic.List[String]]@($info[$k])
}
$info[$k].Add($v)
- } else {
+ }
+ else {
$currentKey = $k
$info[$k] = $v
}
@@ -155,42 +163,43 @@ $tests = [Ordered]@{
"Props on service created by New-Service" = {
$actual = New-Object -TypeName Ansible.Service.Service -ArgumentList $serviceName
- $actual.ServiceName | Assert-Equals -Expected $serviceName
- $actual.ServiceType | Assert-Equals -Expected ([Ansible.Service.ServiceType]::Win32OwnProcess)
- $actual.StartType | Assert-Equals -Expected ([Ansible.Service.ServiceStartType]::DemandStart)
- $actual.ErrorControl | Assert-Equals -Expected ([Ansible.Service.ErrorControl]::Normal)
- $actual.Path | Assert-Equals -Expected ('"{0}"' -f $path)
- $actual.LoadOrderGroup | Assert-Equals -Expected ""
- $actual.DependentOn.Count | Assert-Equals -Expected 0
- $actual.Account | Assert-Equals -Expected (
+ $actual.ServiceName | Assert-Equal -Expected $serviceName
+ $actual.ServiceType | Assert-Equal -Expected ([Ansible.Service.ServiceType]::Win32OwnProcess)
+ $actual.StartType | Assert-Equal -Expected ([Ansible.Service.ServiceStartType]::DemandStart)
+ $actual.ErrorControl | Assert-Equal -Expected ([Ansible.Service.ErrorControl]::Normal)
+ $actual.Path | Assert-Equal -Expected ('"{0}"' -f $path)
+ $actual.LoadOrderGroup | Assert-Equal -Expected ""
+ $actual.DependentOn.Count | Assert-Equal -Expected 0
+ $actual.Account | Assert-Equal -Expected (
[System.Security.Principal.SecurityIdentifier]'S-1-5-18').Translate([System.Security.Principal.NTAccount]
)
- $actual.DisplayName | Assert-Equals -Expected $serviceName
- $actual.Description | Assert-Equals -Expected $null
- $actual.FailureActions.ResetPeriod | Assert-Equals -Expected 0
- $actual.FailureActions.RebootMsg | Assert-Equals -Expected $null
- $actual.FailureActions.Command | Assert-Equals -Expected $null
- $actual.FailureActions.Actions.Count | Assert-Equals -Expected 0
- $actual.FailureActionsOnNonCrashFailures | Assert-Equals -Expected $false
- $actual.ServiceSidInfo | Assert-Equals -Expected ([Ansible.Service.ServiceSidInfo]::None)
- $actual.RequiredPrivileges.Count | Assert-Equals -Expected 0
+ $actual.DisplayName | Assert-Equal -Expected $serviceName
+ $actual.Description | Assert-Equal -Expected $null
+ $actual.FailureActions.ResetPeriod | Assert-Equal -Expected 0
+ $actual.FailureActions.RebootMsg | Assert-Equal -Expected $null
+ $actual.FailureActions.Command | Assert-Equal -Expected $null
+ $actual.FailureActions.Actions.Count | Assert-Equal -Expected 0
+ $actual.FailureActionsOnNonCrashFailures | Assert-Equal -Expected $false
+ $actual.ServiceSidInfo | Assert-Equal -Expected ([Ansible.Service.ServiceSidInfo]::None)
+ $actual.RequiredPrivileges.Count | Assert-Equal -Expected 0
# Cannot test default values as it differs per OS version
- $null -ne $actual.PreShutdownTimeout | Assert-Equals -Expected $true
- $actual.Triggers.Count | Assert-Equals -Expected 0
- $actual.PreferredNode | Assert-Equals -Expected $null
+ $null -ne $actual.PreShutdownTimeout | Assert-Equal -Expected $true
+ $actual.Triggers.Count | Assert-Equal -Expected 0
+ $actual.PreferredNode | Assert-Equal -Expected $null
if ([Environment]::OSVersion.Version -ge [Version]'6.3') {
- $actual.LaunchProtection | Assert-Equals -Expected ([Ansible.Service.LaunchProtection]::None)
- } else {
- $actual.LaunchProtection | Assert-Equals -Expected $null
+ $actual.LaunchProtection | Assert-Equal -Expected ([Ansible.Service.LaunchProtection]::None)
+ }
+ else {
+ $actual.LaunchProtection | Assert-Equal -Expected $null
}
- $actual.State | Assert-Equals -Expected ([Ansible.Service.ServiceStatus]::Stopped)
- $actual.Win32ExitCode | Assert-Equals -Expected 1077 # ERROR_SERVICE_NEVER_STARTED
- $actual.ServiceExitCode | Assert-Equals -Expected 0
- $actual.Checkpoint | Assert-Equals -Expected 0
- $actual.WaitHint | Assert-Equals -Expected 0
- $actual.ProcessId | Assert-Equals -Expected 0
- $actual.ServiceFlags | Assert-Equals -Expected ([Ansible.Service.ServiceFlags]::None)
- $actual.DependedBy.Count | Assert-Equals 0
+ $actual.State | Assert-Equal -Expected ([Ansible.Service.ServiceStatus]::Stopped)
+ $actual.Win32ExitCode | Assert-Equal -Expected 1077 # ERROR_SERVICE_NEVER_STARTED
+ $actual.ServiceExitCode | Assert-Equal -Expected 0
+ $actual.Checkpoint | Assert-Equal -Expected 0
+ $actual.WaitHint | Assert-Equal -Expected 0
+ $actual.ProcessId | Assert-Equal -Expected 0
+ $actual.ServiceFlags | Assert-Equal -Expected ([Ansible.Service.ServiceFlags]::None)
+ $actual.DependedBy.Count | Assert-Equal 0
}
"Service creation through util" = {
@@ -199,44 +208,46 @@ $tests = [Ordered]@{
try {
$cmdletService = Get-Service -Name $testName -ErrorAction SilentlyContinue
- $null -ne $cmdletService | Assert-Equals -Expected $true
-
- $actual.ServiceName | Assert-Equals -Expected $testName
- $actual.ServiceType | Assert-Equals -Expected ([Ansible.Service.ServiceType]::Win32OwnProcess)
- $actual.StartType | Assert-Equals -Expected ([Ansible.Service.ServiceStartType]::DemandStart)
- $actual.ErrorControl | Assert-Equals -Expected ([Ansible.Service.ErrorControl]::Normal)
- $actual.Path | Assert-Equals -Expected ('"{0}"' -f $path)
- $actual.LoadOrderGroup | Assert-Equals -Expected ""
- $actual.DependentOn.Count | Assert-Equals -Expected 0
- $actual.Account | Assert-Equals -Expected (
+ $null -ne $cmdletService | Assert-Equal -Expected $true
+
+ $actual.ServiceName | Assert-Equal -Expected $testName
+ $actual.ServiceType | Assert-Equal -Expected ([Ansible.Service.ServiceType]::Win32OwnProcess)
+ $actual.StartType | Assert-Equal -Expected ([Ansible.Service.ServiceStartType]::DemandStart)
+ $actual.ErrorControl | Assert-Equal -Expected ([Ansible.Service.ErrorControl]::Normal)
+ $actual.Path | Assert-Equal -Expected ('"{0}"' -f $path)
+ $actual.LoadOrderGroup | Assert-Equal -Expected ""
+ $actual.DependentOn.Count | Assert-Equal -Expected 0
+ $actual.Account | Assert-Equal -Expected (
[System.Security.Principal.SecurityIdentifier]'S-1-5-18').Translate([System.Security.Principal.NTAccount]
)
- $actual.DisplayName | Assert-Equals -Expected $testName
- $actual.Description | Assert-Equals -Expected $null
- $actual.FailureActions.ResetPeriod | Assert-Equals -Expected 0
- $actual.FailureActions.RebootMsg | Assert-Equals -Expected $null
- $actual.FailureActions.Command | Assert-Equals -Expected $null
- $actual.FailureActions.Actions.Count | Assert-Equals -Expected 0
- $actual.FailureActionsOnNonCrashFailures | Assert-Equals -Expected $false
- $actual.ServiceSidInfo | Assert-Equals -Expected ([Ansible.Service.ServiceSidInfo]::None)
- $actual.RequiredPrivileges.Count | Assert-Equals -Expected 0
- $null -ne $actual.PreShutdownTimeout | Assert-Equals -Expected $true
- $actual.Triggers.Count | Assert-Equals -Expected 0
- $actual.PreferredNode | Assert-Equals -Expected $null
+ $actual.DisplayName | Assert-Equal -Expected $testName
+ $actual.Description | Assert-Equal -Expected $null
+ $actual.FailureActions.ResetPeriod | Assert-Equal -Expected 0
+ $actual.FailureActions.RebootMsg | Assert-Equal -Expected $null
+ $actual.FailureActions.Command | Assert-Equal -Expected $null
+ $actual.FailureActions.Actions.Count | Assert-Equal -Expected 0
+ $actual.FailureActionsOnNonCrashFailures | Assert-Equal -Expected $false
+ $actual.ServiceSidInfo | Assert-Equal -Expected ([Ansible.Service.ServiceSidInfo]::None)
+ $actual.RequiredPrivileges.Count | Assert-Equal -Expected 0
+ $null -ne $actual.PreShutdownTimeout | Assert-Equal -Expected $true
+ $actual.Triggers.Count | Assert-Equal -Expected 0
+ $actual.PreferredNode | Assert-Equal -Expected $null
if ([Environment]::OSVersion.Version -ge [Version]'6.3') {
- $actual.LaunchProtection | Assert-Equals -Expected ([Ansible.Service.LaunchProtection]::None)
- } else {
- $actual.LaunchProtection | Assert-Equals -Expected $null
+ $actual.LaunchProtection | Assert-Equal -Expected ([Ansible.Service.LaunchProtection]::None)
+ }
+ else {
+ $actual.LaunchProtection | Assert-Equal -Expected $null
}
- $actual.State | Assert-Equals -Expected ([Ansible.Service.ServiceStatus]::Stopped)
- $actual.Win32ExitCode | Assert-Equals -Expected 1077 # ERROR_SERVICE_NEVER_STARTED
- $actual.ServiceExitCode | Assert-Equals -Expected 0
- $actual.Checkpoint | Assert-Equals -Expected 0
- $actual.WaitHint | Assert-Equals -Expected 0
- $actual.ProcessId | Assert-Equals -Expected 0
- $actual.ServiceFlags | Assert-Equals -Expected ([Ansible.Service.ServiceFlags]::None)
- $actual.DependedBy.Count | Assert-Equals 0
- } finally {
+ $actual.State | Assert-Equal -Expected ([Ansible.Service.ServiceStatus]::Stopped)
+ $actual.Win32ExitCode | Assert-Equal -Expected 1077 # ERROR_SERVICE_NEVER_STARTED
+ $actual.ServiceExitCode | Assert-Equal -Expected 0
+ $actual.Checkpoint | Assert-Equal -Expected 0
+ $actual.WaitHint | Assert-Equal -Expected 0
+ $actual.ProcessId | Assert-Equal -Expected 0
+ $actual.ServiceFlags | Assert-Equal -Expected ([Ansible.Service.ServiceFlags]::None)
+ $actual.DependedBy.Count | Assert-Equal 0
+ }
+ finally {
$actual.Delete()
}
}
@@ -245,13 +256,14 @@ $tests = [Ordered]@{
$failed = $false
try {
$null = New-Object -TypeName Ansible.Service.Service -ArgumentList 'fake_service'
- } catch [Ansible.Service.ServiceManagerException] {
+ }
+ catch [Ansible.Service.ServiceManagerException] {
# 1060 == ERROR_SERVICE_DOES_NOT_EXIST
- $_.Exception.Message -like '*Win32ErrorCode 1060 - 0x00000424*' | Assert-Equals -Expected $true
+ $_.Exception.Message -like '*Win32ErrorCode 1060 - 0x00000424*' | Assert-Equal -Expected $true
$failed = $true
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
"Open with specific access rights" = {
@@ -260,19 +272,20 @@ $tests = [Ordered]@{
)
# QueryStatus can get the status
- $service.State | Assert-Equals -Expected ([Ansible.Service.ServiceStatus]::Stopped)
+ $service.State | Assert-Equal -Expected ([Ansible.Service.ServiceStatus]::Stopped)
# Should fail to get the config because we did not request that right
$failed = $false
try {
$service.Path = 'fail'
- } catch [Ansible.Service.ServiceManagerException] {
+ }
+ catch [Ansible.Service.ServiceManagerException] {
# 5 == ERROR_ACCESS_DENIED
- $_.Exception.Message -like '*Win32ErrorCode 5 - 0x00000005*' | Assert-Equals -Expected $true
+ $_.Exception.Message -like '*Win32ErrorCode 5 - 0x00000005*' | Assert-Equal -Expected $true
$failed = $true
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
}
@@ -281,12 +294,12 @@ $tests = [Ordered]@{
$service.ServiceType = [Ansible.Service.ServiceType]::Win32ShareProcess
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.ServiceType | Assert-Equals -Expected ([Ansible.Service.ServiceType]::Win32ShareProcess)
- $actual.TYPE | Assert-Equals -Expected "20 WIN32_SHARE_PROCESS"
+ $service.ServiceType | Assert-Equal -Expected ([Ansible.Service.ServiceType]::Win32ShareProcess)
+ $actual.TYPE | Assert-Equal -Expected "20 WIN32_SHARE_PROCESS"
- $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{type="own"}
+ $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{type = "own" }
$service.Refresh()
- $service.ServiceType | Assert-Equals -Expected ([Ansible.Service.ServiceType]::Win32OwnProcess)
+ $service.ServiceType | Assert-Equal -Expected ([Ansible.Service.ServiceType]::Win32OwnProcess)
}
"Create desktop interactive service" = {
@@ -294,29 +307,30 @@ $tests = [Ordered]@{
$service.ServiceType = [Ansible.Service.ServiceType]'Win32OwnProcess, InteractiveProcess'
$actual = Invoke-Sc -Action qc -Name $serviceName
- $actual.TYPE | Assert-Equals -Expected "110 WIN32_OWN_PROCESS (interactive)"
- $service.ServiceType | Assert-Equals -Expected ([Ansible.Service.ServiceType]'Win32OwnProcess, InteractiveProcess')
+ $actual.TYPE | Assert-Equal -Expected "110 WIN32_OWN_PROCESS (interactive)"
+ $service.ServiceType | Assert-Equal -Expected ([Ansible.Service.ServiceType]'Win32OwnProcess, InteractiveProcess')
# Change back from interactive process
$service.ServiceType = [Ansible.Service.ServiceType]::Win32OwnProcess
$actual = Invoke-Sc -Action qc -Name $serviceName
- $actual.TYPE | Assert-Equals -Expected "10 WIN32_OWN_PROCESS"
- $service.ServiceType | Assert-Equals -Expected ([Ansible.Service.ServiceType]::Win32OwnProcess)
+ $actual.TYPE | Assert-Equal -Expected "10 WIN32_OWN_PROCESS"
+ $service.ServiceType | Assert-Equal -Expected ([Ansible.Service.ServiceType]::Win32OwnProcess)
$service.Account = [System.Security.Principal.SecurityIdentifier]'S-1-5-20'
$failed = $false
try {
$service.ServiceType = [Ansible.Service.ServiceType]'Win32OwnProcess, InteractiveProcess'
- } catch [Ansible.Service.ServiceManagerException] {
+ }
+ catch [Ansible.Service.ServiceManagerException] {
$failed = $true
- $_.Exception.NativeErrorCode | Assert-Equals -Expected 87 # ERROR_INVALID_PARAMETER
+ $_.Exception.NativeErrorCode | Assert-Equal -Expected 87 # ERROR_INVALID_PARAMETER
}
- $failed | Assert-Equals -Expected $true
+ $failed | Assert-Equal -Expected $true
$actual = Invoke-Sc -Action qc -Name $serviceName
- $actual.TYPE | Assert-Equals -Expected "10 WIN32_OWN_PROCESS"
+ $actual.TYPE | Assert-Equal -Expected "10 WIN32_OWN_PROCESS"
}
"Modify StartType" = {
@@ -324,12 +338,12 @@ $tests = [Ordered]@{
$service.StartType = [Ansible.Service.ServiceStartType]::Disabled
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.StartType | Assert-Equals -Expected ([Ansible.Service.ServiceStartType]::Disabled)
- $actual.START_TYPE | Assert-Equals -Expected "4 DISABLED"
+ $service.StartType | Assert-Equal -Expected ([Ansible.Service.ServiceStartType]::Disabled)
+ $actual.START_TYPE | Assert-Equal -Expected "4 DISABLED"
- $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{start="demand"}
+ $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{start = "demand" }
$service.Refresh()
- $service.StartType | Assert-Equals -Expected ([Ansible.Service.ServiceStartType]::DemandStart)
+ $service.StartType | Assert-Equal -Expected ([Ansible.Service.ServiceStartType]::DemandStart)
}
"Modify StartType auto delayed" = {
@@ -342,29 +356,29 @@ $tests = [Ordered]@{
$service.StartType = [Ansible.Service.ServiceStartType]::AutoStartDelayed
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.StartType | Assert-Equals -Expected ([Ansible.Service.ServiceStartType]::AutoStartDelayed)
- $actual.START_TYPE | Assert-Equals -Expected "2 AUTO_START (DELAYED)"
+ $service.StartType | Assert-Equal -Expected ([Ansible.Service.ServiceStartType]::AutoStartDelayed)
+ $actual.START_TYPE | Assert-Equal -Expected "2 AUTO_START (DELAYED)"
# Auto Start Delayed -> Auto Start
$service.StartType = [Ansible.Service.ServiceStartType]::AutoStart
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.StartType | Assert-Equals -Expected ([Ansible.Service.ServiceStartType]::AutoStart)
- $actual.START_TYPE | Assert-Equals -Expected "2 AUTO_START"
+ $service.StartType | Assert-Equal -Expected ([Ansible.Service.ServiceStartType]::AutoStart)
+ $actual.START_TYPE | Assert-Equal -Expected "2 AUTO_START"
# Auto Start -> Auto Start Delayed
$service.StartType = [Ansible.Service.ServiceStartType]::AutoStartDelayed
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.StartType | Assert-Equals -Expected ([Ansible.Service.ServiceStartType]::AutoStartDelayed)
- $actual.START_TYPE | Assert-Equals -Expected "2 AUTO_START (DELAYED)"
+ $service.StartType | Assert-Equal -Expected ([Ansible.Service.ServiceStartType]::AutoStartDelayed)
+ $actual.START_TYPE | Assert-Equal -Expected "2 AUTO_START (DELAYED)"
# Auto Start Delayed -> Manual
$service.StartType = [Ansible.Service.ServiceStartType]::DemandStart
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.StartType | Assert-Equals -Expected ([Ansible.Service.ServiceStartType]::DemandStart)
- $actual.START_TYPE | Assert-Equals -Expected "3 DEMAND_START"
+ $service.StartType | Assert-Equal -Expected ([Ansible.Service.ServiceStartType]::DemandStart)
+ $actual.START_TYPE | Assert-Equal -Expected "3 DEMAND_START"
}
"Modify ErrorControl" = {
@@ -372,12 +386,12 @@ $tests = [Ordered]@{
$service.ErrorControl = [Ansible.Service.ErrorControl]::Severe
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.ErrorControl | Assert-Equals -Expected ([Ansible.Service.ErrorControl]::Severe)
- $actual.ERROR_CONTROL | Assert-Equals -Expected "2 SEVERE"
+ $service.ErrorControl | Assert-Equal -Expected ([Ansible.Service.ErrorControl]::Severe)
+ $actual.ERROR_CONTROL | Assert-Equal -Expected "2 SEVERE"
- $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{error="ignore"}
+ $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{error = "ignore" }
$service.Refresh()
- $service.ErrorControl | Assert-Equals -Expected ([Ansible.Service.ErrorControl]::Ignore)
+ $service.ErrorControl | Assert-Equal -Expected ([Ansible.Service.ErrorControl]::Ignore)
}
"Modify Path" = {
@@ -385,12 +399,12 @@ $tests = [Ordered]@{
$service.Path = "Fake path"
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.Path | Assert-Equals -Expected "Fake path"
- $actual.BINARY_PATH_NAME | Assert-Equals -Expected "Fake path"
+ $service.Path | Assert-Equal -Expected "Fake path"
+ $actual.BINARY_PATH_NAME | Assert-Equal -Expected "Fake path"
- $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{binpath="other fake path"}
+ $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{binpath = "other fake path" }
$service.Refresh()
- $service.Path | Assert-Equals -Expected "other fake path"
+ $service.Path | Assert-Equal -Expected "other fake path"
}
"Modify LoadOrderGroup" = {
@@ -398,12 +412,12 @@ $tests = [Ordered]@{
$service.LoadOrderGroup = "my group"
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.LoadOrderGroup | Assert-Equals -Expected "my group"
- $actual.LOAD_ORDER_GROUP | Assert-Equals -Expected "my group"
+ $service.LoadOrderGroup | Assert-Equal -Expected "my group"
+ $actual.LOAD_ORDER_GROUP | Assert-Equal -Expected "my group"
- $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{group=""}
+ $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{group = "" }
$service.Refresh()
- $service.LoadOrderGroup | Assert-Equals -Expected ""
+ $service.LoadOrderGroup | Assert-Equal -Expected ""
}
"Modify DependentOn" = {
@@ -411,17 +425,17 @@ $tests = [Ordered]@{
$service.DependentOn = @("HTTP", "WinRM")
$actual = Invoke-Sc -Action qc -Name $serviceName
- @(,$service.DependentOn) | Assert-Equals -Expected @("HTTP", "WinRM")
- @(,$actual.DEPENDENCIES) | Assert-Equals -Expected @("HTTP", "WinRM")
+ @(, $service.DependentOn) | Assert-Equal -Expected @("HTTP", "WinRM")
+ @(, $actual.DEPENDENCIES) | Assert-Equal -Expected @("HTTP", "WinRM")
- $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{depend=""}
+ $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{depend = "" }
$service.Refresh()
- $service.DependentOn.Count | Assert-Equals -Expected 0
+ $service.DependentOn.Count | Assert-Equal -Expected 0
}
"Modify Account - service account" = {
$systemSid = [System.Security.Principal.SecurityIdentifier]'S-1-5-18'
- $systemName =$systemSid.Translate([System.Security.Principal.NTAccount])
+ $systemName = $systemSid.Translate([System.Security.Principal.NTAccount])
$localSid = [System.Security.Principal.SecurityIdentifier]'S-1-5-19'
$localName = $localSid.Translate([System.Security.Principal.NTAccount])
$networkSid = [System.Security.Principal.SecurityIdentifier]'S-1-5-20'
@@ -431,17 +445,17 @@ $tests = [Ordered]@{
$service.Account = $networkSid
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.Account | Assert-Equals -Expected $networkName
- $actual.SERVICE_START_NAME | Assert-Equals -Expected $networkName.Value
+ $service.Account | Assert-Equal -Expected $networkName
+ $actual.SERVICE_START_NAME | Assert-Equal -Expected $networkName.Value
- $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{obj=$localName.Value}
+ $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{obj = $localName.Value }
$service.Refresh()
- $service.Account | Assert-Equals -Expected $localName
+ $service.Account | Assert-Equal -Expected $localName
$service.Account = $systemSid
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.Account | Assert-Equals -Expected $systemName
- $actual.SERVICE_START_NAME | Assert-Equals -Expected "LocalSystem"
+ $service.Account | Assert-Equal -Expected $systemName
+ $actual.SERVICE_START_NAME | Assert-Equal -Expected "LocalSystem"
}
"Modify Account - user" = {
@@ -459,19 +473,20 @@ $tests = [Ordered]@{
$actualSid = ([System.Security.Principal.NTAccount]"$env:COMPUTERNAME\$username").Translate(
[System.Security.Principal.SecurityIdentifier]
)
- } else {
+ }
+ else {
$actualSid = $service.Account.Translate([System.Security.Principal.SecurityIdentifier])
}
- $actualSid.Value | Assert-Equals -Expected $currentSid.Value
- $actual.SERVICE_START_NAME | Assert-Equals -Expected $service.Account.Value
+ $actualSid.Value | Assert-Equal -Expected $currentSid.Value
+ $actual.SERVICE_START_NAME | Assert-Equal -Expected $service.Account.Value
# Go back to SYSTEM from account
$systemSid = [System.Security.Principal.SecurityIdentifier]'S-1-5-18'
$service.Account = $systemSid
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.Account | Assert-Equals -Expected $systemSid.Translate([System.Security.Principal.NTAccount])
- $actual.SERVICE_START_NAME | Assert-Equals -Expected "LocalSystem"
+ $service.Account | Assert-Equal -Expected $systemSid.Translate([System.Security.Principal.NTAccount])
+ $actual.SERVICE_START_NAME | Assert-Equal -Expected "LocalSystem"
}
"Modify Account - virtual account" = {
@@ -481,8 +496,8 @@ $tests = [Ordered]@{
$service.Account = $account
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.Account | Assert-Equals -Expected $account
- $actual.SERVICE_START_NAME | Assert-Equals -Expected $account.Value
+ $service.Account | Assert-Equal -Expected $account
+ $actual.SERVICE_START_NAME | Assert-Equal -Expected $account.Value
}
"Modify Account - gMSA" = {
@@ -497,8 +512,8 @@ $tests = [Ordered]@{
$service.Account = $gmsaName
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.Account | Assert-Equals -Expected $gmsaName
- $actual.SERVICE_START_NAME | Assert-Equals -Expected $gmsaName
+ $service.Account | Assert-Equal -Expected $gmsaName
+ $actual.SERVICE_START_NAME | Assert-Equal -Expected $gmsaName
# Go from gMSA to account and back to verify the Password doesn't matter.
$currentUser = [System.Security.Principal.WindowsIdentity]::GetCurrent().User
@@ -510,8 +525,8 @@ $tests = [Ordered]@{
$service.Account = $gmsaSid
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.Account | Assert-Equals -Expected $gmsaNetlogon
- $actual.SERVICE_START_NAME | Assert-Equals -Expected $gmsaNetlogon.Value
+ $service.Account | Assert-Equal -Expected $gmsaNetlogon
+ $actual.SERVICE_START_NAME | Assert-Equal -Expected $gmsaNetlogon.Value
}
"Modify DisplayName" = {
@@ -519,12 +534,12 @@ $tests = [Ordered]@{
$service.DisplayName = "Custom Service Name"
$actual = Invoke-Sc -Action qc -Name $serviceName
- $service.DisplayName | Assert-Equals -Expected "Custom Service Name"
- $actual.DISPLAY_NAME | Assert-Equals -Expected "Custom Service Name"
+ $service.DisplayName | Assert-Equal -Expected "Custom Service Name"
+ $actual.DISPLAY_NAME | Assert-Equal -Expected "Custom Service Name"
- $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{displayname="New Service Name"}
+ $null = Invoke-Sc -Action config -Name $serviceName -Arguments @{displayname = "New Service Name" }
$service.Refresh()
- $service.DisplayName | Assert-Equals -Expected "New Service Name"
+ $service.DisplayName | Assert-Equal -Expected "New Service Name"
}
"Modify Description" = {
@@ -532,17 +547,17 @@ $tests = [Ordered]@{
$service.Description = "My custom service description"
$actual = Invoke-Sc -Action qdescription -Name $serviceName
- $service.Description | Assert-Equals -Expected "My custom service description"
- $actual.DESCRIPTION | Assert-Equals -Expected "My custom service description"
+ $service.Description | Assert-Equal -Expected "My custom service description"
+ $actual.DESCRIPTION | Assert-Equal -Expected "My custom service description"
- $null = Invoke-Sc -Action description -Name $serviceName -Arguments @(,"new description")
- $service.Description | Assert-Equals -Expected "new description"
+ $null = Invoke-Sc -Action description -Name $serviceName -Arguments @(, "new description")
+ $service.Description | Assert-Equal -Expected "new description"
$service.Description = $null
$actual = Invoke-Sc -Action qdescription -Name $serviceName
- $service.Description | Assert-Equals -Expected $null
- $actual.DESCRIPTION | Assert-Equals -Expected ""
+ $service.Description | Assert-Equal -Expected $null
+ $actual.DESCRIPTION | Assert-Equal -Expected ""
}
"Modify FailureActions" = {
@@ -551,93 +566,93 @@ $tests = [Ordered]@{
RebootMsg = 'Reboot msg'
Command = 'Command line'
Actions = @(
- [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::RunCommand; Delay = 1000},
- [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::RunCommand; Delay = 2000},
- [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::Restart; Delay = 1000},
- [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::Reboot; Delay = 1000}
+ [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::RunCommand; Delay = 1000 },
+ [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::RunCommand; Delay = 2000 },
+ [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::Restart; Delay = 1000 },
+ [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::Reboot; Delay = 1000 }
)
}
$service = New-Object -TypeName Ansible.Service.Service -ArgumentList $serviceName
$service.FailureActions = $newAction
$actual = Invoke-Sc -Action qfailure -Name $serviceName
- $actual.'RESET_PERIOD (in seconds)' | Assert-Equals -Expected 86400
- $actual.REBOOT_MESSAGE | Assert-Equals -Expected 'Reboot msg'
- $actual.COMMAND_LINE | Assert-Equals -Expected 'Command line'
- $actual.FAILURE_ACTIONS.Count | Assert-Equals -Expected 4
- $actual.FAILURE_ACTIONS[0] | Assert-Equals -Expected "RUN PROCESS -- Delay = 1000 milliseconds."
- $actual.FAILURE_ACTIONS[1] | Assert-Equals -Expected "RUN PROCESS -- Delay = 2000 milliseconds."
- $actual.FAILURE_ACTIONS[2] | Assert-Equals -Expected "RESTART -- Delay = 1000 milliseconds."
- $actual.FAILURE_ACTIONS[3] | Assert-Equals -Expected "REBOOT -- Delay = 1000 milliseconds."
- $service.FailureActions.Actions.Count | Assert-Equals -Expected 4
+ $actual.'RESET_PERIOD (in seconds)' | Assert-Equal -Expected 86400
+ $actual.REBOOT_MESSAGE | Assert-Equal -Expected 'Reboot msg'
+ $actual.COMMAND_LINE | Assert-Equal -Expected 'Command line'
+ $actual.FAILURE_ACTIONS.Count | Assert-Equal -Expected 4
+ $actual.FAILURE_ACTIONS[0] | Assert-Equal -Expected "RUN PROCESS -- Delay = 1000 milliseconds."
+ $actual.FAILURE_ACTIONS[1] | Assert-Equal -Expected "RUN PROCESS -- Delay = 2000 milliseconds."
+ $actual.FAILURE_ACTIONS[2] | Assert-Equal -Expected "RESTART -- Delay = 1000 milliseconds."
+ $actual.FAILURE_ACTIONS[3] | Assert-Equal -Expected "REBOOT -- Delay = 1000 milliseconds."
+ $service.FailureActions.Actions.Count | Assert-Equal -Expected 4
# Test that we can change individual settings and it doesn't change all
- $service.FailureActions = [Ansible.Service.FailureActions]@{ResetPeriod = 172800}
+ $service.FailureActions = [Ansible.Service.FailureActions]@{ResetPeriod = 172800 }
$actual = Invoke-Sc -Action qfailure -Name $serviceName
- $actual.'RESET_PERIOD (in seconds)' | Assert-Equals -Expected 172800
- $actual.REBOOT_MESSAGE | Assert-Equals -Expected 'Reboot msg'
- $actual.COMMAND_LINE | Assert-Equals -Expected 'Command line'
- $actual.FAILURE_ACTIONS.Count | Assert-Equals -Expected 4
- $service.FailureActions.Actions.Count | Assert-Equals -Expected 4
+ $actual.'RESET_PERIOD (in seconds)' | Assert-Equal -Expected 172800
+ $actual.REBOOT_MESSAGE | Assert-Equal -Expected 'Reboot msg'
+ $actual.COMMAND_LINE | Assert-Equal -Expected 'Command line'
+ $actual.FAILURE_ACTIONS.Count | Assert-Equal -Expected 4
+ $service.FailureActions.Actions.Count | Assert-Equal -Expected 4
- $service.FailureActions = [Ansible.Service.FailureActions]@{RebootMsg = "New reboot msg"}
+ $service.FailureActions = [Ansible.Service.FailureActions]@{RebootMsg = "New reboot msg" }
$actual = Invoke-Sc -Action qfailure -Name $serviceName
- $actual.'RESET_PERIOD (in seconds)' | Assert-Equals -Expected 172800
- $actual.REBOOT_MESSAGE | Assert-Equals -Expected 'New reboot msg'
- $actual.COMMAND_LINE | Assert-Equals -Expected 'Command line'
- $actual.FAILURE_ACTIONS.Count | Assert-Equals -Expected 4
- $service.FailureActions.Actions.Count | Assert-Equals -Expected 4
+ $actual.'RESET_PERIOD (in seconds)' | Assert-Equal -Expected 172800
+ $actual.REBOOT_MESSAGE | Assert-Equal -Expected 'New reboot msg'
+ $actual.COMMAND_LINE | Assert-Equal -Expected 'Command line'
+ $actual.FAILURE_ACTIONS.Count | Assert-Equal -Expected 4
+ $service.FailureActions.Actions.Count | Assert-Equal -Expected 4
- $service.FailureActions = [Ansible.Service.FailureActions]@{Command = "New command line"}
+ $service.FailureActions = [Ansible.Service.FailureActions]@{Command = "New command line" }
$actual = Invoke-Sc -Action qfailure -Name $serviceName
- $actual.'RESET_PERIOD (in seconds)' | Assert-Equals -Expected 172800
- $actual.REBOOT_MESSAGE | Assert-Equals -Expected 'New reboot msg'
- $actual.COMMAND_LINE | Assert-Equals -Expected 'New command line'
- $actual.FAILURE_ACTIONS.Count | Assert-Equals -Expected 4
- $service.FailureActions.Actions.Count | Assert-Equals -Expected 4
+ $actual.'RESET_PERIOD (in seconds)' | Assert-Equal -Expected 172800
+ $actual.REBOOT_MESSAGE | Assert-Equal -Expected 'New reboot msg'
+ $actual.COMMAND_LINE | Assert-Equal -Expected 'New command line'
+ $actual.FAILURE_ACTIONS.Count | Assert-Equal -Expected 4
+ $service.FailureActions.Actions.Count | Assert-Equal -Expected 4
# Test setting both ResetPeriod and Actions together
$service.FailureActions = [Ansible.Service.FailureActions]@{
ResetPeriod = 86400
Actions = @(
- [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::RunCommand; Delay = 5000},
- [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::None; Delay = 0}
+ [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::RunCommand; Delay = 5000 },
+ [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::None; Delay = 0 }
)
}
$actual = Invoke-Sc -Action qfailure -Name $serviceName
- $actual.'RESET_PERIOD (in seconds)' | Assert-Equals -Expected 86400
- $actual.REBOOT_MESSAGE | Assert-Equals -Expected 'New reboot msg'
- $actual.COMMAND_LINE | Assert-Equals -Expected 'New command line'
+ $actual.'RESET_PERIOD (in seconds)' | Assert-Equal -Expected 86400
+ $actual.REBOOT_MESSAGE | Assert-Equal -Expected 'New reboot msg'
+ $actual.COMMAND_LINE | Assert-Equal -Expected 'New command line'
# sc.exe does not show the None action it just ends the list, so we verify from get_FailureActions
- $actual.FAILURE_ACTIONS | Assert-Equals -Expected "RUN PROCESS -- Delay = 5000 milliseconds."
- $service.FailureActions.Actions.Count | Assert-Equals -Expected 2
- $service.FailureActions.Actions[1].Type | Assert-Equals -Expected ([Ansible.Service.FailureAction]::None)
+ $actual.FAILURE_ACTIONS | Assert-Equal -Expected "RUN PROCESS -- Delay = 5000 milliseconds."
+ $service.FailureActions.Actions.Count | Assert-Equal -Expected 2
+ $service.FailureActions.Actions[1].Type | Assert-Equal -Expected ([Ansible.Service.FailureAction]::None)
# Test setting just Actions without ResetPeriod
$service.FailureActions = [Ansible.Service.FailureActions]@{
- Actions = [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::RunCommand; Delay = 10000}
+ Actions = [Ansible.Service.Action]@{Type = [Ansible.Service.FailureAction]::RunCommand; Delay = 10000 }
}
$actual = Invoke-Sc -Action qfailure -Name $serviceName
- $actual.'RESET_PERIOD (in seconds)' | Assert-Equals -Expected 86400
- $actual.REBOOT_MESSAGE | Assert-Equals -Expected 'New reboot msg'
- $actual.COMMAND_LINE | Assert-Equals -Expected 'New command line'
- $actual.FAILURE_ACTIONS | Assert-Equals -Expected "RUN PROCESS -- Delay = 10000 milliseconds."
- $service.FailureActions.Actions.Count | Assert-Equals -Expected 1
+ $actual.'RESET_PERIOD (in seconds)' | Assert-Equal -Expected 86400
+ $actual.REBOOT_MESSAGE | Assert-Equal -Expected 'New reboot msg'
+ $actual.COMMAND_LINE | Assert-Equal -Expected 'New command line'
+ $actual.FAILURE_ACTIONS | Assert-Equal -Expected "RUN PROCESS -- Delay = 10000 milliseconds."
+ $service.FailureActions.Actions.Count | Assert-Equal -Expected 1
# Test removing all actions
$service.FailureActions = [Ansible.Service.FailureActions]@{
Actions = @()
}
$actual = Invoke-Sc -Action qfailure -Name $serviceName
- $actual.'RESET_PERIOD (in seconds)' | Assert-Equals -Expected 0 # ChangeServiceConfig2W resets this back to 0.
- $actual.REBOOT_MESSAGE | Assert-Equals -Expected 'New reboot msg'
- $actual.COMMAND_LINE | Assert-Equals -Expected 'New command line'
- $actual.PSObject.Properties.Name.Contains('FAILURE_ACTIONS') | Assert-Equals -Expected $false
- $service.FailureActions.Actions.Count | Assert-Equals -Expected 0
+ $actual.'RESET_PERIOD (in seconds)' | Assert-Equal -Expected 0 # ChangeServiceConfig2W resets this back to 0.
+ $actual.REBOOT_MESSAGE | Assert-Equal -Expected 'New reboot msg'
+ $actual.COMMAND_LINE | Assert-Equal -Expected 'New command line'
+ $actual.PSObject.Properties.Name.Contains('FAILURE_ACTIONS') | Assert-Equal -Expected $false
+ $service.FailureActions.Actions.Count | Assert-Equal -Expected 0
# Test that we are reading the right values
$null = Invoke-Sc -Action failure -Name $serviceName -Arguments @{
@@ -648,14 +663,14 @@ $tests = [Ordered]@{
}
$actual = $service.FailureActions
- $actual.ResetPeriod | Assert-Equals -Expected 172800
- $actual.RebootMsg | Assert-Equals -Expected "sc reboot msg"
- $actual.Command | Assert-Equals -Expected "sc command line"
- $actual.Actions.Count | Assert-Equals -Expected 2
- $actual.Actions[0].Type | Assert-Equals -Expected ([Ansible.Service.FailureAction]::RunCommand)
- $actual.Actions[0].Delay | Assert-Equals -Expected 5000
- $actual.Actions[1].Type | Assert-Equals -Expected ([Ansible.Service.FailureAction]::Reboot)
- $actual.Actions[1].Delay | Assert-Equals -Expected 800
+ $actual.ResetPeriod | Assert-Equal -Expected 172800
+ $actual.RebootMsg | Assert-Equal -Expected "sc reboot msg"
+ $actual.Command | Assert-Equal -Expected "sc command line"
+ $actual.Actions.Count | Assert-Equal -Expected 2
+ $actual.Actions[0].Type | Assert-Equal -Expected ([Ansible.Service.FailureAction]::RunCommand)
+ $actual.Actions[0].Delay | Assert-Equal -Expected 5000
+ $actual.Actions[1].Type | Assert-Equal -Expected ([Ansible.Service.FailureAction]::Reboot)
+ $actual.Actions[1].Delay | Assert-Equal -Expected 800
}
"Modify FailureActionsOnNonCrashFailures" = {
@@ -663,11 +678,11 @@ $tests = [Ordered]@{
$service.FailureActionsOnNonCrashFailures = $true
$actual = Invoke-Sc -Action qfailureflag -Name $serviceName
- $service.FailureActionsOnNonCrashFailures | Assert-Equals -Expected $true
- $actual.FAILURE_ACTIONS_ON_NONCRASH_FAILURES | Assert-Equals -Expected "TRUE"
+ $service.FailureActionsOnNonCrashFailures | Assert-Equal -Expected $true
+ $actual.FAILURE_ACTIONS_ON_NONCRASH_FAILURES | Assert-Equal -Expected "TRUE"
- $null = Invoke-Sc -Action failureflag -Name $serviceName -Arguments @(,0)
- $service.FailureActionsOnNonCrashFailures | Assert-Equals -Expected $false
+ $null = Invoke-Sc -Action failureflag -Name $serviceName -Arguments @(, 0)
+ $service.FailureActionsOnNonCrashFailures | Assert-Equal -Expected $false
}
"Modify ServiceSidInfo" = {
@@ -675,17 +690,17 @@ $tests = [Ordered]@{
$service.ServiceSidInfo = [Ansible.Service.ServiceSidInfo]::None
$actual = Invoke-Sc -Action qsidtype -Name $serviceName
- $service.ServiceSidInfo | Assert-Equals -Expected ([Ansible.Service.ServiceSidInfo]::None)
- $actual.SERVICE_SID_TYPE | Assert-Equals -Expected 'NONE'
+ $service.ServiceSidInfo | Assert-Equal -Expected ([Ansible.Service.ServiceSidInfo]::None)
+ $actual.SERVICE_SID_TYPE | Assert-Equal -Expected 'NONE'
- $null = Invoke-Sc -Action sidtype -Name $serviceName -Arguments @(,'unrestricted')
- $service.ServiceSidInfo | Assert-Equals -Expected ([Ansible.Service.ServiceSidInfo]::Unrestricted)
+ $null = Invoke-Sc -Action sidtype -Name $serviceName -Arguments @(, 'unrestricted')
+ $service.ServiceSidInfo | Assert-Equal -Expected ([Ansible.Service.ServiceSidInfo]::Unrestricted)
$service.ServiceSidInfo = [Ansible.Service.ServiceSidInfo]::Restricted
$actual = Invoke-Sc -Action qsidtype -Name $serviceName
- $service.ServiceSidInfo | Assert-Equals -Expected ([Ansible.Service.ServiceSidInfo]::Restricted)
- $actual.SERVICE_SID_TYPE | Assert-Equals -Expected 'RESTRICTED'
+ $service.ServiceSidInfo | Assert-Equal -Expected ([Ansible.Service.ServiceSidInfo]::Restricted)
+ $actual.SERVICE_SID_TYPE | Assert-Equal -Expected 'RESTRICTED'
}
"Modify RequiredPrivileges" = {
@@ -693,25 +708,25 @@ $tests = [Ordered]@{
$service.RequiredPrivileges = @("SeBackupPrivilege", "SeTcbPrivilege")
$actual = Invoke-Sc -Action qprivs -Name $serviceName
- ,$service.RequiredPrivileges | Assert-Equals -Expected @("SeBackupPrivilege", "SeTcbPrivilege")
- ,$actual.PRIVILEGES | Assert-Equals -Expected @("SeBackupPrivilege", "SeTcbPrivilege")
+ , $service.RequiredPrivileges | Assert-Equal -Expected @("SeBackupPrivilege", "SeTcbPrivilege")
+ , $actual.PRIVILEGES | Assert-Equal -Expected @("SeBackupPrivilege", "SeTcbPrivilege")
# Ensure setting to $null is the same as an empty array
$service.RequiredPrivileges = $null
$actual = Invoke-Sc -Action qprivs -Name $serviceName
- ,$service.RequiredPrivileges | Assert-Equals -Expected @()
- ,$actual.PRIVILEGES | Assert-Equals -Expected @()
+ , $service.RequiredPrivileges | Assert-Equal -Expected @()
+ , $actual.PRIVILEGES | Assert-Equal -Expected @()
$service.RequiredPrivileges = @("SeBackupPrivilege", "SeTcbPrivilege")
$service.RequiredPrivileges = @()
$actual = Invoke-Sc -Action qprivs -Name $serviceName
- ,$service.RequiredPrivileges | Assert-Equals -Expected @()
- ,$actual.PRIVILEGES | Assert-Equals -Expected @()
+ , $service.RequiredPrivileges | Assert-Equal -Expected @()
+ , $actual.PRIVILEGES | Assert-Equal -Expected @()
- $null = Invoke-Sc -Action privs -Name $serviceName -Arguments @(,"SeCreateTokenPrivilege/SeRestorePrivilege")
- ,$service.RequiredPrivileges | Assert-Equals -Expected @("SeCreateTokenPrivilege", "SeRestorePrivilege")
+ $null = Invoke-Sc -Action privs -Name $serviceName -Arguments @(, "SeCreateTokenPrivilege/SeRestorePrivilege")
+ , $service.RequiredPrivileges | Assert-Equal -Expected @("SeCreateTokenPrivilege", "SeRestorePrivilege")
}
"Modify PreShutdownTimeout" = {
@@ -722,7 +737,7 @@ $tests = [Ordered]@{
$actual = (
Get-ItemProperty -LiteralPath "HKLM:\SYSTEM\CurrentControlSet\Services\$serviceName" -Name PreshutdownTimeout
).PreshutdownTimeout
- $actual | Assert-Equals -Expected 60000
+ $actual | Assert-Equal -Expected 60000
}
"Modify Triggers" = {
@@ -794,49 +809,49 @@ $tests = [Ordered]@{
$actual = Invoke-Sc -Action qtriggerinfo -Name $serviceName
- $actual.Triggers.Count | Assert-Equals -Expected 6
- $actual.Triggers[0].Type | Assert-Equals -Expected 'DOMAIN JOINED STATUS'
- $actual.Triggers[0].Action | Assert-Equals -Expected 'STOP SERVICE'
- $actual.Triggers[0].SubType | Assert-Equals -Expected "$([Ansible.Service.Trigger]::DOMAIN_JOIN_GUID) [DOMAIN JOINED]"
- $actual.Triggers[0].Data.Count | Assert-Equals -Expected 0
-
- $actual.Triggers[1].Type | Assert-Equals -Expected 'NETWORK EVENT'
- $actual.Triggers[1].Action | Assert-Equals -Expected 'START SERVICE'
- $actual.Triggers[1].SubType | Assert-Equals -Expected "$([Ansible.Service.Trigger]::NAMED_PIPE_EVENT_GUID) [NAMED PIPE EVENT]"
- $actual.Triggers[1].Data.Count | Assert-Equals -Expected 1
- $actual.Triggers[1].Data[0] | Assert-Equals -Expected 'my named pipe'
-
- $actual.Triggers[2].Type | Assert-Equals -Expected 'NETWORK EVENT'
- $actual.Triggers[2].Action | Assert-Equals -Expected 'START SERVICE'
- $actual.Triggers[2].SubType | Assert-Equals -Expected "$([Ansible.Service.Trigger]::NAMED_PIPE_EVENT_GUID) [NAMED PIPE EVENT]"
- $actual.Triggers[2].Data.Count | Assert-Equals -Expected 1
- $actual.Triggers[2].Data[0] | Assert-Equals -Expected 'my named pipe 2'
-
- $actual.Triggers[3].Type | Assert-Equals -Expected 'CUSTOM'
- $actual.Triggers[3].Action | Assert-Equals -Expected 'START SERVICE'
- $actual.Triggers[3].SubType | Assert-Equals -Expected '9bf04e57-05dc-4914-9ed9-84bf992db88c [ETW PROVIDER UUID]'
- $actual.Triggers[3].Data.Count | Assert-Equals -Expected 2
- $actual.Triggers[3].Data[0] | Assert-Equals -Expected '01 02 03 04'
- $actual.Triggers[3].Data[1] | Assert-Equals -Expected '05 06 07 08 09'
-
- $actual.Triggers[4].Type | Assert-Equals -Expected 'CUSTOM'
- $actual.Triggers[4].Action | Assert-Equals -Expected 'START SERVICE'
- $actual.Triggers[4].SubType | Assert-Equals -Expected '9fbcfc7e-7581-4d46-913b-53bb15c80c51 [ETW PROVIDER UUID]'
- $actual.Triggers[4].Data.Count | Assert-Equals -Expected 2
- $actual.Triggers[4].Data[0] | Assert-Equals -Expected "entry 1"
- $actual.Triggers[4].Data[1] | Assert-Equals -Expected "entry 2"
-
- $actual.Triggers[5].Type | Assert-Equals -Expected 'FIREWALL PORT EVENT'
- $actual.Triggers[5].Action | Assert-Equals -Expected 'STOP SERVICE'
- $actual.Triggers[5].SubType | Assert-Equals -Expected "$([Ansible.Service.Trigger]::FIREWALL_PORT_CLOSE_GUID) [PORT CLOSE]"
- $actual.Triggers[5].Data.Count | Assert-Equals -Expected 1
- $actual.Triggers[5].Data[0] | Assert-Equals -Expected '1234;tcp;imagepath;servicename'
+ $actual.Triggers.Count | Assert-Equal -Expected 6
+ $actual.Triggers[0].Type | Assert-Equal -Expected 'DOMAIN JOINED STATUS'
+ $actual.Triggers[0].Action | Assert-Equal -Expected 'STOP SERVICE'
+ $actual.Triggers[0].SubType | Assert-Equal -Expected "$([Ansible.Service.Trigger]::DOMAIN_JOIN_GUID) [DOMAIN JOINED]"
+ $actual.Triggers[0].Data.Count | Assert-Equal -Expected 0
+
+ $actual.Triggers[1].Type | Assert-Equal -Expected 'NETWORK EVENT'
+ $actual.Triggers[1].Action | Assert-Equal -Expected 'START SERVICE'
+ $actual.Triggers[1].SubType | Assert-Equal -Expected "$([Ansible.Service.Trigger]::NAMED_PIPE_EVENT_GUID) [NAMED PIPE EVENT]"
+ $actual.Triggers[1].Data.Count | Assert-Equal -Expected 1
+ $actual.Triggers[1].Data[0] | Assert-Equal -Expected 'my named pipe'
+
+ $actual.Triggers[2].Type | Assert-Equal -Expected 'NETWORK EVENT'
+ $actual.Triggers[2].Action | Assert-Equal -Expected 'START SERVICE'
+ $actual.Triggers[2].SubType | Assert-Equal -Expected "$([Ansible.Service.Trigger]::NAMED_PIPE_EVENT_GUID) [NAMED PIPE EVENT]"
+ $actual.Triggers[2].Data.Count | Assert-Equal -Expected 1
+ $actual.Triggers[2].Data[0] | Assert-Equal -Expected 'my named pipe 2'
+
+ $actual.Triggers[3].Type | Assert-Equal -Expected 'CUSTOM'
+ $actual.Triggers[3].Action | Assert-Equal -Expected 'START SERVICE'
+ $actual.Triggers[3].SubType | Assert-Equal -Expected '9bf04e57-05dc-4914-9ed9-84bf992db88c [ETW PROVIDER UUID]'
+ $actual.Triggers[3].Data.Count | Assert-Equal -Expected 2
+ $actual.Triggers[3].Data[0] | Assert-Equal -Expected '01 02 03 04'
+ $actual.Triggers[3].Data[1] | Assert-Equal -Expected '05 06 07 08 09'
+
+ $actual.Triggers[4].Type | Assert-Equal -Expected 'CUSTOM'
+ $actual.Triggers[4].Action | Assert-Equal -Expected 'START SERVICE'
+ $actual.Triggers[4].SubType | Assert-Equal -Expected '9fbcfc7e-7581-4d46-913b-53bb15c80c51 [ETW PROVIDER UUID]'
+ $actual.Triggers[4].Data.Count | Assert-Equal -Expected 2
+ $actual.Triggers[4].Data[0] | Assert-Equal -Expected "entry 1"
+ $actual.Triggers[4].Data[1] | Assert-Equal -Expected "entry 2"
+
+ $actual.Triggers[5].Type | Assert-Equal -Expected 'FIREWALL PORT EVENT'
+ $actual.Triggers[5].Action | Assert-Equal -Expected 'STOP SERVICE'
+ $actual.Triggers[5].SubType | Assert-Equal -Expected "$([Ansible.Service.Trigger]::FIREWALL_PORT_CLOSE_GUID) [PORT CLOSE]"
+ $actual.Triggers[5].Data.Count | Assert-Equal -Expected 1
+ $actual.Triggers[5].Data[0] | Assert-Equal -Expected '1234;tcp;imagepath;servicename'
# Remove trigger with $null
$service.Triggers = $null
$actual = Invoke-Sc -Action qtriggerinfo -Name $serviceName
- $actual.Triggers.Count | Assert-Equals -Expected 0
+ $actual.Triggers.Count | Assert-Equal -Expected 0
# Add a single trigger
$service.Triggers = [Ansible.Service.Trigger]@{
@@ -846,17 +861,17 @@ $tests = [Ordered]@{
}
$actual = Invoke-Sc -Action qtriggerinfo -Name $serviceName
- $actual.Triggers.Count | Assert-Equals -Expected 1
- $actual.Triggers[0].Type | Assert-Equals -Expected 'GROUP POLICY'
- $actual.Triggers[0].Action | Assert-Equals -Expected 'START SERVICE'
- $actual.Triggers[0].SubType | Assert-Equals -Expected "$([Ansible.Service.Trigger]::MACHINE_POLICY_PRESENT_GUID) [MACHINE POLICY PRESENT]"
- $actual.Triggers[0].Data.Count | Assert-Equals -Expected 0
+ $actual.Triggers.Count | Assert-Equal -Expected 1
+ $actual.Triggers[0].Type | Assert-Equal -Expected 'GROUP POLICY'
+ $actual.Triggers[0].Action | Assert-Equal -Expected 'START SERVICE'
+ $actual.Triggers[0].SubType | Assert-Equal -Expected "$([Ansible.Service.Trigger]::MACHINE_POLICY_PRESENT_GUID) [MACHINE POLICY PRESENT]"
+ $actual.Triggers[0].Data.Count | Assert-Equal -Expected 0
# Remove trigger with empty list
$service.Triggers = @()
$actual = Invoke-Sc -Action qtriggerinfo -Name $serviceName
- $actual.Triggers.Count | Assert-Equals -Expected 0
+ $actual.Triggers.Count | Assert-Equal -Expected 0
# Add triggers through sc and check we get the values correctly
$null = Invoke-Sc -Action triggerinfo -Name $serviceName -Arguments @(
@@ -869,51 +884,51 @@ $tests = [Ordered]@{
)
$actual = $service.Triggers
- $actual.Count | Assert-Equals -Expected 6
+ $actual.Count | Assert-Equal -Expected 6
- $actual[0].Type | Assert-Equals -Expected ([Ansible.Service.TriggerType]::NetworkEndpoint)
- $actual[0].Action | Assert-Equals -Expected ([Ansible.Service.TriggerAction]::ServiceStart)
+ $actual[0].Type | Assert-Equal -Expected ([Ansible.Service.TriggerType]::NetworkEndpoint)
+ $actual[0].Action | Assert-Equal -Expected ([Ansible.Service.TriggerAction]::ServiceStart)
$actual[0].SubType = [Guid][Ansible.Service.Trigger]::NAMED_PIPE_EVENT_GUID
- $actual[0].DataItems.Count | Assert-Equals -Expected 1
- $actual[0].DataItems[0].Type | Assert-Equals -Expected ([Ansible.Service.TriggerDataType]::String)
- $actual[0].DataItems[0].Data | Assert-Equals -Expected 'abc'
+ $actual[0].DataItems.Count | Assert-Equal -Expected 1
+ $actual[0].DataItems[0].Type | Assert-Equal -Expected ([Ansible.Service.TriggerDataType]::String)
+ $actual[0].DataItems[0].Data | Assert-Equal -Expected 'abc'
- $actual[1].Type | Assert-Equals -Expected ([Ansible.Service.TriggerType]::NetworkEndpoint)
- $actual[1].Action | Assert-Equals -Expected ([Ansible.Service.TriggerAction]::ServiceStart)
+ $actual[1].Type | Assert-Equal -Expected ([Ansible.Service.TriggerType]::NetworkEndpoint)
+ $actual[1].Action | Assert-Equal -Expected ([Ansible.Service.TriggerAction]::ServiceStart)
$actual[1].SubType = [Guid][Ansible.Service.Trigger]::NAMED_PIPE_EVENT_GUID
- $actual[1].DataItems.Count | Assert-Equals -Expected 1
- $actual[1].DataItems[0].Type | Assert-Equals -Expected ([Ansible.Service.TriggerDataType]::String)
- $actual[1].DataItems[0].Data | Assert-Equals -Expected 'def'
+ $actual[1].DataItems.Count | Assert-Equal -Expected 1
+ $actual[1].DataItems[0].Type | Assert-Equal -Expected ([Ansible.Service.TriggerDataType]::String)
+ $actual[1].DataItems[0].Data | Assert-Equal -Expected 'def'
- $actual[2].Type | Assert-Equals -Expected ([Ansible.Service.TriggerType]::Custom)
- $actual[2].Action | Assert-Equals -Expected ([Ansible.Service.TriggerAction]::ServiceStart)
+ $actual[2].Type | Assert-Equal -Expected ([Ansible.Service.TriggerType]::Custom)
+ $actual[2].Action | Assert-Equal -Expected ([Ansible.Service.TriggerAction]::ServiceStart)
$actual[2].SubType = [Guid]'d4497e12-ac36-4823-af61-92db0dbd4a76'
- $actual[2].DataItems.Count | Assert-Equals -Expected 2
- $actual[2].DataItems[0].Type | Assert-Equals -Expected ([Ansible.Service.TriggerDataType]::Binary)
- ,$actual[2].DataItems[0].Data | Assert-Equals -Expected ([byte[]]@(17, 34, 51, 68))
- $actual[2].DataItems[1].Type | Assert-Equals -Expected ([Ansible.Service.TriggerDataType]::Binary)
- ,$actual[2].DataItems[1].Data | Assert-Equals -Expected ([byte[]]@(170, 187, 204, 221))
-
- $actual[3].Type | Assert-Equals -Expected ([Ansible.Service.TriggerType]::Custom)
- $actual[3].Action | Assert-Equals -Expected ([Ansible.Service.TriggerAction]::ServiceStart)
+ $actual[2].DataItems.Count | Assert-Equal -Expected 2
+ $actual[2].DataItems[0].Type | Assert-Equal -Expected ([Ansible.Service.TriggerDataType]::Binary)
+ , $actual[2].DataItems[0].Data | Assert-Equal -Expected ([byte[]]@(17, 34, 51, 68))
+ $actual[2].DataItems[1].Type | Assert-Equal -Expected ([Ansible.Service.TriggerDataType]::Binary)
+ , $actual[2].DataItems[1].Data | Assert-Equal -Expected ([byte[]]@(170, 187, 204, 221))
+
+ $actual[3].Type | Assert-Equal -Expected ([Ansible.Service.TriggerType]::Custom)
+ $actual[3].Action | Assert-Equal -Expected ([Ansible.Service.TriggerAction]::ServiceStart)
$actual[3].SubType = [Guid]'435a1742-22c5-4234-9db3-e32dafde695c'
- $actual[3].DataItems.Count | Assert-Equals -Expected 2
- $actual[3].DataItems[0].Type | Assert-Equals -Expected ([Ansible.Service.TriggerDataType]::String)
- $actual[3].DataItems[0].Data | Assert-Equals -Expected '11223344'
- $actual[3].DataItems[1].Type | Assert-Equals -Expected ([Ansible.Service.TriggerDataType]::String)
- $actual[3].DataItems[1].Data | Assert-Equals -Expected 'aabbccdd'
-
- $actual[4].Type | Assert-Equals -Expected ([Ansible.Service.TriggerType]::FirewallPortEvent)
- $actual[4].Action | Assert-Equals -Expected ([Ansible.Service.TriggerAction]::ServiceStop)
+ $actual[3].DataItems.Count | Assert-Equal -Expected 2
+ $actual[3].DataItems[0].Type | Assert-Equal -Expected ([Ansible.Service.TriggerDataType]::String)
+ $actual[3].DataItems[0].Data | Assert-Equal -Expected '11223344'
+ $actual[3].DataItems[1].Type | Assert-Equal -Expected ([Ansible.Service.TriggerDataType]::String)
+ $actual[3].DataItems[1].Data | Assert-Equal -Expected 'aabbccdd'
+
+ $actual[4].Type | Assert-Equal -Expected ([Ansible.Service.TriggerType]::FirewallPortEvent)
+ $actual[4].Action | Assert-Equal -Expected ([Ansible.Service.TriggerAction]::ServiceStop)
$actual[4].SubType = [Guid][Ansible.Service.Trigger]::FIREWALL_PORT_CLOSE_GUID
- $actual[4].DataItems.Count | Assert-Equals -Expected 1
- $actual[4].DataItems[0].Type | Assert-Equals -Expected ([Ansible.Service.TriggerDataType]::String)
- ,$actual[4].DataItems[0].Data | Assert-Equals -Expected @('1234', 'tcp', 'imagepath', 'servicename')
+ $actual[4].DataItems.Count | Assert-Equal -Expected 1
+ $actual[4].DataItems[0].Type | Assert-Equal -Expected ([Ansible.Service.TriggerDataType]::String)
+ , $actual[4].DataItems[0].Data | Assert-Equal -Expected @('1234', 'tcp', 'imagepath', 'servicename')
- $actual[5].Type | Assert-Equals -Expected ([Ansible.Service.TriggerType]::IpAddressAvailability)
- $actual[5].Action | Assert-Equals -Expected ([Ansible.Service.TriggerAction]::ServiceStop)
+ $actual[5].Type | Assert-Equal -Expected ([Ansible.Service.TriggerType]::IpAddressAvailability)
+ $actual[5].Action | Assert-Equal -Expected ([Ansible.Service.TriggerAction]::ServiceStop)
$actual[5].SubType = [Guid][Ansible.Service.Trigger]::NETWORK_MANAGER_LAST_IP_ADDRESS_REMOVAL_GUID
- $actual[5].DataItems.Count | Assert-Equals -Expected 0
+ $actual[5].DataItems.Count | Assert-Equal -Expected 0
}
# Cannot test PreferredNode as we can't guarantee CI is set up with NUMA support.
@@ -928,7 +943,8 @@ foreach ($testImpl in $tests.GetEnumerator()) {
try {
$test = $testImpl.Key
&$testImpl.Value
- } finally {
+ }
+ finally {
$null = Invoke-Sc -Action delete -Name $serviceName
}
}
diff --git a/test/integration/targets/package/aliases b/test/integration/targets/package/aliases
index 0b484bba..6eae8bd8 100644
--- a/test/integration/targets/package/aliases
+++ b/test/integration/targets/package/aliases
@@ -1,3 +1,2 @@
shippable/posix/group1
destructive
-skip/aix
diff --git a/test/integration/targets/package/tasks/main.yml b/test/integration/targets/package/tasks/main.yml
index c8b75da4..c17525d8 100644
--- a/test/integration/targets/package/tasks/main.yml
+++ b/test/integration/targets/package/tasks/main.yml
@@ -16,14 +16,6 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-- set_fact: output_dir_test={{output_dir}}/at
-
-- name: make sure our testing sub-directory does not exist
- file: path="{{ output_dir_test }}" state=absent
-
-- name: create our testing sub-directory
- file: path="{{ output_dir_test }}" state=directory
-
# Verify correct default package manager for Fedora
# Validates: https://github.com/ansible/ansible/issues/34014
- block:
diff --git a/test/integration/targets/package_facts/aliases b/test/integration/targets/package_facts/aliases
index 6c62b9a7..738ccdde 100644
--- a/test/integration/targets/package_facts/aliases
+++ b/test/integration/targets/package_facts/aliases
@@ -1,4 +1,3 @@
shippable/posix/group3
-skip/aix
skip/osx
skip/macos
diff --git a/test/integration/targets/pause/test-pause.yml b/test/integration/targets/pause/test-pause.yml
index 6fefbaa1..1c8045b3 100644
--- a/test/integration/targets/pause/test-pause.yml
+++ b/test/integration/targets/pause/test-pause.yml
@@ -13,7 +13,7 @@
- assert:
that:
- result is failed
- - "'non-integer' in result.msg"
+ - "'unable to convert to int' in result.msg"
- name: non-boolean for echo (EXPECTED FAILURE)
pause:
@@ -26,7 +26,8 @@
- result is failed
- "'not a valid boolean' in result.msg"
- - pause:
+ - name: Less than 1
+ pause:
seconds: 0.1
register: results
@@ -34,7 +35,8 @@
that:
- results.stdout is search('Paused for \d+\.\d+ seconds')
- - pause:
+ - name: 1 second
+ pause:
seconds: 1
register: results
@@ -42,10 +44,29 @@
that:
- results.stdout is search('Paused for \d+\.\d+ seconds')
- - pause:
+ - name: 1 minute
+ pause:
minutes: 1
register: results
- assert:
that:
- results.stdout is search('Paused for \d+\.\d+ minutes')
+
+ - name: minutes and seconds
+ pause:
+ minutes: 1
+ seconds: 1
+ register: exclusive
+ ignore_errors: yes
+
+ - name: invalid arg
+ pause:
+ foo: bar
+ register: invalid
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - '"parameters are mutually exclusive: minutes|seconds" in exclusive.msg'
+ - '"Unsupported parameters for (pause) module: foo." in invalid.msg'
diff --git a/test/integration/targets/pip/aliases b/test/integration/targets/pip/aliases
index 8d8cc50e..0d91b7de 100644
--- a/test/integration/targets/pip/aliases
+++ b/test/integration/targets/pip/aliases
@@ -1,3 +1,2 @@
destructive
shippable/posix/group5
-skip/aix
diff --git a/test/integration/targets/pip/meta/main.yml b/test/integration/targets/pip/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/pip/meta/main.yml
+++ b/test/integration/targets/pip/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/pip/tasks/pip.yml b/test/integration/targets/pip/tasks/pip.yml
index 2ffb3dfa..39480614 100644
--- a/test/integration/targets/pip/tasks/pip.yml
+++ b/test/integration/targets/pip/tasks/pip.yml
@@ -88,24 +88,17 @@
- name: "make sure the test env doesn't exist"
file:
state: absent
- name: "{{ output_dir }}/pipenv"
-
-- name: install a working version of setuptools in the virtualenv
- pip:
- name: setuptools
- virtualenv: "{{ output_dir }}/pipenv"
- state: present
- version: 33.1.1
+ name: "{{ remote_tmp_dir }}/pipenv"
- name: create a requirement file with an vcs url
copy:
- dest: "{{ output_dir }}/pipreq.txt"
+ dest: "{{ remote_tmp_dir }}/pipreq.txt"
content: "-e git+https://github.com/dvarrazzo/pyiso8601#egg=iso8601"
- name: install the requirement file in a virtualenv
pip:
- requirements: "{{ output_dir}}/pipreq.txt"
- virtualenv: "{{ output_dir }}/pipenv"
+ requirements: "{{ remote_tmp_dir}}/pipreq.txt"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
register: req_installed
- name: check that a change occurred
@@ -115,8 +108,8 @@
- name: "repeat installation to check status didn't change"
pip:
- requirements: "{{ output_dir}}/pipreq.txt"
- virtualenv: "{{ output_dir }}/pipenv"
+ requirements: "{{ remote_tmp_dir}}/pipreq.txt"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
register: req_installed
- name: "check that a change didn't occurr this time (bug ansible#1705)"
@@ -127,7 +120,7 @@
- name: install the same module from url
pip:
name: "git+https://github.com/dvarrazzo/pyiso8601#egg=iso8601"
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
editable: True
register: url_installed
@@ -142,13 +135,13 @@
- name: check for pip package
pip:
name: pip
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
state: present
- name: check for pip package in check_mode
pip:
name: pip
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
state: present
check_mode: True
register: pip_check_mode
@@ -162,13 +155,13 @@
- name: check for setuptools package
pip:
name: setuptools
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
state: present
- name: check for setuptools package in check_mode
pip:
name: setuptools
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
state: present
check_mode: True
register: setuptools_check_mode
@@ -183,13 +176,13 @@
- name: check for q package
pip:
name: q
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
state: present
- name: check for q package in check_mode
pip:
name: q
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
state: present
check_mode: True
register: q_check_mode
@@ -204,13 +197,13 @@
- name: check for Junit-XML package
pip:
name: Junit-XML
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
state: present
- name: check for Junit-XML package in check_mode
pip:
name: Junit-XML
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
state: present
check_mode: True
register: diff_case_check_mode
@@ -224,12 +217,12 @@
- name: ensure is a fresh virtualenv
file:
state: absent
- name: "{{ output_dir }}/pipenv"
+ name: "{{ remote_tmp_dir }}/pipenv"
- name: install pip throught pip into fresh virtualenv
pip:
name: pip
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
register: pip_install_venv
- name: make sure pip in fresh virtualenv report changed
@@ -242,7 +235,7 @@
- name: create chdir test directories
file:
state: directory
- name: "{{ output_dir }}/{{ item }}"
+ name: "{{ remote_tmp_dir }}/{{ item }}"
loop:
- pip_module
- pip_root
@@ -251,7 +244,7 @@
- name: copy test module
copy:
src: "{{ item }}"
- dest: "{{ output_dir }}/pip_module/{{ item }}"
+ dest: "{{ remote_tmp_dir }}/pip_module/{{ item }}"
loop:
- setup.py
- ansible_test_pip_chdir/__init__.py
@@ -259,8 +252,8 @@
- name: install test module
pip:
name: .
- chdir: "{{ output_dir }}/pip_module"
- extra_args: --user --upgrade --root {{ output_dir }}/pip_root
+ chdir: "{{ remote_tmp_dir }}/pip_module"
+ extra_args: --user --upgrade --root {{ remote_tmp_dir }}/pip_root
- name: register python_site_lib
command: '{{ ansible_python.executable }} -c "import site; print(site.USER_SITE)"'
@@ -271,7 +264,7 @@
register: pip_python_user_base
- name: run test module
- shell: "PYTHONPATH=$(echo {{ output_dir }}/pip_root{{ pip_python_site_lib.stdout }}) {{ output_dir }}/pip_root{{ pip_python_user_base.stdout }}/bin/ansible_test_pip_chdir"
+ shell: "PYTHONPATH=$(echo {{ remote_tmp_dir }}/pip_root{{ pip_python_site_lib.stdout }}) {{ remote_tmp_dir }}/pip_root{{ pip_python_user_base.stdout }}/bin/ansible_test_pip_chdir"
register: pip_chdir_command
- name: make sure command ran
@@ -283,12 +276,12 @@
- name: ensure is a fresh virtualenv
file:
state: absent
- name: "{{ output_dir }}/pipenv"
+ name: "{{ remote_tmp_dir }}/pipenv"
- name: install requirements file into virtual + chdir
pip:
name: q
- chdir: "{{ output_dir }}/"
+ chdir: "{{ remote_tmp_dir }}/"
virtualenv: "pipenv"
state: present
register: venv_chdir
@@ -314,14 +307,14 @@
- name: Ensure previous virtualenv no longer exists
file:
state: absent
- name: "{{ output_dir }}/pipenv"
+ name: "{{ remote_tmp_dir }}/pipenv"
- name: do not consider an empty string as a version
pip:
name: q
state: present
version: ""
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
register: pip_empty_version_string
- name: test idempotency with empty string
@@ -329,14 +322,14 @@
name: q
state: present
version: ""
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
register: pip_empty_version_string_idempotency
- name: test idempotency without empty string
pip:
name: q
state: present
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
register: pip_no_empty_version_string_idempotency
# 'present' and version=="" is analogous to latest when first installed
@@ -344,7 +337,7 @@
pip:
name: q
state: latest
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
register: pip_empty_version_idempotency
- name: ensure that installation worked and is idempotent
@@ -526,7 +519,7 @@
- name: make sure the virtualenv does not exist
file:
state: absent
- name: "{{ output_dir }}/pipenv"
+ name: "{{ remote_tmp_dir }}/pipenv"
- name: install distribute in the virtualenv
pip:
@@ -535,14 +528,14 @@
name:
- distribute
- setuptools<45 # setuptools 45 and later require python 3.5 or later
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
state: present
- name: try to remove distribute
pip:
state: "absent"
name: "distribute"
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
ignore_errors: yes
register: remove_distribute
@@ -559,13 +552,13 @@
- name: make sure the virtualenv does not exist
file:
state: absent
- name: "{{ output_dir }}/pipenv"
+ name: "{{ remote_tmp_dir }}/pipenv"
# ref: https://github.com/ansible/ansible/issues/52275
- name: install using virtualenv_command with arguments
pip:
name: "{{ pip_test_package }}"
- virtualenv: "{{ output_dir }}/pipenv"
+ virtualenv: "{{ remote_tmp_dir }}/pipenv"
virtualenv_command: "{{ command.stdout_lines[0] | basename }} --verbose"
state: present
register: version13
diff --git a/test/integration/targets/plugin_loader/normal/library/_symlink.py b/test/integration/targets/plugin_loader/normal/library/_symlink.py
deleted file mode 120000
index c4142e74..00000000
--- a/test/integration/targets/plugin_loader/normal/library/_symlink.py
+++ /dev/null
@@ -1 +0,0 @@
-_underscore.py \ No newline at end of file
diff --git a/test/integration/targets/plugin_loader/runme.sh b/test/integration/targets/plugin_loader/runme.sh
index 2a1bdeda..8ce7803a 100755
--- a/test/integration/targets/plugin_loader/runme.sh
+++ b/test/integration/targets/plugin_loader/runme.sh
@@ -2,6 +2,15 @@
set -ux
+cleanup() {
+ unlink normal/library/_symlink.py
+}
+
+pushd normal/library
+ln -s _underscore.py _symlink.py
+popd
+
+trap 'cleanup' EXIT
# check normal execution
for myplay in normal/*.yml
diff --git a/test/integration/targets/prepare_http_tests/tasks/main.yml b/test/integration/targets/prepare_http_tests/tasks/main.yml
index 9ab00221..8d34a3cd 100644
--- a/test/integration/targets/prepare_http_tests/tasks/main.yml
+++ b/test/integration/targets/prepare_http_tests/tasks/main.yml
@@ -16,6 +16,7 @@
- include_tasks: "{{ lookup('first_found', files)}}"
vars:
files:
+ - "{{ ansible_os_family | lower }}-{{ ansible_distribution_major_version }}.yml"
- "{{ ansible_os_family | lower }}.yml"
- "default.yml"
when:
diff --git a/test/integration/targets/prepare_http_tests/vars/RedHat-9.yml b/test/integration/targets/prepare_http_tests/vars/RedHat-9.yml
new file mode 100644
index 00000000..2618233c
--- /dev/null
+++ b/test/integration/targets/prepare_http_tests/vars/RedHat-9.yml
@@ -0,0 +1,4 @@
+krb5_packages:
+- krb5-devel
+- krb5-workstation
+- redhat-rpm-config # needed for gssapi install
diff --git a/test/integration/targets/pyyaml/aliases b/test/integration/targets/pyyaml/aliases
new file mode 100644
index 00000000..1d28bdb2
--- /dev/null
+++ b/test/integration/targets/pyyaml/aliases
@@ -0,0 +1,2 @@
+shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/pyyaml/runme.sh b/test/integration/targets/pyyaml/runme.sh
new file mode 100755
index 00000000..0361835a
--- /dev/null
+++ b/test/integration/targets/pyyaml/runme.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+set -eu
+source virtualenv.sh
+set +x
+
+# deps are already installed, using --no-deps to avoid re-installing them
+# Install PyYAML without libyaml to validate ansible can run
+PYYAML_FORCE_LIBYAML=0 pip install --no-binary PyYAML --ignore-installed --no-cache-dir --no-deps PyYAML
+
+ansible --version | tee /dev/stderr | grep 'libyaml = False'
diff --git a/test/integration/targets/raw/aliases b/test/integration/targets/raw/aliases
index a6dafcf8..f5bd1a5b 100644
--- a/test/integration/targets/raw/aliases
+++ b/test/integration/targets/raw/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/raw/meta/main.yml b/test/integration/targets/raw/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/raw/meta/main.yml
+++ b/test/integration/targets/raw/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/raw/runme.sh b/test/integration/targets/raw/runme.sh
index 07955427..26275996 100755
--- a/test/integration/targets/raw/runme.sh
+++ b/test/integration/targets/raw/runme.sh
@@ -3,4 +3,4 @@
set -ux
export ANSIBLE_BECOME_ALLOW_SAME_USER=1
export ANSIBLE_ROLES_PATH=../
-ansible-playbook -i ../../inventory runme.yml -e "output_dir=${OUTPUT_DIR}" -v "$@"
+ansible-playbook -i ../../inventory runme.yml -v "$@"
diff --git a/test/integration/targets/raw/tasks/main.yml b/test/integration/targets/raw/tasks/main.yml
index 7f99eadf..ce03c72a 100644
--- a/test/integration/targets/raw/tasks/main.yml
+++ b/test/integration/targets/raw/tasks/main.yml
@@ -16,24 +16,24 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-- set_fact: output_dir_test={{output_dir}}/test_command_raw
+- set_fact: remote_tmp_dir_test={{remote_tmp_dir}}/test_command_raw
- name: make sure our testing sub-directory does not exist
- file: path="{{ output_dir_test }}" state=absent
+ file: path="{{ remote_tmp_dir_test }}" state=absent
- name: create our testing sub-directory
- file: path="{{ output_dir_test }}" state=directory
+ file: path="{{ remote_tmp_dir_test }}" state=directory
##
## raw
##
- name: touch a file
- raw: "touch {{output_dir_test | expanduser}}/test.txt"
+ raw: "touch {{remote_tmp_dir_test | expanduser}}/test.txt"
register: raw_result0
- debug: var=raw_result0
- stat:
- path: "{{output_dir_test | expanduser}}/test.txt"
+ path: "{{remote_tmp_dir_test | expanduser}}/test.txt"
register: raw_result0_stat
- debug: var=raw_result0_stat
- name: ensure proper results
@@ -66,7 +66,7 @@
shell: which bash
register: bash_path
- name: run exmample non-posix command with bash
- raw: "echo 'foobar' > {{output_dir_test | expanduser}}/test.txt ; cat < {{output_dir_test | expanduser}}/test.txt"
+ raw: "echo 'foobar' > {{remote_tmp_dir_test | expanduser}}/test.txt ; cat < {{remote_tmp_dir_test | expanduser}}/test.txt"
args:
executable: "{{ bash_path.stdout }}"
register: raw_result2
diff --git a/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py b/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py
index d013fc48..e542913d 100644
--- a/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py
+++ b/test/integration/targets/rel_plugin_loading/subdir/inventory_plugins/notyaml.py
@@ -60,10 +60,11 @@ all: # keys must be unique, i.e. only one 'hosts' per group
import os
+from collections.abc import MutableMapping
+
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native, to_text
-from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.plugins.inventory import BaseFileInventoryPlugin
NoneType = type(None)
diff --git a/test/integration/targets/remote_tmp/aliases b/test/integration/targets/remote_tmp/aliases
index 4b8559d9..0418db38 100644
--- a/test/integration/targets/remote_tmp/aliases
+++ b/test/integration/targets/remote_tmp/aliases
@@ -1,4 +1,3 @@
shippable/posix/group3
-skip/aix
context/target
needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/replace/meta/main.yml b/test/integration/targets/replace/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/replace/meta/main.yml
+++ b/test/integration/targets/replace/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/replace/tasks/main.yml b/test/integration/targets/replace/tasks/main.yml
index 24146ff3..d267b783 100644
--- a/test/integration/targets/replace/tasks/main.yml
+++ b/test/integration/targets/replace/tasks/main.yml
@@ -1,11 +1,11 @@
# setup
-- set_fact: output_dir_test={{output_dir}}/test_replace
+- set_fact: remote_tmp_dir_test={{remote_tmp_dir}}/test_replace
- name: make sure our testing sub-directory does not exist
- file: path="{{ output_dir_test }}" state=absent
+ file: path="{{ remote_tmp_dir_test }}" state=absent
- name: create our testing sub-directory
- file: path="{{ output_dir_test }}" state=directory
+ file: path="{{ remote_tmp_dir_test }}" state=directory
# tests
- name: create test files
@@ -15,19 +15,19 @@
We promptly judged antique ivory buckles for the next prize.
Jinxed wizards pluck ivy from the big quilt.
Jaded zombies acted quaintly but kept driving their oxen forward.
- dest: "{{ output_dir_test }}/pangrams.{{ item }}.txt"
+ dest: "{{ remote_tmp_dir_test }}/pangrams.{{ item }}.txt"
with_sequence: start=0 end=6 format=%02x #increment as needed
## test `before` option
- name: remove all spaces before "quilt"
replace:
- path: "{{ output_dir_test }}/pangrams.00.txt"
+ path: "{{ remote_tmp_dir_test }}/pangrams.00.txt"
before: 'quilt'
regexp: ' '
register: replace_test0
-- command: "cat {{ output_dir_test }}/pangrams.00.txt"
+- command: "cat {{ remote_tmp_dir_test }}/pangrams.00.txt"
register: replace_cat0
- name: validate before assertions
@@ -42,12 +42,12 @@
## test `after` option
- name: remove all spaces after "promptly"
replace:
- path: "{{ output_dir_test }}/pangrams.01.txt"
+ path: "{{ remote_tmp_dir_test }}/pangrams.01.txt"
after: 'promptly'
regexp: ' '
register: replace_test1
-- command: "cat {{ output_dir_test }}/pangrams.01.txt"
+- command: "cat {{ remote_tmp_dir_test }}/pangrams.01.txt"
register: replace_cat1
- name: validate after assertions
@@ -62,7 +62,7 @@
## test combined `before` and `after` options
- name: before "promptly" but after "quilt", replace every "e" with a "3"
replace:
- path: "{{ output_dir_test }}/pangrams.02.txt"
+ path: "{{ remote_tmp_dir_test }}/pangrams.02.txt"
before: 'promptly'
after: 'quilt'
regexp: 'e'
@@ -78,14 +78,14 @@
- name: before "quilt" but after "promptly", replace every "e" with a "3"
replace:
- path: "{{ output_dir_test }}/pangrams.03.txt"
+ path: "{{ remote_tmp_dir_test }}/pangrams.03.txt"
before: 'quilt'
after: 'promptly'
regexp: 'e'
replace: '3'
register: replace_test3
-- command: "cat {{ output_dir_test }}/pangrams.03.txt"
+- command: "cat {{ remote_tmp_dir_test }}/pangrams.03.txt"
register: replace_cat3
- name: validate before+after assertions
@@ -99,22 +99,22 @@
## test ^$ behavior in MULTILINE, and . behavior in absense of DOTALL
- name: quote everything between bof and eof
replace:
- path: "{{ output_dir_test }}/pangrams.04.txt"
+ path: "{{ remote_tmp_dir_test }}/pangrams.04.txt"
regexp: ^([\S\s]+)$
replace: '"\1"'
register: replace_test4_0
-- command: "cat {{ output_dir_test }}/pangrams.04.txt"
+- command: "cat {{ remote_tmp_dir_test }}/pangrams.04.txt"
register: replace_cat4_0
- name: quote everything between bol and eol
replace:
- path: "{{ output_dir_test }}/pangrams.04.txt"
+ path: "{{ remote_tmp_dir_test }}/pangrams.04.txt"
regexp: ^(.+)$
replace: '"\1"'
register: replace_test4_1
-- command: "cat {{ output_dir_test }}/pangrams.04.txt"
+- command: "cat {{ remote_tmp_dir_test }}/pangrams.04.txt"
register: replace_cat4_1
- name: validate before+after assertions
@@ -132,29 +132,29 @@
## test \b escaping in short and long form
- name: short form with unescaped word boundaries
- replace: path="{{ output_dir_test }}/pangrams.05.txt" regexp='\b(.+)\b' replace='"\1"'
+ replace: path="{{ remote_tmp_dir_test }}/pangrams.05.txt" regexp='\b(.+)\b' replace='"\1"'
register: replace_test5_0
- name: short form with escaped word boundaries
- replace: path="{{ output_dir_test }}/pangrams.05.txt" regexp='\\b(.+)\\b' replace='"\1"'
+ replace: path="{{ remote_tmp_dir_test }}/pangrams.05.txt" regexp='\\b(.+)\\b' replace='"\1"'
register: replace_test5_1
-- command: "cat {{ output_dir_test }}/pangrams.05.txt"
+- command: "cat {{ remote_tmp_dir_test }}/pangrams.05.txt"
register: replace_cat5_1
- name: long form with unescaped word boundaries
replace:
- path: "{{ output_dir_test }}/pangrams.05.txt"
+ path: "{{ remote_tmp_dir_test }}/pangrams.05.txt"
regexp: '\b(.+)\b'
replace: '"\1"'
register: replace_test5_2
-- command: "cat {{ output_dir_test }}/pangrams.05.txt"
+- command: "cat {{ remote_tmp_dir_test }}/pangrams.05.txt"
register: replace_cat5_2
- name: long form with escaped word boundaries
replace:
- path: "{{ output_dir_test }}/pangrams.05.txt"
+ path: "{{ remote_tmp_dir_test }}/pangrams.05.txt"
regexp: '\\b(.+)\\b'
replace: '"\1"'
register: replace_test5_3
@@ -175,13 +175,13 @@
## test backup behaviors
- name: replacement with backup
replace:
- path: "{{ output_dir_test }}/pangrams.06.txt"
+ path: "{{ remote_tmp_dir_test }}/pangrams.06.txt"
regexp: ^(.+)$
replace: '"\1"'
backup: true
register: replace_test6
-- command: "cat {{ output_dir_test }}/pangrams.06.txt"
+- command: "cat {{ remote_tmp_dir_test }}/pangrams.06.txt"
register: replace_cat6_0
- command: "cat {{ replace_test6.backup_file }}"
@@ -199,14 +199,14 @@
## test filesystem failures
- name: fail on directory
replace:
- path: "{{ output_dir_test }}"
+ path: "{{ remote_tmp_dir_test }}"
regexp: ^(.+)$
register: replace_test7_1
ignore_errors: true
- name: fail on missing file
replace:
- path: "{{ output_dir_test }}/missing_file.txt"
+ path: "{{ remote_tmp_dir_test }}/missing_file.txt"
regexp: ^(.+)$
register: replace_test7_2
ignore_errors: true
@@ -241,18 +241,18 @@
127.0.0.1
127.0.1.1
# end of group
- dest: "{{ output_dir_test }}/addresses.txt"
+ dest: "{{ remote_tmp_dir_test }}/addresses.txt"
- name: subsection madness
replace:
- path: "{{ output_dir_test }}/addresses.txt"
+ path: "{{ remote_tmp_dir_test }}/addresses.txt"
after: '# start of group'
before: '# end of group'
regexp: '0'
replace: '9'
register: replace_test8
-- command: "cat {{ output_dir_test }}/addresses.txt"
+- command: "cat {{ remote_tmp_dir_test }}/addresses.txt"
register: replace_cat8
- name: validate before+after assertions
diff --git a/test/integration/targets/roles_arg_spec/test_complex_role_fails.yml b/test/integration/targets/roles_arg_spec/test_complex_role_fails.yml
index 923e92f7..8764d382 100644
--- a/test/integration/targets/roles_arg_spec/test_complex_role_fails.yml
+++ b/test/integration/targets/roles_arg_spec/test_complex_role_fails.yml
@@ -41,16 +41,6 @@
]
tasks:
- # This test play requires jinja >= 2.7
- - name: get the jinja2 version
- shell: python -c 'import jinja2; print(jinja2.__version__)'
- register: jinja2_version
- delegate_to: localhost
- changed_when: false
-
- - debug:
- msg: "Jinja version: {{ jinja2_version.stdout }}"
-
- name: include_role test1 since it has a arg_spec.yml
block:
- include_role:
@@ -178,7 +168,3 @@
- ansible_failed_result.validate_args_context.name == "test1"
- ansible_failed_result.validate_args_context.type == "role"
- "ansible_failed_result.validate_args_context.path is search('roles_arg_spec/roles/test1')"
-
- # skip this task if jinja isnt >= 2.7, aka centos6
- when:
- - jinja2_version.stdout is version('2.7', '>=')
diff --git a/test/integration/targets/rpm_key/aliases b/test/integration/targets/rpm_key/aliases
index 3a07aab3..a4c92ef8 100644
--- a/test/integration/targets/rpm_key/aliases
+++ b/test/integration/targets/rpm_key/aliases
@@ -1,3 +1,2 @@
destructive
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/rpm_key/meta/main.yml b/test/integration/targets/rpm_key/meta/main.yml
new file mode 100644
index 00000000..1810d4be
--- /dev/null
+++ b/test/integration/targets/rpm_key/meta/main.yml
@@ -0,0 +1,2 @@
+dependencies:
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/rpm_key/tasks/rpm_key.yaml b/test/integration/targets/rpm_key/tasks/rpm_key.yaml
index 58020f48..24fbbaee 100644
--- a/test/integration/targets/rpm_key/tasks/rpm_key.yaml
+++ b/test/integration/targets/rpm_key/tasks/rpm_key.yaml
@@ -13,7 +13,7 @@
- name: Save gpg keys to a file
copy:
content: "{{ pubkeys['stdout'] }}\n"
- dest: '{{ output_dir }}/pubkeys'
+ dest: '{{ remote_tmp_dir }}/pubkeys'
mode: 0600
#
@@ -21,17 +21,17 @@
#
- name: download EPEL GPG key
get_url:
- url: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/rpm_key/RPM-GPG-KEY-EPEL-7
+ url: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/RPM-GPG-KEY-EPEL-7
dest: /tmp/RPM-GPG-KEY-EPEL-7
- name: download sl rpm
get_url:
- url: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/rpm_key/sl-5.02-1.el7.x86_64.rpm
+ url: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/sl-5.02-1.el7.x86_64.rpm
dest: /tmp/sl.rpm
- name: download Mono key
get_url:
- url: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/rpm_key/mono.gpg
+ url: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/mono.gpg
dest: /tmp/mono.gpg
- name: remove EPEL GPG key from keyring
@@ -96,7 +96,7 @@
- name: remove GPG key from url
rpm_key:
state: absent
- key: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/rpm_key/RPM-GPG-KEY-EPEL-7
+ key: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/RPM-GPG-KEY-EPEL-7
- name: Confirm key is missing
shell: "rpm --checksig /tmp/sl.rpm"
@@ -112,7 +112,7 @@
- name: add GPG key from url
rpm_key:
state: present
- key: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/rpm_key/RPM-GPG-KEY-EPEL-7
+ key: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/RPM-GPG-KEY-EPEL-7
- name: check GPG signature of sl. Should return okay
shell: "rpm --checksig /tmp/sl.rpm"
@@ -128,7 +128,7 @@
- name: add very first key on system
rpm_key:
state: present
- key: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/rpm_key/RPM-GPG-KEY-EPEL-7
+ key: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/RPM-GPG-KEY-EPEL-7
- name: check GPG signature of sl. Should return okay
shell: "rpm --checksig /tmp/sl.rpm"
@@ -140,7 +140,7 @@
- name: Issue 20325 - Verify fingerprint of key, invalid fingerprint - EXPECTED FAILURE
rpm_key:
- key: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/rpm_key/RPM-GPG-KEY.dag
+ key: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/RPM-GPG-KEY.dag
fingerprint: 1111 1111 1111 1111 1111 1111 1111 1111 1111 1111
register: result
failed_when: result is success
@@ -154,7 +154,7 @@
- name: Issue 20325 - Verify fingerprint of key, valid fingerprint
rpm_key:
- key: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/rpm_key/RPM-GPG-KEY.dag
+ key: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/RPM-GPG-KEY.dag
fingerprint: EBC6 E12C 62B1 C734 026B 2122 A20E 5214 6B8D 79E6
register: result
@@ -166,7 +166,7 @@
- name: Issue 20325 - Verify fingerprint of key, valid fingerprint - Idempotent check
rpm_key:
- key: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/rpm_key/RPM-GPG-KEY.dag
+ key: https://ci-files.testing.ansible.com/test/integration/targets/rpm_key/RPM-GPG-KEY.dag
fingerprint: EBC6 E12C 62B1 C734 026B 2122 A20E 5214 6B8D 79E6
register: result
@@ -183,7 +183,7 @@
shell: "rpm -q gpg-pubkey | xargs rpm -e"
- name: Restore the gpg keys normally installed on the system
- command: 'rpm --import {{ output_dir }}/pubkeys'
+ command: 'rpm --import {{ remote_tmp_dir }}/pubkeys'
- name: Retrieve a list of gpg keys are installed for package checking
shell: 'rpm -q gpg-pubkey | sort'
diff --git a/test/integration/targets/script/meta/main.yml b/test/integration/targets/script/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/script/meta/main.yml
+++ b/test/integration/targets/script/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/script/tasks/main.yml b/test/integration/targets/script/tasks/main.yml
index f1746f7c..989513d5 100644
--- a/test/integration/targets/script/tasks/main.yml
+++ b/test/integration/targets/script/tasks/main.yml
@@ -21,16 +21,16 @@
##
- set_fact:
- output_dir_test: "{{ output_dir }}/test_script"
+ remote_tmp_dir_test: "{{ remote_tmp_dir }}/test_script"
- name: make sure our testing sub-directory does not exist
file:
- path: "{{ output_dir_test }}"
+ path: "{{ remote_tmp_dir_test }}"
state: absent
- name: create our testing sub-directory
file:
- path: "{{ output_dir_test }}"
+ path: "{{ remote_tmp_dir_test }}"
state: directory
##
@@ -76,24 +76,24 @@
# creates
- name: verify that afile.txt is absent
file:
- path: "{{ output_dir_test }}/afile.txt"
+ path: "{{ remote_tmp_dir_test }}/afile.txt"
state: absent
- name: create afile.txt with create_afile.sh via command
- script: create_afile.sh {{ output_dir_test | expanduser }}/afile.txt
+ script: create_afile.sh {{ remote_tmp_dir_test | expanduser }}/afile.txt
args:
- creates: "{{ output_dir_test | expanduser }}/afile.txt"
+ creates: "{{ remote_tmp_dir_test | expanduser }}/afile.txt"
register: _create_test1
- name: Check state of created file
stat:
- path: "{{ output_dir_test | expanduser }}/afile.txt"
+ path: "{{ remote_tmp_dir_test | expanduser }}/afile.txt"
register: _create_stat1
- name: Run create_afile.sh again to ensure it is skipped
- script: create_afile.sh {{ output_dir_test | expanduser }}/afile.txt
+ script: create_afile.sh {{ remote_tmp_dir_test | expanduser }}/afile.txt
args:
- creates: "{{ output_dir_test | expanduser }}/afile.txt"
+ creates: "{{ remote_tmp_dir_test | expanduser }}/afile.txt"
register: _create_test2
- name: Assert that script report a change, file was created, second run was skipped
@@ -107,24 +107,24 @@
# removes
- name: verify that afile.txt is present
file:
- path: "{{ output_dir_test }}/afile.txt"
+ path: "{{ remote_tmp_dir_test }}/afile.txt"
state: file
- name: remove afile.txt with remote_afile.sh via command
- script: remove_afile.sh {{ output_dir_test | expanduser }}/afile.txt
+ script: remove_afile.sh {{ remote_tmp_dir_test | expanduser }}/afile.txt
args:
- removes: "{{ output_dir_test | expanduser }}/afile.txt"
+ removes: "{{ remote_tmp_dir_test | expanduser }}/afile.txt"
register: _remove_test1
- name: Check state of removed file
stat:
- path: "{{ output_dir_test | expanduser }}/afile.txt"
+ path: "{{ remote_tmp_dir_test | expanduser }}/afile.txt"
register: _remove_stat1
- name: Run remote_afile.sh again to enure it is skipped
- script: remove_afile.sh {{ output_dir_test | expanduser }}/afile.txt
+ script: remove_afile.sh {{ remote_tmp_dir_test | expanduser }}/afile.txt
args:
- removes: "{{ output_dir_test | expanduser }}/afile.txt"
+ removes: "{{ remote_tmp_dir_test | expanduser }}/afile.txt"
register: _remove_test2
- name: Assert that script report a change, file was removed, second run was skipped
@@ -138,7 +138,7 @@
# async
- name: verify that afile.txt is absent
file:
- path: "{{ output_dir_test }}/afile.txt"
+ path: "{{ remote_tmp_dir_test }}/afile.txt"
state: absent
- name: test task failure with async param
@@ -156,7 +156,7 @@
# check mode
- name: Run script to create a file in check mode
- script: create_afile.sh {{ output_dir_test | expanduser }}/afile2.txt
+ script: create_afile.sh {{ remote_tmp_dir_test | expanduser }}/afile2.txt
check_mode: yes
register: _check_mode_test
@@ -166,7 +166,7 @@
- name: Get state of file created by script
stat:
- path: "{{ output_dir_test | expanduser }}/afile2.txt"
+ path: "{{ remote_tmp_dir_test | expanduser }}/afile2.txt"
register: _afile_stat
- debug:
@@ -176,16 +176,17 @@
- name: Assert that a change was reported but the script did not make changes
assert:
that:
- - _check_mode_test is changed
+ - _check_mode_test is not changed
+ - _check_mode_test is skipped
- not _afile_stat.stat.exists
- name: Run script to create a file
- script: create_afile.sh {{ output_dir_test | expanduser }}/afile2.txt
+ script: create_afile.sh {{ remote_tmp_dir_test | expanduser }}/afile2.txt
- name: Run script to create a file in check mode with 'creates' argument
- script: create_afile.sh {{ output_dir_test | expanduser }}/afile2.txt
+ script: create_afile.sh {{ remote_tmp_dir_test | expanduser }}/afile2.txt
args:
- creates: "{{ output_dir_test | expanduser }}/afile2.txt"
+ creates: "{{ remote_tmp_dir_test | expanduser }}/afile2.txt"
register: _check_mode_test2
check_mode: yes
@@ -197,17 +198,17 @@
assert:
that:
- _check_mode_test2 is skipped
- - '_check_mode_test2.msg == "{{ output_dir_test | expanduser }}/afile2.txt exists, matching creates option"'
+ - '_check_mode_test2.msg == "{{ remote_tmp_dir_test | expanduser }}/afile2.txt exists, matching creates option"'
- name: Remove afile2.txt
file:
- path: "{{ output_dir_test | expanduser }}/afile2.txt"
+ path: "{{ remote_tmp_dir_test | expanduser }}/afile2.txt"
state: absent
- name: Run script to remove a file in check mode with 'removes' argument
- script: remove_afile.sh {{ output_dir_test | expanduser }}/afile2.txt
+ script: remove_afile.sh {{ remote_tmp_dir_test | expanduser }}/afile2.txt
args:
- removes: "{{ output_dir_test | expanduser }}/afile2.txt"
+ removes: "{{ remote_tmp_dir_test | expanduser }}/afile2.txt"
register: _check_mode_test3
check_mode: yes
@@ -219,7 +220,7 @@
assert:
that:
- _check_mode_test3 is skipped
- - '_check_mode_test3.msg == "{{ output_dir_test | expanduser }}/afile2.txt does not exist, matching removes option"'
+ - '_check_mode_test3.msg == "{{ remote_tmp_dir_test | expanduser }}/afile2.txt does not exist, matching removes option"'
# executable
diff --git a/test/integration/targets/service/aliases b/test/integration/targets/service/aliases
index 1ef4c361..f2f9ac9d 100644
--- a/test/integration/targets/service/aliases
+++ b/test/integration/targets/service/aliases
@@ -1,5 +1,4 @@
destructive
shippable/posix/group1
-skip/aix
skip/osx
skip/macos
diff --git a/test/integration/targets/service_facts/aliases b/test/integration/targets/service_facts/aliases
index cc0aa0d9..9470f773 100644
--- a/test/integration/targets/service_facts/aliases
+++ b/test/integration/targets/service_facts/aliases
@@ -1,5 +1,4 @@
shippable/posix/group3
-skip/aix
skip/freebsd
skip/osx
skip/macos
diff --git a/test/integration/targets/setup_deb_repo/files/package_specs/foo-1.0.0 b/test/integration/targets/setup_deb_repo/files/package_specs/stable/foo-1.0.0
index 4206fbab..4206fbab 100644
--- a/test/integration/targets/setup_deb_repo/files/package_specs/foo-1.0.0
+++ b/test/integration/targets/setup_deb_repo/files/package_specs/stable/foo-1.0.0
diff --git a/test/integration/targets/setup_deb_repo/files/package_specs/foo-1.0.1 b/test/integration/targets/setup_deb_repo/files/package_specs/stable/foo-1.0.1
index 021f4d52..021f4d52 100644
--- a/test/integration/targets/setup_deb_repo/files/package_specs/foo-1.0.1
+++ b/test/integration/targets/setup_deb_repo/files/package_specs/stable/foo-1.0.1
diff --git a/test/integration/targets/setup_deb_repo/files/package_specs/foobar-1.0.0 b/test/integration/targets/setup_deb_repo/files/package_specs/stable/foobar-1.0.0
index 0da0348f..0da0348f 100644
--- a/test/integration/targets/setup_deb_repo/files/package_specs/foobar-1.0.0
+++ b/test/integration/targets/setup_deb_repo/files/package_specs/stable/foobar-1.0.0
diff --git a/test/integration/targets/setup_deb_repo/files/package_specs/foobar-1.0.1 b/test/integration/targets/setup_deb_repo/files/package_specs/stable/foobar-1.0.1
index b9fa8303..b9fa8303 100644
--- a/test/integration/targets/setup_deb_repo/files/package_specs/foobar-1.0.1
+++ b/test/integration/targets/setup_deb_repo/files/package_specs/stable/foobar-1.0.1
diff --git a/test/integration/targets/setup_deb_repo/files/package_specs/testing/foo-2.0.0 b/test/integration/targets/setup_deb_repo/files/package_specs/testing/foo-2.0.0
new file mode 100644
index 00000000..7e835f05
--- /dev/null
+++ b/test/integration/targets/setup_deb_repo/files/package_specs/testing/foo-2.0.0
@@ -0,0 +1,10 @@
+Section: misc
+Priority: optional
+Standards-Version: 2.3.3
+
+Package: foo
+Version: 2.0.0
+Section: system
+Maintainer: John Doe <john@doe.com>
+Architecture: all
+Description: Dummy package
diff --git a/test/integration/targets/setup_deb_repo/files/package_specs/testing/foo-2.0.1 b/test/integration/targets/setup_deb_repo/files/package_specs/testing/foo-2.0.1
new file mode 100644
index 00000000..c6e7b5ba
--- /dev/null
+++ b/test/integration/targets/setup_deb_repo/files/package_specs/testing/foo-2.0.1
@@ -0,0 +1,10 @@
+Section: misc
+Priority: optional
+Standards-Version: 2.3.3
+
+Package: foo
+Version: 2.0.1
+Section: system
+Maintainer: John Doe <john@doe.com>
+Architecture: all
+Description: Dummy package
diff --git a/test/integration/targets/setup_deb_repo/tasks/main.yml b/test/integration/targets/setup_deb_repo/tasks/main.yml
index 49f68a2c..471fb2a2 100644
--- a/test/integration/targets/setup_deb_repo/tasks/main.yml
+++ b/test/integration/targets/setup_deb_repo/tasks/main.yml
@@ -10,36 +10,55 @@
- set_fact:
repodir: /tmp/repo/
- - name: Create repo dir
+ - name: Create repo dirs
file:
- path: "{{ repodir }}"
+ path: "{{ repodir }}/dists/{{ item }}/main/binary-all"
state: directory
mode: 0755
+ loop:
+ - stable
+ - testing
- name: Copy package specs to remote
copy:
- src: "{{ item }}"
- dest: "{{ remote_tmp_dir }}/{{ item | basename }}"
- with_fileglob:
- - "files/package_specs/*"
+ src: package_specs
+ dest: "{{ remote_tmp_dir }}"
- name: Create deb files
- shell: "equivs-build {{ remote_tmp_dir }}/{{ item | basename }}"
+ shell: "find {{ remote_tmp_dir }}/package_specs/{{ item }} -type f -exec equivs-build {} \\;"
args:
- chdir: "{{ repodir }}"
- with_fileglob:
- - "files/package_specs/*"
+ chdir: "{{ repodir }}/dists/{{ item }}/main/binary-all"
+ loop:
+ - stable
+ - testing
- - name: Create repo
- shell: dpkg-scanpackages --multiversion . /dev/null | gzip -9c > Packages.gz
+ - name: Create repo Packages
+ shell: dpkg-scanpackages --multiversion . /dev/null dists/{{ item }}/main/binary-all/ | gzip -9c > Packages.gz
args:
- chdir: "{{ repodir }}"
+ chdir: "{{ repodir }}/dists/{{ item }}/main/binary-all"
+ loop:
+ - stable
+ - testing
- # Can't use apt_repository as it doesn't expose a trusted=yes option
- - name: Install the repo
+ - name: Create repo Release
copy:
- content: deb [trusted=yes] file:{{ repodir }} ./
- dest: /etc/apt/sources.list.d/file_tmp_repo.list
+ content: |
+ Codename: {{ item.0 }}
+ {% for k,v in item.1.items() %}
+ {{ k }}: {{ v }}
+ {% endfor %}
+ dest: "{{ repodir }}/dists/{{ item.0 }}/Release"
+ loop:
+ - [stable, {}]
+ - [testing, {NotAutomatic: "yes", ButAutomaticUpgrades: "yes"}]
+
+ - name: Install the repo
+ apt_repository:
+ repo: deb [trusted=yes arch=all] file:{{ repodir }} {{ item }} main
+ update_cache: false # interferes with task 'Test update_cache 1'
+ loop:
+ - stable
+ - testing
# Need to uncomment the deb-src for the universe component for build-dep state
- name: Ensure deb-src for the universe component
diff --git a/test/integration/targets/setup_epel/tasks/main.yml b/test/integration/targets/setup_epel/tasks/main.yml
index 1c41e13e..ba0eae30 100644
--- a/test/integration/targets/setup_epel/tasks/main.yml
+++ b/test/integration/targets/setup_epel/tasks/main.yml
@@ -1,5 +1,5 @@
- name: Install EPEL
yum:
- name: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/setup_epel/epel-release-latest-{{ ansible_distribution_major_version }}.noarch.rpm
+ name: https://ci-files.testing.ansible.com/test/integration/targets/setup_epel/epel-release-latest-{{ ansible_distribution_major_version }}.noarch.rpm
disable_gpg_check: true
when: ansible_facts.distribution in ['RedHat', 'CentOS']
diff --git a/test/integration/targets/setup_paramiko/install-Fedora-35-python-3.yml b/test/integration/targets/setup_paramiko/install-Fedora-35-python-3.yml
new file mode 100644
index 00000000..bbe97a96
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/install-Fedora-35-python-3.yml
@@ -0,0 +1,9 @@
+- name: Install Paramiko and crypto policies scripts
+ dnf:
+ name:
+ - crypto-policies-scripts
+ - python3-paramiko
+ install_weak_deps: no
+
+- name: Drop the crypto-policy to LEGACY for these tests
+ command: update-crypto-policies --set LEGACY
diff --git a/test/integration/targets/setup_paramiko/install-RedHat-9-python-3.yml b/test/integration/targets/setup_paramiko/install-RedHat-9-python-3.yml
new file mode 100644
index 00000000..ca391556
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/install-RedHat-9-python-3.yml
@@ -0,0 +1,9 @@
+- name: Setup remote constraints
+ include_tasks: setup-remote-constraints.yml
+- name: Install Paramiko for Python 3 on RHEL 9
+ pip: # no python3-paramiko package exists for RHEL 9
+ name: paramiko
+ extra_args: "-c {{ remote_constraints }}"
+
+- name: Drop the crypto-policy to LEGACY for these tests
+ command: update-crypto-policies --set LEGACY
diff --git a/test/integration/targets/setup_paramiko/uninstall-Fedora-35-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-Fedora-35-python-3.yml
new file mode 100644
index 00000000..aa3387ba
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/uninstall-Fedora-35-python-3.yml
@@ -0,0 +1,7 @@
+- name: Revert the crypto-policy back to DEFAULT
+ command: update-crypto-policies --set DEFAULT
+
+- name: Uninstall Paramiko and crypto policies scripts using dnf history undo
+ command: dnf history undo last --assumeyes
+ args:
+ warn: no
diff --git a/test/integration/targets/setup_paramiko/uninstall-RedHat-9-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-RedHat-9-python-3.yml
new file mode 100644
index 00000000..f46ec55b
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/uninstall-RedHat-9-python-3.yml
@@ -0,0 +1,7 @@
+- name: Uninstall Paramiko for Python 3 on RHEL 9
+ pip: # no python3-paramiko package exists for RHEL 9
+ name: paramiko
+ state: absent
+
+- name: Revert the crypto-policy back to DEFAULT
+ command: update-crypto-policies --set DEFAULT
diff --git a/test/integration/targets/setup_rpm_repo/tasks/main.yml b/test/integration/targets/setup_rpm_repo/tasks/main.yml
index b2c9ae1b..be20078f 100644
--- a/test/integration/targets/setup_rpm_repo/tasks/main.yml
+++ b/test/integration/targets/setup_rpm_repo/tasks/main.yml
@@ -24,6 +24,11 @@
args:
name: "{{ rpm_repo_packages }}"
+ - name: Install rpmfluff via pip
+ pip:
+ name: rpmfluff
+ when: ansible_facts.os_family == 'RedHat' and ansible_distribution_major_version is version('9', '==')
+
- set_fact:
repos:
- "fake-{{ ansible_architecture }}"
diff --git a/test/integration/targets/setup_rpm_repo/vars/RedHat-9.yml b/test/integration/targets/setup_rpm_repo/vars/RedHat-9.yml
new file mode 100644
index 00000000..84849e23
--- /dev/null
+++ b/test/integration/targets/setup_rpm_repo/vars/RedHat-9.yml
@@ -0,0 +1,4 @@
+rpm_repo_packages:
+ - rpm-build
+ - createrepo_c
+ - createrepo
diff --git a/test/integration/targets/setup_win_printargv/tasks/main.yml b/test/integration/targets/setup_win_printargv/tasks/main.yml
index 5f671924..3924931f 100644
--- a/test/integration/targets/setup_win_printargv/tasks/main.yml
+++ b/test/integration/targets/setup_win_printargv/tasks/main.yml
@@ -1,7 +1,7 @@
---
- name: download the PrintArgv.exe binary to temp location
win_get_url:
- url: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/setup_win_printargv/PrintArgv.exe
+ url: https://ci-files.testing.ansible.com/test/integration/targets/setup_win_printargv/PrintArgv.exe
dest: '{{ remote_tmp_dir }}\PrintArgv.exe'
- name: set fact containing PrintArgv binary path
diff --git a/test/integration/targets/slurp/meta/main.yml b/test/integration/targets/slurp/meta/main.yml
new file mode 100644
index 00000000..1810d4be
--- /dev/null
+++ b/test/integration/targets/slurp/meta/main.yml
@@ -0,0 +1,2 @@
+dependencies:
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/slurp/tasks/main.yml b/test/integration/targets/slurp/tasks/main.yml
index 6d14f4b0..93985941 100644
--- a/test/integration/targets/slurp/tasks/main.yml
+++ b/test/integration/targets/slurp/tasks/main.yml
@@ -19,11 +19,11 @@
- name: Create a UTF-8 file to test with
copy:
content: 'We are at the café'
- dest: '{{ output_dir }}/foo.txt'
+ dest: '{{ remote_tmp_dir }}/foo.txt'
- name: test slurping an existing file
slurp:
- src: '{{ output_dir }}/foo.txt'
+ src: '{{ remote_tmp_dir }}/foo.txt'
register: slurp_existing
- name: check slurp existing result
@@ -38,11 +38,11 @@
- name: Create a binary file to test with
copy:
src: bar.bin
- dest: '{{ output_dir }}/bar.bin'
+ dest: '{{ remote_tmp_dir }}/bar.bin'
- name: test slurping a binary file
slurp:
- path: '{{ output_dir }}/bar.bin'
+ path: '{{ remote_tmp_dir }}/bar.bin'
register: slurp_binary
no_log: true
diff --git a/test/integration/targets/slurp/tasks/test_unreadable.yml b/test/integration/targets/slurp/tasks/test_unreadable.yml
index da5e36af..f8a3cfe7 100644
--- a/test/integration/targets/slurp/tasks/test_unreadable.yml
+++ b/test/integration/targets/slurp/tasks/test_unreadable.yml
@@ -1,17 +1,17 @@
- name: test slurping a non-existent file
slurp:
- src: '{{ output_dir }}/i_do_not_exist'
+ src: '{{ remote_tmp_dir }}/i_do_not_exist'
register: slurp_missing
ignore_errors: yes
- name: Create a directory to test with
file:
- path: '{{ output_dir }}/baz/'
+ path: '{{ remote_tmp_dir }}/baz/'
state: directory
- name: test slurping a directory
slurp:
- src: '{{ output_dir }}/baz'
+ src: '{{ remote_tmp_dir }}/baz'
register: slurp_dir
ignore_errors: yes
@@ -27,13 +27,13 @@
- name: create unreadable file
copy:
content: "Hello, World!"
- dest: "{{ output_dir }}/qux.txt"
+ dest: "{{ remote_tmp_dir }}/qux.txt"
mode: '0600'
owner: root
- name: test slurp unreadable file
slurp:
- src: "{{ output_dir }}/qux.txt"
+ src: "{{ remote_tmp_dir }}/qux.txt"
register: slurp_unreadable_file
become: yes
become_user: "{{ become_test_user }}"
@@ -42,14 +42,14 @@
- name: create unreadable directory
file:
- path: "{{ output_dir }}/test_data"
+ path: "{{ remote_tmp_dir }}/test_data"
state: directory
mode: '0700'
owner: root
- name: test slurp unreadable directory
slurp:
- src: "{{ output_dir }}/test_data"
+ src: "{{ remote_tmp_dir }}/test_data"
register: slurp_unreadable_dir
become: yes
become_user: "{{ become_test_user }}"
@@ -58,7 +58,7 @@
- name: Try to access file as directory
slurp:
- src: "{{ output_dir }}/qux.txt/somefile"
+ src: "{{ remote_tmp_dir }}/qux.txt/somefile"
ignore_errors: yes
register: slurp_path_file_as_dir
diff --git a/test/integration/targets/stat/meta/main.yml b/test/integration/targets/stat/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/stat/meta/main.yml
+++ b/test/integration/targets/stat/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/stat/tasks/main.yml b/test/integration/targets/stat/tasks/main.yml
index 285e2b83..374cb2fd 100644
--- a/test/integration/targets/stat/tasks/main.yml
+++ b/test/integration/targets/stat/tasks/main.yml
@@ -17,10 +17,10 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: make a new file
- copy: dest={{output_dir}}/foo.txt mode=0644 content="hello world"
+ copy: dest={{remote_tmp_dir}}/foo.txt mode=0644 content="hello world"
- name: check stat of file
- stat: path={{output_dir}}/foo.txt
+ stat: path={{remote_tmp_dir}}/foo.txt
register: stat_result
- debug: var=stat_result
@@ -64,13 +64,13 @@
- name: make a symlink
file:
- src: "{{ output_dir }}/foo.txt"
- path: "{{ output_dir }}/foo-link"
+ src: "{{ remote_tmp_dir }}/foo.txt"
+ path: "{{ remote_tmp_dir }}/foo-link"
state: link
- name: check stat of a symlink with follow off
stat:
- path: "{{ output_dir }}/foo-link"
+ path: "{{ remote_tmp_dir }}/foo-link"
register: stat_result
- debug: var=stat_result
@@ -113,7 +113,7 @@
- name: check stat of a symlink with follow on
stat:
- path: "{{ output_dir }}/foo-link"
+ path: "{{ remote_tmp_dir }}/foo-link"
follow: True
register: stat_result
@@ -158,13 +158,13 @@
- name: make a new file with colon in filename
copy:
- dest: "{{ output_dir }}/foo:bar.txt"
+ dest: "{{ remote_tmp_dir }}/foo:bar.txt"
mode: '0644'
content: "hello world"
- name: check stat of a file with colon in name
stat:
- path: "{{ output_dir }}/foo:bar.txt"
+ path: "{{ remote_tmp_dir }}/foo:bar.txt"
follow: True
register: stat_result
diff --git a/test/integration/targets/strategy_free/aliases b/test/integration/targets/strategy_free/aliases
new file mode 100644
index 00000000..b5983214
--- /dev/null
+++ b/test/integration/targets/strategy_free/aliases
@@ -0,0 +1 @@
+shippable/posix/group3
diff --git a/test/integration/targets/strategy_free/inventory b/test/integration/targets/strategy_free/inventory
new file mode 100644
index 00000000..39034f14
--- /dev/null
+++ b/test/integration/targets/strategy_free/inventory
@@ -0,0 +1,2 @@
+[local]
+testhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"
diff --git a/test/integration/targets/strategy_free/last_include_tasks.yml b/test/integration/targets/strategy_free/last_include_tasks.yml
new file mode 100644
index 00000000..6c87242c
--- /dev/null
+++ b/test/integration/targets/strategy_free/last_include_tasks.yml
@@ -0,0 +1,2 @@
+- debug:
+ msg: "INCLUDED TASK EXECUTED"
diff --git a/test/integration/targets/strategy_free/runme.sh b/test/integration/targets/strategy_free/runme.sh
new file mode 100755
index 00000000..f5b912c6
--- /dev/null
+++ b/test/integration/targets/strategy_free/runme.sh
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+set -eux
+
+export ANSIBLE_STRATEGY=free
+
+set +e
+result="$(ansible-playbook test_last_include_in_always.yml -i inventory "$@" 2>&1)"
+set -e
+grep -q "INCLUDED TASK EXECUTED" <<< "$result"
diff --git a/test/integration/targets/strategy_free/test_last_include_in_always.yml b/test/integration/targets/strategy_free/test_last_include_in_always.yml
new file mode 100644
index 00000000..205f3231
--- /dev/null
+++ b/test/integration/targets/strategy_free/test_last_include_in_always.yml
@@ -0,0 +1,9 @@
+- hosts: testhost
+ gather_facts: false
+ strategy: free
+ tasks:
+ - block:
+ - name: EXPECTED FAILURE
+ fail:
+ always:
+ - include_tasks: last_include_tasks.yml
diff --git a/test/integration/targets/subversion/aliases b/test/integration/targets/subversion/aliases
index 1dd2724e..23ada3cc 100644
--- a/test/integration/targets/subversion/aliases
+++ b/test/integration/targets/subversion/aliases
@@ -1,7 +1,7 @@
setup/always/setup_passlib
shippable/posix/group2
-skip/aix
skip/osx
skip/macos
+skip/rhel/9.0b # svn checkout hangs
destructive
needs/root
diff --git a/test/integration/targets/systemd/aliases b/test/integration/targets/systemd/aliases
index f8e28c7e..a6dafcf8 100644
--- a/test/integration/targets/systemd/aliases
+++ b/test/integration/targets/systemd/aliases
@@ -1,2 +1 @@
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/systemd/handlers/main.yml b/test/integration/targets/systemd/handlers/main.yml
index 8643a2a0..57469a04 100644
--- a/test/integration/targets/systemd/handlers/main.yml
+++ b/test/integration/targets/systemd/handlers/main.yml
@@ -2,3 +2,11 @@
file:
path: /etc/systemd/system/sleeper@.service
state: absent
+
+- name: remove dummy indirect service
+ file:
+ path: "/etc/systemd/system/{{item}}"
+ state: absent
+ loop:
+ - dummy.service
+ - dummy.socket
diff --git a/test/integration/targets/systemd/tasks/main.yml b/test/integration/targets/systemd/tasks/main.yml
index 96781eb8..3c585e07 100644
--- a/test/integration/targets/systemd/tasks/main.yml
+++ b/test/integration/targets/systemd/tasks/main.yml
@@ -119,3 +119,4 @@
- systemd_enable_ssh_2 is not changed
- import_tasks: test_unit_template.yml
+- import_tasks: test_indirect_service.yml
diff --git a/test/integration/targets/systemd/tasks/test_indirect_service.yml b/test/integration/targets/systemd/tasks/test_indirect_service.yml
new file mode 100644
index 00000000..fc11343e
--- /dev/null
+++ b/test/integration/targets/systemd/tasks/test_indirect_service.yml
@@ -0,0 +1,37 @@
+- name: Copy service file
+ template:
+ src: "{{item}}"
+ dest: "/etc/systemd/system/{{item}}"
+ owner: root
+ group: root
+ loop:
+ - dummy.service
+ - dummy.socket
+ notify: remove dummy indirect service
+
+- name: Ensure dummy indirect service is disabled
+ systemd:
+ name: "{{indirect_service}}"
+ enabled: false
+ register: dummy_disabled
+
+- assert:
+ that:
+ - dummy_disabled is not changed
+
+- name: Enable indirect service 1
+ systemd:
+ name: '{{ indirect_service }}'
+ enabled: true
+ register: systemd_enable_dummy_indirect_1
+
+- name: Enable indirect service 2
+ systemd:
+ name: '{{ indirect_service }}'
+ enabled: true
+ register: systemd_enable_dummy_indirect_2
+
+- assert:
+ that:
+ - systemd_enable_dummy_indirect_1 is changed
+ - systemd_enable_dummy_indirect_2 is not changed \ No newline at end of file
diff --git a/test/integration/targets/systemd/templates/dummy.service b/test/integration/targets/systemd/templates/dummy.service
new file mode 100644
index 00000000..f38dce10
--- /dev/null
+++ b/test/integration/targets/systemd/templates/dummy.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Dummy Server
+Requires=dummy.socket
+Documentation=dummy
+
+[Service]
+ExecStart=/bin/yes
+StandardInput=socket
+
+[Install]
+Also=dummy.socket
diff --git a/test/integration/targets/systemd/templates/dummy.socket b/test/integration/targets/systemd/templates/dummy.socket
new file mode 100644
index 00000000..f23bf9b5
--- /dev/null
+++ b/test/integration/targets/systemd/templates/dummy.socket
@@ -0,0 +1,8 @@
+[Unit]
+Description=Dummy Server Activation Socket
+
+[Socket]
+ListenDatagram=69
+
+[Install]
+WantedBy=sockets.target \ No newline at end of file
diff --git a/test/integration/targets/systemd/vars/Debian.yml b/test/integration/targets/systemd/vars/Debian.yml
index 9760744d..613410f0 100644
--- a/test/integration/targets/systemd/vars/Debian.yml
+++ b/test/integration/targets/systemd/vars/Debian.yml
@@ -1,2 +1,3 @@
ssh_service: ssh
sleep_bin_path: /bin/sleep
+indirect_service: dummy \ No newline at end of file
diff --git a/test/integration/targets/systemd/vars/default.yml b/test/integration/targets/systemd/vars/default.yml
index 57491ff0..0bf1f892 100644
--- a/test/integration/targets/systemd/vars/default.yml
+++ b/test/integration/targets/systemd/vars/default.yml
@@ -1,2 +1,3 @@
ssh_service: sshd
+indirect_service: dummy
sleep_bin_path: /usr/bin/sleep
diff --git a/test/integration/targets/template/72615.yml b/test/integration/targets/template/72615.yml
index 9a6eb941..153cfd67 100644
--- a/test/integration/targets/template/72615.yml
+++ b/test/integration/targets/template/72615.yml
@@ -16,11 +16,3 @@
- "'top-level-foo' not in template_result"
- "'template-level-foo' in template_result"
- "'template-nested-level-foo' in template_result"
- when: lookup('pipe', ansible_python_interpreter ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.9', '>=')
-
- - assert:
- that:
- - "'top-level-foo' in template_result"
- - "'template-level-foo' not in template_result"
- - "'template-nested-level-foo' not in template_result"
- when: lookup('pipe', ansible_python_interpreter ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.9', '<')
diff --git a/test/integration/targets/template/corner_cases.yml b/test/integration/targets/template/corner_cases.yml
index 48782f79..9d41ed94 100644
--- a/test/integration/targets/template/corner_cases.yml
+++ b/test/integration/targets/template/corner_cases.yml
@@ -12,10 +12,14 @@
- '"I SHOULD NOT BE TEMPLATED" not in adjacent'
- globals1 == "[[], globals()]"
- globals2 == "[[], globals]"
+ - left_hand == '[1] + [2]'
+ - left_hand_2 == '[1 + 2 * 3 / 4] + [-2.5, 2.5, 3.5]'
vars:
adjacent: "{{ empty_list }} + [dont]"
globals1: "[{{ empty_list }}, globals()]"
globals2: "[{{ empty_list }}, globals]"
+ left_hand: '[1] + {{ [2] }}'
+ left_hand_2: '[1 + 2 * 3 / 4] + {{ [-2.5, +2.5, 1 + 2.5] }}'
- name: 'ensure we can add lists'
assert:
diff --git a/test/integration/targets/template/files/string_type_filters.expected b/test/integration/targets/template/files/string_type_filters.expected
new file mode 100644
index 00000000..989c356e
--- /dev/null
+++ b/test/integration/targets/template/files/string_type_filters.expected
@@ -0,0 +1,4 @@
+{
+ "foo": "bar",
+ "foobar": 1
+}
diff --git a/test/integration/targets/template/tasks/main.yml b/test/integration/targets/template/tasks/main.yml
index 14ef6180..e8a2b9a8 100644
--- a/test/integration/targets/template/tasks/main.yml
+++ b/test/integration/targets/template/tasks/main.yml
@@ -199,11 +199,6 @@
# VERIFY lstrip_blocks
-- name: Check support for lstrip_blocks in Jinja2
- shell: "{{ ansible_python.executable }} -c 'import jinja2; jinja2.defaults.LSTRIP_BLOCKS'"
- register: lstrip_block_support
- ignore_errors: True
-
- name: Render a template with "lstrip_blocks" set to False
template:
src: lstrip_blocks.j2
@@ -229,24 +224,15 @@
register: lstrip_blocks_true_result
ignore_errors: True
-- name: Verify exception is thrown if Jinja2 does not support lstrip_blocks but lstrip_blocks is used
- assert:
- that:
- - "lstrip_blocks_true_result.failed"
- - 'lstrip_blocks_true_result.msg is search(">=2.7")'
- when: "lstrip_block_support is failed"
-
- name: Get checksum of known good lstrip_blocks_true.expected
stat:
path: "{{role_path}}/files/lstrip_blocks_true.expected"
register: lstrip_blocks_true_good
- when: "lstrip_block_support is successful"
- name: Verify templated lstrip_blocks_true matches known good using checksum
assert:
that:
- "lstrip_blocks_true_result.checksum == lstrip_blocks_true_good.stat.checksum"
- when: "lstrip_block_support is successful"
# VERIFY CONTENTS
@@ -743,3 +729,35 @@
# aliases file requires root for template tests so this should be safe
- import_tasks: backup_test.yml
+
+- name: test STRING_TYPE_FILTERS
+ copy:
+ content: "{{ a_dict | to_nice_json(indent=(indent_value|int))}}\n"
+ dest: "{{ output_dir }}/string_type_filters.templated"
+ vars:
+ a_dict:
+ foo: bar
+ foobar: 1
+ indent_value: 2
+
+- name: copy known good string_type_filters.expected into place
+ copy:
+ src: string_type_filters.expected
+ dest: "{{ output_dir }}/string_type_filters.expected"
+
+- command: "diff {{ output_dir }}/string_type_filters.templated {{ output_dir}}/string_type_filters.expected"
+ register: out
+
+- assert:
+ that:
+ - out.rc == 0
+
+- template:
+ src: empty_template.j2
+ dest: "{{ output_dir }}/empty_template.templated"
+
+- assert:
+ that:
+ - test
+ vars:
+ test: "{{ lookup('file', '{{ output_dir }}/empty_template.templated')|length == 0 }}"
diff --git a/test/integration/targets/incidental_win_lineinfile/files/test_linebreak.txt b/test/integration/targets/template/templates/empty_template.j2
index e69de29b..e69de29b 100644
--- a/test/integration/targets/incidental_win_lineinfile/files/test_linebreak.txt
+++ b/test/integration/targets/template/templates/empty_template.j2
diff --git a/test/integration/targets/template/unsafe.yml b/test/integration/targets/template/unsafe.yml
index 6746e1ea..bef9a4b4 100644
--- a/test/integration/targets/template/unsafe.yml
+++ b/test/integration/targets/template/unsafe.yml
@@ -17,3 +17,48 @@
that:
- this_always_safe == imunsafe
- imunsafe == this_was_unsafe.strip()
+
+
+- hosts: localhost
+ gather_facts: false
+ vars:
+ output_dir: "{{ lookup('env', 'OUTPUT_DIR') }}"
+ tasks:
+ - set_fact:
+ unsafe_foo: "{{ lookup('list', var0) }}"
+ vars:
+ var0: "{{ var1 }}"
+ var1:
+ - unsafe
+
+ - assert:
+ that:
+ - "{{ unsafe_foo[0] | type_debug == 'AnsibleUnsafeText' }}"
+
+ - block:
+ - copy:
+ dest: "{{ file_name }}"
+ content: !unsafe "{{ i_should_not_be_templated }}"
+
+ - set_fact:
+ file_content: "{{ lookup('file', file_name) }}"
+
+ - assert:
+ that:
+ - not file_content is contains('unsafe')
+
+ - set_fact:
+ file_content: "{{ lookup('file', file_name_tmpl) }}"
+ vars:
+ file_name_tmpl: "{{ file_name }}"
+
+ - assert:
+ that:
+ - not file_content is contains('unsafe')
+ vars:
+ file_name: "{{ output_dir }}/unsafe_file"
+ i_should_not_be_templated: unsafe
+ always:
+ - file:
+ dest: "{{ file_name }}"
+ state: absent
diff --git a/test/integration/targets/template_jinja2_latest/aliases b/test/integration/targets/template_jinja2_latest/aliases
deleted file mode 100644
index b9c19e3d..00000000
--- a/test/integration/targets/template_jinja2_latest/aliases
+++ /dev/null
@@ -1,5 +0,0 @@
-needs/root
-shippable/posix/group2
-needs/target/template
-context/controller
-needs/file/test/lib/ansible_test/_data/requirements/constraints.txt
diff --git a/test/integration/targets/template_jinja2_latest/main.yml b/test/integration/targets/template_jinja2_latest/main.yml
deleted file mode 100644
index aa7d6433..00000000
--- a/test/integration/targets/template_jinja2_latest/main.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- hosts: testhost
- gather_facts: True
- roles:
- - { role: template }
diff --git a/test/integration/targets/template_jinja2_latest/pip-requirements.txt b/test/integration/targets/template_jinja2_latest/pip-requirements.txt
deleted file mode 100644
index fdd9ec5c..00000000
--- a/test/integration/targets/template_jinja2_latest/pip-requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-# pip 7.1 added support for constraints, which are required by ansible-test to install most python requirements
-# see https://github.com/pypa/pip/blame/e648e00dc0226ade30ade99591b245b0c98e86c9/NEWS.rst#L1258
-pip >= 7.1, < 10 ; python_version < '2.7' # pip 10+ drops support for python 2.6 (sanity_ok)
-pip >= 7.1 ; python_version >= '2.7' # sanity_ok
diff --git a/test/integration/targets/template_jinja2_latest/requirements.txt b/test/integration/targets/template_jinja2_latest/requirements.txt
deleted file mode 100644
index 49a806fb..00000000
--- a/test/integration/targets/template_jinja2_latest/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-jinja2 < 2.11 ; python_version < '2.7' # jinja2 2.11 and later require python 2.7 or later
-jinja2 ; python_version >= '2.7'
diff --git a/test/integration/targets/template_jinja2_latest/runme.sh b/test/integration/targets/template_jinja2_latest/runme.sh
deleted file mode 100755
index d6ef693e..00000000
--- a/test/integration/targets/template_jinja2_latest/runme.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-
-set -eux
-
-source virtualenv.sh
-
-pip install --requirement pip-requirements.txt
-
-pip install -U -r requirements.txt --constraint "../../../lib/ansible_test/_data/requirements/constraints.txt"
-
-ANSIBLE_ROLES_PATH=../
-export ANSIBLE_ROLES_PATH
-
-ansible-playbook -i ../../inventory main.yml -v "$@"
diff --git a/test/integration/targets/template_jinja2_non_native/46169.yml b/test/integration/targets/template_jinja2_non_native/46169.yml
index efb443ea..4dc3dc01 100644
--- a/test/integration/targets/template_jinja2_non_native/46169.yml
+++ b/test/integration/targets/template_jinja2_non_native/46169.yml
@@ -29,4 +29,3 @@
- assert:
that:
- native_lookup | type_debug == 'dict'
- when: lookup('pipe', ansible_python_interpreter ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.10', '>=')
diff --git a/test/integration/targets/templating_lookups/runme.sh b/test/integration/targets/templating_lookups/runme.sh
index ebfc0d11..b900c153 100755
--- a/test/integration/targets/templating_lookups/runme.sh
+++ b/test/integration/targets/templating_lookups/runme.sh
@@ -7,6 +7,3 @@ ANSIBLE_ROLES_PATH=./ UNICODE_VAR=café ansible-playbook runme.yml "$@"
ansible-playbook template_lookup_vaulted/playbook.yml --vault-password-file template_lookup_vaulted/test_vault_pass "$@"
ansible-playbook template_deepcopy/playbook.yml -i template_deepcopy/hosts "$@"
-
-# https://github.com/ansible/ansible/issues/66943
-ANSIBLE_JINJA2_NATIVE=0 ansible-playbook template_lookup_safe_eval_unicode/playbook.yml "$@"
diff --git a/test/integration/targets/templating_lookups/template_lookup_safe_eval_unicode/playbook.yml b/test/integration/targets/templating_lookups/template_lookup_safe_eval_unicode/playbook.yml
deleted file mode 100644
index 29e4b615..00000000
--- a/test/integration/targets/templating_lookups/template_lookup_safe_eval_unicode/playbook.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-- hosts: localhost
- gather_facts: no
- vars:
- original_dict: "{{ lookup('template', 'template.json.j2') }}"
- copy_dict: {}
- tasks:
- - set_fact:
- copy_dict: "{{ copy_dict | combine(original_dict) }}"
diff --git a/test/integration/targets/templating_lookups/template_lookup_safe_eval_unicode/template.json.j2 b/test/integration/targets/templating_lookups/template_lookup_safe_eval_unicode/template.json.j2
deleted file mode 100644
index bc31407c..00000000
--- a/test/integration/targets/templating_lookups/template_lookup_safe_eval_unicode/template.json.j2
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "key1": "ascii_value",
- "key2": "unicode_value_křížek",
-}
diff --git a/test/integration/targets/test_mathstuff/tasks/main.yml b/test/integration/targets/test_mathstuff/tasks/main.yml
index dd379ce2..b5109ce3 100644
--- a/test/integration/targets/test_mathstuff/tasks/main.yml
+++ b/test/integration/targets/test_mathstuff/tasks/main.yml
@@ -1,8 +1,3 @@
-- name: Get Jinja2 version
- set_fact:
- jinja2_version: >-
- {{ lookup('pipe', '{{ ansible_playbook_python }} -c "import jinja2; print(jinja2.__version__)"') }}
-
- name: Assert subset tests work
assert:
that:
@@ -28,11 +23,5 @@
that:
- "'bad' is not nan"
- "1.1 | float is not nan"
-
-# Jinja2 versions prior to 2.10 will traceback when using: 'nan' | float
-- name: Assert nan tests work (Jinja2 2.10+)
- assert:
- that:
- "'nan' | float is isnan" # old name
- "'nan' | float is nan"
- when: jinja2_version is version('2.10', '>=')
diff --git a/test/integration/targets/unarchive/aliases b/test/integration/targets/unarchive/aliases
index db9bbd8c..961b2051 100644
--- a/test/integration/targets/unarchive/aliases
+++ b/test/integration/targets/unarchive/aliases
@@ -1,4 +1,3 @@
needs/root
shippable/posix/group2
destructive
-skip/aix
diff --git a/test/integration/targets/unarchive/tasks/main.yml b/test/integration/targets/unarchive/tasks/main.yml
index baa2a8cf..148e583f 100644
--- a/test/integration/targets/unarchive/tasks/main.yml
+++ b/test/integration/targets/unarchive/tasks/main.yml
@@ -18,3 +18,5 @@
- import_tasks: test_download.yml
- import_tasks: test_unprivileged_user.yml
- import_tasks: test_different_language_var.yml
+- import_tasks: test_invalid_options.yml
+- import_tasks: test_ownership_top_folder.yml
diff --git a/test/integration/targets/unarchive/tasks/test_include.yml b/test/integration/targets/unarchive/tasks/test_include.yml
index 04842e0e..ea3a01cf 100644
--- a/test/integration/targets/unarchive/tasks/test_include.yml
+++ b/test/integration/targets/unarchive/tasks/test_include.yml
@@ -24,17 +24,12 @@
paths: "{{ remote_tmp_dir }}/include-zip"
register: unarchive_dir02
-# The map filter was added in Jinja2 2.7, which is newer than the version on RHEL/CentOS 6,
-# so we skip this validation on those hosts
- name: Verify that zip extraction included only one file
assert:
that:
- file_names == ['FOO-UNAR.TXT']
vars:
file_names: "{{ unarchive_dir02.files | map(attribute='path') | map('basename') }}"
- when:
- - "ansible_facts.os_family == 'RedHat'"
- - ansible_facts.distribution_major_version is version('7', '>=')
- name: Unpack tar file include one file
unarchive:
diff --git a/test/integration/targets/unarchive/tasks/test_invalid_options.yml b/test/integration/targets/unarchive/tasks/test_invalid_options.yml
new file mode 100644
index 00000000..68a06213
--- /dev/null
+++ b/test/integration/targets/unarchive/tasks/test_invalid_options.yml
@@ -0,0 +1,27 @@
+- name: create our tar unarchive destination
+ file:
+ path: '{{remote_tmp_dir}}/test-unarchive-tar'
+ state: directory
+
+- name: unarchive a tar file with an invalid option
+ unarchive:
+ src: '{{remote_tmp_dir}}/test-unarchive.tar'
+ dest: '{{remote_tmp_dir}}/test-unarchive-tar'
+ remote_src: yes
+ extra_opts:
+ - "--invalid-éxtra-optら"
+ ignore_errors: yes
+ register: unarchive
+
+- name: verify that the invalid option is in the error message
+ assert:
+ that:
+ - "unarchive is failed"
+ - "unarchive['msg'] is search(msg)"
+ vars:
+ msg: "Unable to list files in the archive: /.*/(tar|gtar): unrecognized option '--invalid-éxtra-optら'"
+
+- name: remove our tar unarchive destination
+ file:
+ path: '{{remote_tmp_dir}}/test-unarchive-tar'
+ state: absent
diff --git a/test/integration/targets/unarchive/tasks/test_ownership_top_folder.yml b/test/integration/targets/unarchive/tasks/test_ownership_top_folder.yml
new file mode 100644
index 00000000..75bd125b
--- /dev/null
+++ b/test/integration/targets/unarchive/tasks/test_ownership_top_folder.yml
@@ -0,0 +1,73 @@
+- name: Create unarchivetest3 user
+ user:
+ name: unarchivetest3
+ group: "{{ group_table[ansible_facts['distribution']] | default(omit) }}"
+ register: user
+ vars:
+ group_table:
+ MacOSX: staff
+
+- name: Test unarchiving as root and apply different ownership to top folder
+ become: yes
+ become_user: root
+ block:
+
+ - name: Create top folder owned by root
+ file:
+ path: "{{ user.home }}/tarball-top-folder"
+ state: directory
+ owner: root
+
+ - name: Add a file owned by root
+ copy:
+ src: foo.txt
+ dest: "{{ user.home }}/tarball-top-folder/foo-unarchive.txt"
+ mode: preserve
+
+ - name: Create a tarball as root. This tarball won't list the top folder when doing "tar tvf test-tarball.tar.gz"
+ shell: tar -czf test-tarball.tar.gz tarball-top-folder/foo-unarchive.txt
+ args:
+ chdir: "{{ user.home }}"
+ creates: "{{ user.home }}/test-tarball.tar.gz"
+
+ - name: Create unarchive destination folder in /home/unarchivetest3/unarchivetest3-unarchive
+ file:
+ path: "{{ user.home }}/unarchivetest3-unarchive"
+ state: directory
+ owner: unarchivetest3
+ group: "{{ user.group }}"
+
+ - name: unarchive the tarball as root. apply ownership for unarchivetest3
+ unarchive:
+ src: "{{ user.home }}/test-tarball.tar.gz"
+ dest: "{{ user.home }}/unarchivetest3-unarchive"
+ remote_src: yes
+ list_files: True
+ owner: unarchivetest3
+ group: "{{ user.group }}"
+
+ - name: Stat the extracted top folder
+ stat:
+ path: "{{ user.home }}/unarchivetest3-unarchive/tarball-top-folder"
+ register: top_folder_info
+
+ - name: verify that extracted top folder is owned by unarchivetest3
+ assert:
+ that:
+ - top_folder_info.stat.pw_name == "unarchivetest3"
+ - top_folder_info.stat.gid == {{ user.group }}
+
+ always:
+ - name: remove our unarchivetest3 user and files
+ user:
+ name: unarchivetest3
+ state: absent
+ remove: yes
+ become: no
+
+ - name: Remove user home directory on macOS
+ file:
+ path: /Users/unarchivetest3
+ state: absent
+ become: no
+ when: ansible_facts.distribution == 'MacOSX'
diff --git a/test/integration/targets/undefined/tasks/main.yml b/test/integration/targets/undefined/tasks/main.yml
index bbd82845..5bf47860 100644
--- a/test/integration/targets/undefined/tasks/main.yml
+++ b/test/integration/targets/undefined/tasks/main.yml
@@ -1,19 +1,16 @@
-- when: lookup('pipe', ansible_playbook_python ~ ' -c "import jinja2; print(jinja2.__version__)"') is version('2.7', '>=')
- block:
- - set_fact:
- names: '{{ things|map(attribute="name") }}'
- vars:
- things:
- - name: one
- - name: two
- - notname: three
- - name: four
+- set_fact:
+ names: '{{ things|map(attribute="name") }}'
+ vars:
+ things:
+ - name: one
+ - name: two
+ - notname: three
+ - name: four
+ ignore_errors: true
+ register: undefined_set_fact
- - assert:
- that:
- - '"%r"|format(an_undefined_var) == "AnsibleUndefined"'
- - '"%r"|format(undef()) == "AnsibleUndefined"'
- # The existence of AnsibleUndefined in a templating result
- # prevents safe_eval from turning the value into a python object
- - names is string
- - '", AnsibleUndefined," in names'
+- assert:
+ that:
+ - '("%r"|format(undefined_variable)).startswith("AnsibleUndefined")'
+ - undefined_set_fact is failed
+ - undefined_set_fact.msg is contains 'undefined variable'
diff --git a/test/integration/targets/unsafe_writes/aliases b/test/integration/targets/unsafe_writes/aliases
index cf954afc..0d8146e7 100644
--- a/test/integration/targets/unsafe_writes/aliases
+++ b/test/integration/targets/unsafe_writes/aliases
@@ -3,6 +3,5 @@ needs/root
skip/freebsd
skip/osx
skip/macos
-skip/aix
shippable/posix/group3
needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/uri/aliases b/test/integration/targets/uri/aliases
index 11e91ee7..6c31bd09 100644
--- a/test/integration/targets/uri/aliases
+++ b/test/integration/targets/uri/aliases
@@ -1,4 +1,3 @@
destructive
shippable/posix/group4
needs/httptester
-skip/aix
diff --git a/test/integration/targets/uri/tasks/main.yml b/test/integration/targets/uri/tasks/main.yml
index 755cd432..1d560e5c 100644
--- a/test/integration/targets/uri/tasks/main.yml
+++ b/test/integration/targets/uri/tasks/main.yml
@@ -173,6 +173,25 @@
- result is failed
- "'certificate verify failed' in result.msg"
+- name: Locate ca-bundle
+ stat:
+ path: '{{ item }}'
+ loop:
+ - /etc/ssl/certs/ca-bundle.crt
+ - /etc/ssl/certs/ca-certificates.crt
+ - /var/lib/ca-certificates/ca-bundle.pem
+ - /usr/local/share/certs/ca-root-nss.crt
+ - '{{ macos_cafile.stdout_lines|default(["/_i_dont_exist_ca.pem"])|first }}'
+ - /etc/ssl/cert.pem
+ register: ca_bundle_candidates
+
+- name: Test that ca_path can be a full bundle
+ uri:
+ url: "https://{{ httpbin_host }}/get"
+ ca_path: '{{ ca_bundle }}'
+ vars:
+ ca_bundle: '{{ ca_bundle_candidates.results|selectattr("stat.exists")|map(attribute="item")|first }}'
+
- name: test redirect without follow_redirects
uri:
url: 'https://{{ httpbin_host }}/redirect/2'
@@ -228,6 +247,16 @@
headers:
Cookie: "fake=fake_value"
+- name: test digest auth failure
+ uri:
+ url: 'https://{{ httpbin_host }}/digest-auth/auth/user/passwd'
+ user: user
+ password: wrong
+ headers:
+ Cookie: "fake=fake_value"
+ register: result
+ failed_when: result.status != 401
+
- name: test unredirected_headers
uri:
url: 'https://{{ httpbin_host }}/redirect-to?status_code=301&url=/basic-auth/user/passwd'
diff --git a/test/integration/targets/user/aliases b/test/integration/targets/user/aliases
index 3a07aab3..a4c92ef8 100644
--- a/test/integration/targets/user/aliases
+++ b/test/integration/targets/user/aliases
@@ -1,3 +1,2 @@
destructive
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/user/tasks/test_expires_min_max.yml b/test/integration/targets/user/tasks/test_expires_min_max.yml
index 80e607b6..0b803791 100644
--- a/test/integration/targets/user/tasks/test_expires_min_max.yml
+++ b/test/integration/targets/user/tasks/test_expires_min_max.yml
@@ -53,3 +53,21 @@
that:
- ansible_facts.getent_shadow['ansibulluser'][2] == '5'
- ansible_facts.getent_shadow['ansibulluser'][3] == '10'
+
+ - name: Set min and max at the same time
+ user:
+ name: ansibulluser
+ # also checks that assigning 0 works
+ password_expire_min: 0
+ password_expire_max: 0
+
+ - name: Get shadow data for ansibulluser
+ getent:
+ database: shadow
+ key: ansibulluser
+
+ - name: Ensure password expiration was set properly
+ assert:
+ that:
+ - ansible_facts.getent_shadow['ansibulluser'][2] == '0'
+ - ansible_facts.getent_shadow['ansibulluser'][3] == '0'
diff --git a/test/integration/targets/wait_for/meta/main.yml b/test/integration/targets/wait_for/meta/main.yml
index 07faa217..cb6005d0 100644
--- a/test/integration/targets/wait_for/meta/main.yml
+++ b/test/integration/targets/wait_for/meta/main.yml
@@ -1,2 +1,3 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/wait_for/tasks/main.yml b/test/integration/targets/wait_for/tasks/main.yml
index 1af08924..c524f990 100644
--- a/test/integration/targets/wait_for/tasks/main.yml
+++ b/test/integration/targets/wait_for/tasks/main.yml
@@ -18,21 +18,21 @@
- name: setup webserver
copy:
src: "testserver.py"
- dest: "{{ output_dir }}/testserver.py"
+ dest: "{{ remote_tmp_dir }}/testserver.py"
- name: setup a path
file:
- path: "{{ output_dir }}/wait_for_file"
+ path: "{{ remote_tmp_dir }}/wait_for_file"
state: touch
- name: setup remove a file after 3s
- shell: sleep 3 && rm {{ output_dir }}/wait_for_file
+ shell: sleep 3 && rm {{ remote_tmp_dir }}/wait_for_file
async: 20
poll: 0
- name: test for absent path
wait_for:
- path: "{{ output_dir }}/wait_for_file"
+ path: "{{ remote_tmp_dir }}/wait_for_file"
state: absent
timeout: 20
register: waitfor
@@ -40,36 +40,36 @@
assert:
that:
- waitfor is successful
- - waitfor.path == "{{ output_dir | expanduser }}/wait_for_file"
+ - waitfor.path == "{{ remote_tmp_dir | expanduser }}/wait_for_file"
- waitfor.elapsed >= 2
- waitfor.elapsed <= 15
- name: setup create a file after 3s
- shell: sleep 3 && touch {{ output_dir }}/wait_for_file
+ shell: sleep 3 && touch {{ remote_tmp_dir }}/wait_for_file
async: 20
poll: 0
- name: test for present path
wait_for:
- path: "{{ output_dir }}/wait_for_file"
+ path: "{{ remote_tmp_dir }}/wait_for_file"
timeout: 5
register: waitfor
- name: verify test for absent path
assert:
that:
- waitfor is successful
- - waitfor.path == "{{ output_dir | expanduser }}/wait_for_file"
+ - waitfor.path == "{{ remote_tmp_dir | expanduser }}/wait_for_file"
- waitfor.elapsed >= 2
- waitfor.elapsed <= 15
- name: setup write keyword to file after 3s
- shell: sleep 3 && echo completed > {{output_dir}}/wait_for_keyword
+ shell: sleep 3 && echo completed > {{remote_tmp_dir}}/wait_for_keyword
async: 20
poll: 0
- name: test wait for keyword in file
wait_for:
- path: "{{output_dir}}/wait_for_keyword"
+ path: "{{remote_tmp_dir}}/wait_for_keyword"
search_regex: completed
timeout: 5
register: waitfor
@@ -83,13 +83,13 @@
- waitfor.elapsed <= 15
- name: setup write keyword to file after 3s
- shell: sleep 3 && echo "completed data 123" > {{output_dir}}/wait_for_keyword
+ shell: sleep 3 && echo "completed data 123" > {{remote_tmp_dir}}/wait_for_keyword
async: 20
poll: 0
- name: test wait for keyword in file with match groups
wait_for:
- path: "{{output_dir}}/wait_for_keyword"
+ path: "{{remote_tmp_dir}}/wait_for_keyword"
search_regex: completed (?P<foo>\w+) ([0-9]+)
timeout: 5
register: waitfor
@@ -132,7 +132,7 @@
- "waitfor.msg == 'fail with custom message'"
- name: setup start SimpleHTTPServer
- shell: sleep 3 && cd {{ files_dir }} && {{ ansible_python.executable }} {{ output_dir}}/testserver.py {{ http_port }}
+ shell: sleep 3 && cd {{ files_dir }} && {{ ansible_python.executable }} {{ remote_tmp_dir}}/testserver.py {{ http_port }}
async: 120 # this test set can take ~1m to run on FreeBSD (via Shippable)
poll: 0
@@ -156,10 +156,10 @@
- name: Copy zombie.py
copy:
src: zombie.py
- dest: "{{ output_dir }}"
+ dest: "{{ remote_tmp_dir }}"
- name: Create zombie process
- shell: "{{ ansible_python.executable }} {{ output_dir }}/zombie"
+ shell: "{{ ansible_python.executable }} {{ remote_tmp_dir }}/zombie"
async: 90
poll: 0
@@ -175,3 +175,15 @@
- waitfor is successful
- waitfor is not changed
- "waitfor.port == {{ http_port }}"
+
+- name: test wait_for with delay
+ wait_for:
+ timeout: 2
+ delay: 2
+ register: waitfor
+
+- name: verify test wait_for with delay
+ assert:
+ that:
+ - waitfor is successful
+ - waitfor.elapsed >= 4
diff --git a/test/integration/targets/wait_for/vars/main.yml b/test/integration/targets/wait_for/vars/main.yml
index c2732948..d15b6d7d 100644
--- a/test/integration/targets/wait_for/vars/main.yml
+++ b/test/integration/targets/wait_for/vars/main.yml
@@ -1,4 +1,4 @@
---
http_port: 15261
-files_dir: '{{ output_dir|expanduser }}/files'
-checkout_dir: '{{ output_dir }}/git'
+files_dir: '{{ remote_tmp_dir|expanduser }}/files'
+checkout_dir: '{{ remote_tmp_dir }}/git'
diff --git a/test/integration/targets/win_async_wrapper/library/async_test.ps1 b/test/integration/targets/win_async_wrapper/library/async_test.ps1
index fa41b3e8..3b4c1c85 100644
--- a/test/integration/targets/win_async_wrapper/library/async_test.ps1
+++ b/test/integration/targets/win_async_wrapper/library/async_test.ps1
@@ -7,9 +7,9 @@
$parsed_args = Parse-Args $args
$sleep_delay_sec = Get-AnsibleParam -obj $parsed_args -name "sleep_delay_sec" -type "int" -default 0
-$fail_mode = Get-AnsibleParam -obj $parsed_args -name "fail_mode" -type "str" -default "success" -validateset "success","graceful","exception"
+$fail_mode = Get-AnsibleParam -obj $parsed_args -name "fail_mode" -type "str" -default "success" -validateset "success", "graceful", "exception"
-If($fail_mode -isnot [array]) {
+If ($fail_mode -isnot [array]) {
$fail_mode = @($fail_mode)
}
@@ -19,30 +19,29 @@ $result = @{
module_tempdir = $PSScriptRoot
}
-If($sleep_delay_sec -gt 0) {
+If ($sleep_delay_sec -gt 0) {
Sleep -Seconds $sleep_delay_sec
$result["slept_sec"] = $sleep_delay_sec
}
-If($fail_mode -contains "leading_junk") {
+If ($fail_mode -contains "leading_junk") {
Write-Output "leading junk before module output"
}
-If($fail_mode -contains "graceful") {
+If ($fail_mode -contains "graceful") {
Fail-Json $result "failed gracefully"
}
Try {
- If($fail_mode -contains "exception") {
+ If ($fail_mode -contains "exception") {
Throw "failing via exception"
}
Exit-Json $result
}
-Finally
-{
- If($fail_mode -contains "trailing_junk") {
+Finally {
+ If ($fail_mode -contains "trailing_junk") {
Write-Output "trailing junk after module output"
}
}
diff --git a/test/integration/targets/win_exec_wrapper/library/test_common_functions.ps1 b/test/integration/targets/win_exec_wrapper/library/test_common_functions.ps1
index 9a5918f9..dde1ebc4 100644
--- a/test/integration/targets/win_exec_wrapper/library/test_common_functions.ps1
+++ b/test/integration/targets/win_exec_wrapper/library/test_common_functions.ps1
@@ -4,10 +4,13 @@
$ErrorActionPreference = "Stop"
-Function Assert-Equals($actual, $expected) {
+Function Assert-Equal($actual, $expected) {
if ($actual -cne $expected) {
$call_stack = (Get-PSCallStack)[1]
- $error_msg = "AssertionError:`r`nActual: `"$actual`" != Expected: `"$expected`"`r`nLine: $($call_stack.ScriptLineNumber), Method: $($call_stack.Position.Text)"
+ $error_msg = -join @(
+ "AssertionError:`r`nActual: `"$actual`" != Expected: `"$expected`"`r`nLine: "
+ "$($call_stack.ScriptLineNumber), Method: $($call_stack.Position.Text)"
+ )
Fail-Json -obj $result -message $error_msg
}
}
@@ -17,23 +20,23 @@ $result = @{
}
#ConvertFrom-AnsibleJso
-$input_json = '{"string":"string","float":3.1415926,"dict":{"string":"string","int":1},"list":["entry 1","entry 2"],"null":null,"int":1}'
+$input_json = '{"string":"string","float":3.1415926,"dict":{"string":"string","int":1},"list":["entry 1","entry 2"],"null":null,"int":1}'
$actual = ConvertFrom-AnsibleJson -InputObject $input_json
-Assert-Equals -actual $actual.GetType() -expected ([Hashtable])
-Assert-Equals -actual $actual.string.GetType() -expected ([String])
-Assert-Equals -actual $actual.string -expected "string"
-Assert-Equals -actual $actual.int.GetType() -expected ([Int32])
-Assert-Equals -actual $actual.int -expected 1
-Assert-Equals -actual $actual.null -expected $null
-Assert-Equals -actual $actual.float.GetType() -expected ([Decimal])
-Assert-Equals -actual $actual.float -expected 3.1415926
-Assert-Equals -actual $actual.list.GetType() -expected ([Object[]])
-Assert-Equals -actual $actual.list.Count -expected 2
-Assert-Equals -actual $actual.list[0] -expected "entry 1"
-Assert-Equals -actual $actual.list[1] -expected "entry 2"
-Assert-Equals -actual $actual.GetType() -expected ([Hashtable])
-Assert-Equals -actual $actual.dict.string -expected "string"
-Assert-Equals -actual $actual.dict.int -expected 1
+Assert-Equal -actual $actual.GetType() -expected ([Hashtable])
+Assert-Equal -actual $actual.string.GetType() -expected ([String])
+Assert-Equal -actual $actual.string -expected "string"
+Assert-Equal -actual $actual.int.GetType() -expected ([Int32])
+Assert-Equal -actual $actual.int -expected 1
+Assert-Equal -actual $actual.null -expected $null
+Assert-Equal -actual $actual.float.GetType() -expected ([Decimal])
+Assert-Equal -actual $actual.float -expected 3.1415926
+Assert-Equal -actual $actual.list.GetType() -expected ([Object[]])
+Assert-Equal -actual $actual.list.Count -expected 2
+Assert-Equal -actual $actual.list[0] -expected "entry 1"
+Assert-Equal -actual $actual.list[1] -expected "entry 2"
+Assert-Equal -actual $actual.GetType() -expected ([Hashtable])
+Assert-Equal -actual $actual.dict.string -expected "string"
+Assert-Equal -actual $actual.dict.int -expected 1
$result.msg = "good"
Exit-Json -obj $result
diff --git a/test/integration/targets/win_exec_wrapper/library/test_fail.ps1 b/test/integration/targets/win_exec_wrapper/library/test_fail.ps1
index 06c63f72..72b89c69 100644
--- a/test/integration/targets/win_exec_wrapper/library/test_fail.ps1
+++ b/test/integration/targets/win_exec_wrapper/library/test_fail.ps1
@@ -31,24 +31,32 @@ Function Test-ThrowException {
if ($data -eq "normal") {
Exit-Json -obj $result
-} elseif ($data -eq "fail") {
+}
+elseif ($data -eq "fail") {
Fail-Json -obj $result -message "fail message"
-} elseif ($data -eq "throw") {
+}
+elseif ($data -eq "throw") {
throw [ArgumentException]"module is thrown"
-} elseif ($data -eq "error") {
+}
+elseif ($data -eq "error") {
Write-Error -Message $data
-} elseif ($data -eq "cmdlet_error") {
+}
+elseif ($data -eq "cmdlet_error") {
Get-Item -Path "fake:\path"
-} elseif ($data -eq "dotnet_exception") {
+}
+elseif ($data -eq "dotnet_exception") {
[System.IO.Path]::GetFullPath($null)
-} elseif ($data -eq "function_throw") {
+}
+elseif ($data -eq "function_throw") {
Test-ThrowException
-} elseif ($data -eq "proc_exit_fine") {
+}
+elseif ($data -eq "proc_exit_fine") {
# verifies that if no error was actually fired and we have an output, we
# don't use the RC to validate if the module failed
&cmd.exe /c exit 2
Exit-Json -obj $result
-} elseif ($data -eq "proc_exit_fail") {
+}
+elseif ($data -eq "proc_exit_fail") {
&cmd.exe /c exit 2
Fail-Json -obj $result -message "proc_exit_fail"
}
diff --git a/test/integration/targets/win_module_utils/library/legacy_only_new_way.ps1 b/test/integration/targets/win_module_utils/library/legacy_only_new_way.ps1
index 8ea3e061..045ca75f 100644
--- a/test/integration/targets/win_module_utils/library/legacy_only_new_way.ps1
+++ b/test/integration/targets/win_module_utils/library/legacy_only_new_way.ps1
@@ -2,4 +2,4 @@
#Requires -Module Ansible.ModuleUtils.Legacy
-Exit-Json @{ data="success" }
+Exit-Json @{ data = "success" }
diff --git a/test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 b/test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1
index d9c2e008..837a5162 100644
--- a/test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1
+++ b/test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1
@@ -2,5 +2,5 @@
#Requires -Module Ansible.ModuleUtils.Legacy
-Exit-Json @{ data="success" }
+Exit-Json @{ data = "success" }
diff --git a/test/integration/targets/win_module_utils/library/legacy_only_old_way.ps1 b/test/integration/targets/win_module_utils/library/legacy_only_old_way.ps1
index 652e1281..3c6b0832 100644
--- a/test/integration/targets/win_module_utils/library/legacy_only_old_way.ps1
+++ b/test/integration/targets/win_module_utils/library/legacy_only_old_way.ps1
@@ -2,4 +2,4 @@
# POWERSHELL_COMMON
-Exit-Json @{ data="success" }
+Exit-Json @{ data = "success" }
diff --git a/test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 b/test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1
index d5d328a5..afe7548d 100644
--- a/test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1
+++ b/test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1
@@ -1,4 +1,4 @@
#!powershell
# POWERSHELL_COMMON
-Exit-Json @{ data="success" }
+Exit-Json @{ data = "success" }
diff --git a/test/integration/targets/win_module_utils/library/uses_bogus_utils.ps1 b/test/integration/targets/win_module_utils/library/uses_bogus_utils.ps1
index 0a1c21a3..3886aec6 100644
--- a/test/integration/targets/win_module_utils/library/uses_bogus_utils.ps1
+++ b/test/integration/targets/win_module_utils/library/uses_bogus_utils.ps1
@@ -3,4 +3,4 @@
# this should fail
#Requires -Module Ansible.ModuleUtils.BogusModule
-Exit-Json @{ data="success" }
+Exit-Json @{ data = "success" }
diff --git a/test/integration/targets/win_module_utils/library/uses_local_utils.ps1 b/test/integration/targets/win_module_utils/library/uses_local_utils.ps1
index 3dfc940c..48c27571 100644
--- a/test/integration/targets/win_module_utils/library/uses_local_utils.ps1
+++ b/test/integration/targets/win_module_utils/library/uses_local_utils.ps1
@@ -6,4 +6,4 @@
$o = CustomFunction
-Exit-Json @{data=$o}
+Exit-Json @{data = $o }
diff --git a/test/integration/targets/win_script/files/test_script_bool.ps1 b/test/integration/targets/win_script/files/test_script_bool.ps1
index 970dedce..d5116f36 100644
--- a/test/integration/targets/win_script/files/test_script_bool.ps1
+++ b/test/integration/targets/win_script/files/test_script_bool.ps1
@@ -1,5 +1,5 @@
Param(
-[bool]$boolvariable
+ [bool]$boolvariable
)
Write-Output $boolvariable.GetType().FullName
diff --git a/test/integration/targets/win_script/files/test_script_creates_file.ps1 b/test/integration/targets/win_script/files/test_script_creates_file.ps1
index 47f85a2d..3a7c3a9f 100644
--- a/test/integration/targets/win_script/files/test_script_creates_file.ps1
+++ b/test/integration/targets/win_script/files/test_script_creates_file.ps1
@@ -1,3 +1,3 @@
# Test script to create a file.
-echo $null > $args[0]
+Write-Output $null > $args[0]
diff --git a/test/integration/targets/win_script/files/test_script_with_args.ps1 b/test/integration/targets/win_script/files/test_script_with_args.ps1
index 520aafa3..01bb37f5 100644
--- a/test/integration/targets/win_script/files/test_script_with_args.ps1
+++ b/test/integration/targets/win_script/files/test_script_with_args.ps1
@@ -1,7 +1,6 @@
# Test script to make sure the Ansible script module works when arguments are
# passed to the script.
-foreach ($i in $args)
-{
+foreach ($i in $args) {
Write-Host $i;
}
diff --git a/test/integration/targets/win_script/files/test_script_with_errors.ps1 b/test/integration/targets/win_script/files/test_script_with_errors.ps1
index 2d60dc1f..56f97735 100644
--- a/test/integration/targets/win_script/files/test_script_with_errors.ps1
+++ b/test/integration/targets/win_script/files/test_script_with_errors.ps1
@@ -1,7 +1,6 @@
# Test script to make sure we handle non-zero exit codes.
-trap
-{
+trap {
Write-Error -ErrorRecord $_
exit 1;
}
diff --git a/test/integration/targets/yum/aliases b/test/integration/targets/yum/aliases
index 5aba303d..aed61383 100644
--- a/test/integration/targets/yum/aliases
+++ b/test/integration/targets/yum/aliases
@@ -1,6 +1,5 @@
destructive
shippable/posix/group4
-skip/aix
skip/freebsd
skip/osx
skip/macos
diff --git a/test/integration/targets/yum/tasks/proxy.yml b/test/integration/targets/yum/tasks/proxy.yml
index 00fcf488..b011d11b 100644
--- a/test/integration/targets/yum/tasks/proxy.yml
+++ b/test/integration/targets/yum/tasks/proxy.yml
@@ -2,7 +2,7 @@
block:
- name: install tinyproxy
yum:
- name: 'https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/tinyproxy-1.10.0-3.el7.x86_64.rpm'
+ name: 'https://ci-files.testing.ansible.com/test/integration/targets/yum/tinyproxy-1.10.0-3.el7.x86_64.rpm'
state: installed
# systemd doesn't play nice with this in a container for some reason
@@ -25,7 +25,7 @@
- name: install ninvaders with unauthenticated proxy
yum:
- name: 'https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/ninvaders-0.1.1-18.el7.x86_64.rpm'
+ name: 'https://ci-files.testing.ansible.com/test/integration/targets/yum/ninvaders-0.1.1-18.el7.x86_64.rpm'
state: installed
register: yum_proxy_result
@@ -84,7 +84,7 @@
- name: install ninvaders with authenticated proxy
yum:
- name: 'https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/ninvaders-0.1.1-18.el7.x86_64.rpm'
+ name: 'https://ci-files.testing.ansible.com/test/integration/targets/yum/ninvaders-0.1.1-18.el7.x86_64.rpm'
state: installed
register: yum_proxy_result
@@ -134,7 +134,7 @@
- name: install ninvaders with proxy, proxy_username, and proxy_password config in yum.conf
yum:
- name: 'https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/ninvaders-0.1.1-18.el7.x86_64.rpm'
+ name: 'https://ci-files.testing.ansible.com/test/integration/targets/yum/ninvaders-0.1.1-18.el7.x86_64.rpm'
state: installed
register: yum_proxy_result
diff --git a/test/integration/targets/yum/tasks/yum.yml b/test/integration/targets/yum/tasks/yum.yml
index e1caa852..511c5776 100644
--- a/test/integration/targets/yum/tasks/yum.yml
+++ b/test/integration/targets/yum/tasks/yum.yml
@@ -532,7 +532,7 @@
- name: try to install from non existing url
yum:
- name: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/non-existing-1.0.0.fc26.noarch.rpm
+ name: https://ci-files.testing.ansible.com/test/integration/targets/yum/non-existing-1.0.0.fc26.noarch.rpm
state: present
register: yum_result
ignore_errors: yes
@@ -580,7 +580,7 @@
- name: try to install uncompatible arch rpm on non-ppc64le, should fail
yum:
- name: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/banner-1.3.4-3.el7.ppc64le.rpm
+ name: https://ci-files.testing.ansible.com/test/integration/targets/yum/banner-1.3.4-3.el7.ppc64le.rpm
state: present
register: yum_result
ignore_errors: True
@@ -597,7 +597,7 @@
- name: try to install uncompatible arch rpm on ppc64le, should fail
yum:
- name: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/tinyproxy-1.10.0-3.el7.x86_64.rpm
+ name: https://ci-files.testing.ansible.com/test/integration/targets/yum/tinyproxy-1.10.0-3.el7.x86_64.rpm
state: present
register: yum_result
ignore_errors: True
@@ -837,8 +837,8 @@
- name: Install test packages
yum:
name:
- - https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/test-package-that-provides-toaster-1.3.3.7-1.el7.noarch.rpm
- - https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/toaster-1.2.3.4-1.el7.noarch.rpm
+ - https://ci-files.testing.ansible.com/test/integration/targets/yum/test-package-that-provides-toaster-1.3.3.7-1.el7.noarch.rpm
+ - https://ci-files.testing.ansible.com/test/integration/targets/yum/toaster-1.2.3.4-1.el7.noarch.rpm
disable_gpg_check: true
register: install
diff --git a/test/integration/targets/yum/vars/main.yml b/test/integration/targets/yum/vars/main.yml
index 2be15132..a2a073f2 100644
--- a/test/integration/targets/yum/vars/main.yml
+++ b/test/integration/targets/yum/vars/main.yml
@@ -1 +1 @@
-multiarch_repo_baseurl: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/multiarch-test-repo/RPMS/
+multiarch_repo_baseurl: https://ci-files.testing.ansible.com/test/integration/targets/yum/multiarch-test-repo/RPMS/
diff --git a/test/integration/targets/yum_repository/aliases b/test/integration/targets/yum_repository/aliases
index 0b484bba..6eae8bd8 100644
--- a/test/integration/targets/yum_repository/aliases
+++ b/test/integration/targets/yum_repository/aliases
@@ -1,3 +1,2 @@
shippable/posix/group1
destructive
-skip/aix
diff --git a/test/lib/ansible_test/__init__.py b/test/lib/ansible_test/__init__.py
index e69de29b..527d413a 100644
--- a/test/lib/ansible_test/__init__.py
+++ b/test/lib/ansible_test/__init__.py
@@ -0,0 +1,2 @@
+# Empty __init__.py to allow importing of `ansible_test._util.target.common` under Python 2.x.
+# This allows the ansible-test entry point to report supported Python versions before exiting.
diff --git a/test/lib/ansible_test/_data/completion/docker.txt b/test/lib/ansible_test/_data/completion/docker.txt
index 05d173a6..93a9dea2 100644
--- a/test/lib/ansible_test/_data/completion/docker.txt
+++ b/test/lib/ansible_test/_data/completion/docker.txt
@@ -1,12 +1,10 @@
-base image=quay.io/ansible/base-test-container:1.1.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined
-default image=quay.io/ansible/default-test-container:4.2.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=collection
-default image=quay.io/ansible/ansible-core-test-container:4.2.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=ansible-core
+base image=quay.io/ansible/base-test-container:2.2.0 python=3.10,2.7,3.5,3.6,3.7,3.8,3.9 seccomp=unconfined
+default image=quay.io/ansible/default-test-container:5.9.0 python=3.10,2.7,3.5,3.6,3.7,3.8,3.9 seccomp=unconfined context=collection
+default image=quay.io/ansible/ansible-core-test-container:5.9.0 python=3.10,2.7,3.5,3.6,3.7,3.8,3.9 seccomp=unconfined context=ansible-core
alpine3 image=quay.io/ansible/alpine3-test-container:3.3.0 python=3.9
-centos6 image=quay.io/ansible/centos6-test-container:3.1.0 python=2.6 seccomp=unconfined
centos7 image=quay.io/ansible/centos7-test-container:3.1.0 python=2.7 seccomp=unconfined
-centos8 image=quay.io/ansible/centos8-test-container:3.1.0 python=3.6 seccomp=unconfined
-fedora33 image=quay.io/ansible/fedora33-test-container:3.1.0 python=3.9
fedora34 image=quay.io/ansible/fedora34-test-container:3.1.0 python=3.9 seccomp=unconfined
+fedora35 image=quay.io/ansible/fedora35-test-container:3.2.0 python=3.10 seccomp=unconfined
opensuse15py2 image=quay.io/ansible/opensuse15py2-test-container:3.1.0 python=2.7
opensuse15 image=quay.io/ansible/opensuse15-test-container:3.1.0 python=3.6
ubuntu1804 image=quay.io/ansible/ubuntu1804-test-container:3.1.0 python=3.6 seccomp=unconfined
diff --git a/test/lib/ansible_test/_data/completion/remote.txt b/test/lib/ansible_test/_data/completion/remote.txt
index 97c9a677..c7a024fb 100644
--- a/test/lib/ansible_test/_data/completion/remote.txt
+++ b/test/lib/ansible_test/_data/completion/remote.txt
@@ -1,10 +1,8 @@
-freebsd/12.2 python=3.7,2.7,3.8 python_dir=/usr/local/bin provider=aws
+freebsd/12.3 python=3.8 python_dir=/usr/local/bin provider=aws
freebsd/13.0 python=3.7,2.7,3.8,3.9 python_dir=/usr/local/bin provider=aws
freebsd python_dir=/usr/local/bin provider=aws
-macos/11.1 python=3.9 python_dir=/usr/local/bin provider=parallels
+macos/12.0 python=3.10 python_dir=/usr/local/bin provider=parallels
macos python_dir=/usr/local/bin provider=parallels
rhel/7.9 python=2.7 provider=aws
-rhel/8.4 python=3.6,3.8 provider=aws
+rhel/8.5 python=3.6,3.8,3.9 provider=aws
rhel provider=aws
-aix/7.2 python=2.7,3.7 python_dir=/opt/freeware/bin provider=ibmps
-aix python_dir=/opt/freeware/bin provider=ibmps
diff --git a/test/lib/ansible_test/_data/completion/windows.txt b/test/lib/ansible_test/_data/completion/windows.txt
index 94868f06..280ad97f 100644
--- a/test/lib/ansible_test/_data/completion/windows.txt
+++ b/test/lib/ansible_test/_data/completion/windows.txt
@@ -3,3 +3,4 @@ windows/2012-R2 provider=aws
windows/2016 provider=aws
windows/2019 provider=aws
windows/2022 provider=aws
+windows provider=aws
diff --git a/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.ps1 b/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.ps1
index 012af83b..b9e563d1 100644
--- a/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.ps1
+++ b/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.ps1
@@ -8,7 +8,7 @@ A list of hosts entries, delimited by '|'.
[CmdletBinding()]
param(
- [Parameter(Mandatory=$true, Position=0)][String]$Hosts
+ [Parameter(Mandatory = $true, Position = 0)][String]$Hosts
)
$ProgressPreference = "SilentlyContinue"
diff --git a/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.ps1 b/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.ps1
index fdfb9616..ac19ffe8 100644
--- a/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.ps1
+++ b/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.ps1
@@ -8,7 +8,7 @@ A list of hosts entries, delimited by '|'.
[CmdletBinding()]
param(
- [Parameter(Mandatory=$true, Position=0)][String]$Hosts
+ [Parameter(Mandatory = $true, Position = 0)][String]$Hosts
)
$ProgressPreference = "SilentlyContinue"
@@ -26,7 +26,8 @@ $new_lines = [System.Collections.ArrayList]@()
foreach ($host_line in $hosts_file_lines) {
if ($host_line -in $hosts_entries) {
$changed = $true
- } else {
+ }
+ else {
$new_lines += $host_line
}
}
diff --git a/test/lib/ansible_test/_data/pytest.ini b/test/lib/ansible_test/_data/pytest.ini
index c1c38ff7..b2668dc2 100644
--- a/test/lib/ansible_test/_data/pytest.ini
+++ b/test/lib/ansible_test/_data/pytest.ini
@@ -1,9 +1,4 @@
[pytest]
xfail_strict = true
mock_use_standalone_module = true
-# It was decided to stick with "legacy" (aka "xunit1") for now.
-# Currently used pytest versions all support xunit2 format too.
-# Except the one used under Python 2.6 - it doesn't process this option
-# at all. Ref:
-# https://github.com/ansible/ansible/pull/66445#discussion_r372530176
junit_family = xunit1
diff --git a/test/lib/ansible_test/_data/requirements/ansible.txt b/test/lib/ansible_test/_data/requirements/ansible.txt
index 40cf83a6..a732a595 100644
--- a/test/lib/ansible_test/_data/requirements/ansible.txt
+++ b/test/lib/ansible_test/_data/requirements/ansible.txt
@@ -3,7 +3,7 @@
# packages. Thus, this should be the loosest set possible (only required
# packages, not optional ones, and with the widest range of versions that could
# be suitable)
-jinja2
+jinja2 >= 3.0.0
PyYAML
cryptography
packaging
diff --git a/test/lib/ansible_test/_data/requirements/constraints.txt b/test/lib/ansible_test/_data/requirements/constraints.txt
index 6b5b064a..6c4b53ba 100644
--- a/test/lib/ansible_test/_data/requirements/constraints.txt
+++ b/test/lib/ansible_test/_data/requirements/constraints.txt
@@ -1,30 +1,16 @@
# do not add a cryptography or pyopenssl constraint to this file, they require special handling, see get_cryptography_requirements in python_requirements.py
# do not add a coverage constraint to this file, it is handled internally by ansible-test
packaging < 21.0 ; python_version < '3.6' # packaging 21.0 requires Python 3.6 or newer
-six < 1.14.0 ; python_version < '2.7' # six 1.14.0 drops support for python 2.6
-jinja2 < 2.11 ; python_version < '2.7' # jinja2 2.11 and later require python 2.7 or later
-urllib3 < 1.24 ; python_version < '2.7' # urllib3 1.24 and later require python 2.7 or later
+paramiko < 2.9.0 # paramiko 2.9.0+ requires changes to the paramiko_ssh connection plugin to work with older systems
pywinrm >= 0.3.0 # message encryption support
-wheel < 0.30.0 ; python_version < '2.7' # wheel 0.30.0 and later require python 2.7 or later
-idna < 2.6, >= 2.5 # linode requires idna < 2.9, >= 2.5, requests requires idna < 2.6, but cryptography will cause the latest version to be installed instead
-paramiko < 2.4.0 ; python_version < '2.7' # paramiko 2.4.0 drops support for python 2.6
-pytest < 3.3.0, >= 3.1.0 ; python_version < '2.7' # pytest 3.3.0 drops support for python 2.6
pytest < 5.0.0, >= 4.5.0 ; python_version == '2.7' # pytest 5.0.0 and later will no longer support python 2.7
pytest >= 4.5.0 ; python_version > '2.7' # pytest 4.5.0 added support for --strict-markers
-pytest-forked < 1.0.2 ; python_version < '2.7' # pytest-forked 1.0.2 and later require python 2.7 or later
-pytest-forked >= 1.0.2 ; python_version >= '2.7' # pytest-forked before 1.0.2 does not work with pytest 4.2.0+ (which requires python 2.7+)
+pytest-forked >= 1.0.2 # pytest-forked before 1.0.2 does not work with pytest 4.2.0+
ntlm-auth >= 1.3.0 # message encryption support using cryptography
-requests < 2.20.0 ; python_version < '2.7' # requests 2.20.0 drops support for python 2.6
requests-ntlm >= 1.1.0 # message encryption support
requests-credssp >= 0.1.0 # message encryption support
-virtualenv < 16.0.0 ; python_version < '2.7' # virtualenv 16.0.0 and later require python 2.7 or later
pyparsing < 3.0.0 ; python_version < '3.5' # pyparsing 3 and later require python 3.5 or later
-pyyaml < 5.1 ; python_version < '2.7' # pyyaml 5.1 and later require python 2.7 or later
-pycparser < 2.19 ; python_version < '2.7' # pycparser 2.19 and later require python 2.7 or later
mock >= 2.0.0 # needed for features backported from Python 3.6 unittest.mock (assert_called, assert_called_once...)
pytest-mock >= 1.4.0 # needed for mock_use_standalone_module pytest option
-xmltodict < 0.12.0 ; python_version < '2.7' # xmltodict 0.12.0 and later require python 2.7 or later
-setuptools < 37 ; python_version == '2.6' # setuptools 37 and later require python 2.7 or later
setuptools < 45 ; python_version == '2.7' # setuptools 45 and later require python 3.5 or later
pyspnego >= 0.1.6 ; python_version >= '3.10' # bug in older releases breaks on Python 3.10
-MarkupSafe < 2.0.0 ; python_version < '3.6' # MarkupSafe >= 2.0.0. requires Python >= 3.6
diff --git a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.in b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.in
new file mode 100644
index 00000000..80c769fb
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.in
@@ -0,0 +1,3 @@
+jinja2 # ansible-core requirement
+packaging # ansible-core requirement
+pyyaml # ansible-core requirement
diff --git a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
index 660620dc..105069f2 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
@@ -1,7 +1,6 @@
-jinja2 == 3.0.1 # ansible-core requirement
-pyyaml == 5.4.1 # ansible-core requirement
-packaging == 21.0 # ansible-doc requirement
-
-# dependencies
-MarkupSafe == 2.0.1
-pyparsing == 2.4.7
+# edit "sanity.ansible-doc.in" and generate with: hacking/update-sanity-requirements.py --test ansible-doc
+Jinja2==3.0.3
+MarkupSafe==2.0.1
+packaging==21.2
+pyparsing==2.4.7
+PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.in b/test/lib/ansible_test/_data/requirements/sanity.changelog.in
new file mode 100644
index 00000000..3fcfbe08
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.in
@@ -0,0 +1,2 @@
+antsibull-changelog
+docutils < 0.18 # match version required by sphinx in the docs-build sanity test
diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
index cb9f02f8..b429b639 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
@@ -1,9 +1,8 @@
-antsibull-changelog == 0.9.0
-
-# dependencies
-pyyaml == 5.4.1
-docutils == 0.17.1
-packaging == 21.0
-pyparsing == 2.4.7
-rstcheck == 3.3.1
-semantic-version == 2.8.5
+# edit "sanity.changelog.in" and generate with: hacking/update-sanity-requirements.py --test changelog
+antsibull-changelog==0.14.0
+docutils==0.17.1
+packaging==21.2
+pyparsing==2.4.7
+PyYAML==6.0
+rstcheck==3.3.1
+semantic-version==2.8.5
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.in b/test/lib/ansible_test/_data/requirements/sanity.import.in
new file mode 100644
index 00000000..dea704eb
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.in
@@ -0,0 +1 @@
+pyyaml # needed for yaml_to_json.py
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.in b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.in
new file mode 100644
index 00000000..cec0eed3
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.in
@@ -0,0 +1,2 @@
+jinja2 # ansible-core requirement
+pyyaml # ansible-core requirement
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
index 76d16725..bf7050dd 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
@@ -1,12 +1,4 @@
-jinja2 == 3.0.1
-PyYAML == 5.4.1
-cryptography == 3.3.2
-packaging == 21.0
-resolvelib == 0.5.4
-
-# dependencies
-MarkupSafe == 2.0.1
-cffi == 1.15.0
-pycparser == 2.20
-pyparsing == 2.4.7
-six == 1.16.0
+# edit "sanity.import.plugin.in" and generate with: hacking/update-sanity-requirements.py --test import.plugin
+Jinja2==3.0.3
+MarkupSafe==2.0.1
+PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.txt b/test/lib/ansible_test/_data/requirements/sanity.import.txt
index d77a09d7..e9645ea2 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.txt
@@ -1 +1,2 @@
-pyyaml == 5.4.1 # needed for yaml_to_json.py
+# edit "sanity.import.in" and generate with: hacking/update-sanity-requirements.py --test import
+PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.in b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.in
new file mode 100644
index 00000000..c3726e8b
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.in
@@ -0,0 +1 @@
+pyyaml
diff --git a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
index cc530e42..ba3a5028 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
@@ -1 +1,2 @@
-pyyaml == 5.4.1
+# edit "sanity.integration-aliases.in" and generate with: hacking/update-sanity-requirements.py --test integration-aliases
+PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.mypy.in b/test/lib/ansible_test/_data/requirements/sanity.mypy.in
new file mode 100644
index 00000000..b7b82297
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.mypy.in
@@ -0,0 +1,9 @@
+mypy[python2]
+packaging # type stubs not published separately
+types-backports
+types-jinja2
+types-paramiko
+types-pyyaml < 6 # PyYAML 6+ stubs do not support Python 2.7
+types-requests
+types-setuptools
+types-toml
diff --git a/test/lib/ansible_test/_data/requirements/sanity.mypy.txt b/test/lib/ansible_test/_data/requirements/sanity.mypy.txt
new file mode 100644
index 00000000..d4baf563
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.mypy.txt
@@ -0,0 +1,20 @@
+# edit "sanity.mypy.in" and generate with: hacking/update-sanity-requirements.py --test mypy
+mypy==0.931
+mypy-extensions==0.4.3
+packaging==21.2
+pyparsing==2.4.7
+tomli==2.0.1
+typed-ast==1.5.2
+types-backports==0.1.3
+types-cryptography==3.3.15
+types-enum34==1.1.8
+types-ipaddress==1.0.8
+types-Jinja2==2.11.9
+types-MarkupSafe==1.1.10
+types-paramiko==2.8.13
+types-PyYAML==5.4.12
+types-requests==2.27.10
+types-setuptools==57.4.9
+types-toml==0.10.4
+types-urllib3==1.26.9
+typing-extensions==3.10.0.2
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pep8.in b/test/lib/ansible_test/_data/requirements/sanity.pep8.in
new file mode 100644
index 00000000..282a93fb
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.pep8.in
@@ -0,0 +1 @@
+pycodestyle
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pep8.txt b/test/lib/ansible_test/_data/requirements/sanity.pep8.txt
index 86f73fba..cc0f1afd 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pep8.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.pep8.txt
@@ -1 +1,2 @@
-pycodestyle == 2.6.0
+# edit "sanity.pep8.in" and generate with: hacking/update-sanity-requirements.py --test pep8
+pycodestyle==2.8.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
index 79ee8152..68545c9e 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
+++ b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
@@ -3,7 +3,7 @@ param (
$IsContainer
)
-#Requires -Version 6
+#Requires -Version 7
Set-StrictMode -Version 2.0
$ErrorActionPreference = "Stop"
@@ -12,11 +12,11 @@ $ProgressPreference = 'SilentlyContinue'
Function Install-PSModule {
[CmdletBinding()]
param(
- [Parameter(Mandatory=$true)]
+ [Parameter(Mandatory = $true)]
[String]
$Name,
- [Parameter(Mandatory=$true)]
+ [Parameter(Mandatory = $true)]
[Version]
$RequiredVersion
)
@@ -25,16 +25,16 @@ Function Install-PSModule {
$installedModule = Get-Module -Name $Name -ListAvailable | Where-Object Version -eq $RequiredVersion
if (-not $installedModule) {
Install-Module -Name $Name -RequiredVersion $RequiredVersion -Scope CurrentUser
- }
+ }
}
Set-PSRepository -Name PSGallery -InstallationPolicy Trusted
-Install-PSModule -Name PSScriptAnalyzer -RequiredVersion 1.18.0
+Install-PSModule -Name PSScriptAnalyzer -RequiredVersion 1.20.0
if ($IsContainer) {
# PSScriptAnalyzer contain lots of json files for the UseCompatibleCommands check. We don't use this rule so by
# removing the contents we can save 200MB in the docker image (or more in the future).
- # https://github.com/PowerShell/PSScriptAnalyzer/blob/master/RuleDocumentation/UseCompatibleCommands.md
+ # https://github.com/PowerShell/PSScriptAnalyzer/blob/master/docs/Rules/UseCompatibleCommands.md
$pssaPath = (Get-Module -ListAvailable -Name PSScriptAnalyzer).ModuleBase
$compatPath = Join-Path -Path $pssaPath -ChildPath compatibility_profiles -AdditionalChildPath '*'
Remove-Item -Path $compatPath -Recurse -Force
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.in b/test/lib/ansible_test/_data/requirements/sanity.pylint.in
new file mode 100644
index 00000000..2344fb4d
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.in
@@ -0,0 +1,2 @@
+pylint == 2.9.3 # currently vetted version
+pyyaml # needed for collection_detail.py
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
index 7332d162..85fdedb8 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
@@ -1,10 +1,9 @@
-pylint == 2.9.3
-pyyaml == 5.4.1 # needed for collection_detail.py
-
-# dependencies
-astroid == 2.6.6
-isort == 5.9.3
-lazy-object-proxy == 1.6.0
-mccabe == 0.6.1
-toml == 0.10.2
-wrapt == 1.12.1
+# edit "sanity.pylint.in" and generate with: hacking/update-sanity-requirements.py --test pylint
+astroid==2.6.6
+isort==5.10.1
+lazy-object-proxy==1.6.0
+mccabe==0.6.1
+pylint==2.9.3
+PyYAML==6.0
+toml==0.10.2
+wrapt==1.12.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.in b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.in
new file mode 100644
index 00000000..edd96991
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.in
@@ -0,0 +1,2 @@
+pyyaml
+voluptuous
diff --git a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
index 1281a045..3324a389 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
@@ -1,2 +1,3 @@
-pyyaml == 5.4.1
-voluptuous == 0.12.1
+# edit "sanity.runtime-metadata.in" and generate with: hacking/update-sanity-requirements.py --test runtime-metadata
+PyYAML==6.0
+voluptuous==0.12.2
diff --git a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in
new file mode 100644
index 00000000..efe94004
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in
@@ -0,0 +1,3 @@
+jinja2 # ansible-core requirement
+pyyaml # needed for collection_detail.py
+voluptuous
diff --git a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
index 4b1d5f05..32f14fea 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
@@ -1,6 +1,5 @@
-jinja2 == 3.0.1 # ansible-core requirement
-pyyaml == 5.4.1 # needed for collection_detail.py
-voluptuous == 0.12.1
-
-# dependencies
-MarkupSafe == 2.0.1
+# edit "sanity.validate-modules.in" and generate with: hacking/update-sanity-requirements.py --test validate-modules
+Jinja2==3.0.3
+MarkupSafe==2.0.1
+PyYAML==6.0
+voluptuous==0.12.2
diff --git a/test/lib/ansible_test/_data/requirements/sanity.yamllint.in b/test/lib/ansible_test/_data/requirements/sanity.yamllint.in
new file mode 100644
index 00000000..b2c729ca
--- /dev/null
+++ b/test/lib/ansible_test/_data/requirements/sanity.yamllint.in
@@ -0,0 +1 @@
+yamllint
diff --git a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
index 67384863..78806991 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
@@ -1,5 +1,4 @@
-yamllint == 1.26.0
-
-# dependencies
-pathspec == 0.9.0
-pyyaml == 5.4.1
+# edit "sanity.yamllint.in" and generate with: hacking/update-sanity-requirements.py --test yamllint
+pathspec==0.9.0
+PyYAML==6.0
+yamllint==1.26.3
diff --git a/test/lib/ansible_test/_internal/__init__.py b/test/lib/ansible_test/_internal/__init__.py
index e604a2b3..e663c45e 100644
--- a/test/lib/ansible_test/_internal/__init__.py
+++ b/test/lib/ansible_test/_internal/__init__.py
@@ -3,6 +3,7 @@ from __future__ import annotations
import os
import sys
+import typing as t
# This import should occur as early as possible.
# It must occur before subprocess has been imported anywhere in the current process.
@@ -13,7 +14,6 @@ from .init import (
from .util import (
ApplicationError,
display,
- MAXFD,
)
from .delegation import (
@@ -47,11 +47,11 @@ from .provisioning import (
)
-def main():
+def main(cli_args=None): # type: (t.Optional[t.List[str]]) -> None
"""Main program function."""
try:
os.chdir(data_context().content.root)
- args = parse_args()
+ args = parse_args(cli_args)
config = args.config(args) # type: CommonConfig
display.verbosity = config.verbosity
display.truncate = config.truncate
@@ -61,12 +61,12 @@ def main():
configure_timeout(config)
display.info('RLIMIT_NOFILE: %s' % (CURRENT_RLIMIT_NOFILE,), verbosity=2)
- display.info('MAXFD: %d' % MAXFD, verbosity=2)
delegate_args = None
target_names = None
try:
+ data_context().check_layout()
args.func(config)
except PrimeContainers:
pass
@@ -78,7 +78,6 @@ def main():
delegate_args = (ex.host_state, ex.exclude, ex.require)
if delegate_args:
- # noinspection PyTypeChecker
delegate(config, *delegate_args)
if target_names:
diff --git a/test/lib/ansible_test/_internal/ansible_util.py b/test/lib/ansible_test/_internal/ansible_util.py
index 5c689bed..a3582dc8 100644
--- a/test/lib/ansible_test/_internal/ansible_util.py
+++ b/test/lib/ansible_test/_internal/ansible_util.py
@@ -96,7 +96,6 @@ def ansible_environment(args, color=True, ansible_config=None): # type: (Common
ANSIBLE_CONFIG=ansible_config,
ANSIBLE_LIBRARY='/dev/null',
ANSIBLE_DEVEL_WARNING='false', # Don't show warnings that CI is running devel
- ANSIBLE_JINJA2_NATIVE_WARNING='false', # Don't show warnings in CI for old Jinja for native
PYTHONPATH=get_ansible_python_path(args),
PAGER='/bin/cat',
PATH=path,
@@ -199,7 +198,7 @@ def get_ansible_python_path(args): # type: (CommonConfig) -> str
If a temporary directory is required, it will be cached for the lifetime of the process and cleaned up at exit.
"""
try:
- return get_ansible_python_path.python_path
+ return get_ansible_python_path.python_path # type: ignore[attr-defined]
except AttributeError:
pass
@@ -217,7 +216,7 @@ def get_ansible_python_path(args): # type: (CommonConfig) -> str
if not args.explain:
generate_egg_info(python_path)
- get_ansible_python_path.python_path = python_path
+ get_ansible_python_path.python_path = python_path # type: ignore[attr-defined]
return python_path
diff --git a/test/lib/ansible_test/_internal/bootstrap.py b/test/lib/ansible_test/_internal/bootstrap.py
index 9eb26de7..32697397 100644
--- a/test/lib/ansible_test/_internal/bootstrap.py
+++ b/test/lib/ansible_test/_internal/bootstrap.py
@@ -35,8 +35,8 @@ class Bootstrap:
"""The bootstrap type to pass to the bootstrapping script."""
return self.__class__.__name__.replace('Bootstrap', '').lower()
- def get_variables(self): # type: () -> t.Dict[str, str]
- """The variables to template in the boostrapping script."""
+ def get_variables(self): # type: () -> t.Dict[str, t.Union[str, t.List[str]]]
+ """The variables to template in the bootstrapping script."""
return dict(
bootstrap_type=self.bootstrap_type,
controller='yes' if self.controller else '',
@@ -65,8 +65,8 @@ class Bootstrap:
@dataclasses.dataclass
class BootstrapDocker(Bootstrap):
"""Bootstrap docker instances."""
- def get_variables(self): # type: () -> t.Dict[str, str]
- """The variables to template in the boostrapping script."""
+ def get_variables(self): # type: () -> t.Dict[str, t.Union[str, t.List[str]]]
+ """The variables to template in the bootstrapping script."""
variables = super().get_variables()
variables.update(
@@ -83,8 +83,8 @@ class BootstrapRemote(Bootstrap):
platform: str
platform_version: str
- def get_variables(self): # type: () -> t.Dict[str, str]
- """The variables to template in the boostrapping script."""
+ def get_variables(self): # type: () -> t.Dict[str, t.Union[str, t.List[str]]]
+ """The variables to template in the bootstrapping script."""
variables = super().get_variables()
variables.update(
diff --git a/test/lib/ansible_test/_internal/ci/__init__.py b/test/lib/ansible_test/_internal/ci/__init__.py
index db5ca501..3d0f79e8 100644
--- a/test/lib/ansible_test/_internal/ci/__init__.py
+++ b/test/lib/ansible_test/_internal/ci/__init__.py
@@ -114,7 +114,7 @@ class AuthHelper(metaclass=abc.ABCMeta):
def initialize_private_key(self): # type: () -> str
"""
Initialize and publish a new key pair (if needed) and return the private key.
- The private key is cached across ansible-test invocations so it is only generated and published once per CI job.
+ The private key is cached across ansible-test invocations, so it is only generated and published once per CI job.
"""
path = os.path.expanduser('~/.ansible-core-ci-private.key')
@@ -166,14 +166,12 @@ class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
private_key = ec.generate_private_key(ec.SECP384R1(), default_backend())
public_key = private_key.public_key()
- # noinspection PyUnresolvedReferences
- private_key_pem = to_text(private_key.private_bytes(
+ private_key_pem = to_text(private_key.private_bytes( # type: ignore[attr-defined] # documented method, but missing from type stubs
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption(),
))
- # noinspection PyTypeChecker
public_key_pem = to_text(public_key.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo,
diff --git a/test/lib/ansible_test/_internal/classification/__init__.py b/test/lib/ansible_test/_internal/classification/__init__.py
index 532fa680..7a7e918b 100644
--- a/test/lib/ansible_test/_internal/classification/__init__.py
+++ b/test/lib/ansible_test/_internal/classification/__init__.py
@@ -15,6 +15,7 @@ from ..target import (
walk_sanity_targets,
load_integration_prefixes,
analyze_integration_target_dependencies,
+ IntegrationTarget,
)
from ..util import (
@@ -63,14 +64,14 @@ def categorize_changes(args, paths, verbose_command=None): # type: (TestConfig,
'integration': set(),
'windows-integration': set(),
'network-integration': set(),
- }
+ } # type: t.Dict[str, t.Set[str]]
focused_commands = collections.defaultdict(set)
- deleted_paths = set()
- original_paths = set()
- additional_paths = set()
- no_integration_paths = set()
+ deleted_paths = set() # type: t.Set[str]
+ original_paths = set() # type: t.Set[str]
+ additional_paths = set() # type: t.Set[str]
+ no_integration_paths = set() # type: t.Set[str]
for path in paths:
if not os.path.exists(path):
@@ -110,7 +111,7 @@ def categorize_changes(args, paths, verbose_command=None): # type: (TestConfig,
tests = all_tests(args) # not categorized, run all tests
display.warning('Path not categorized: %s' % path)
else:
- focused_target = tests.pop(FOCUSED_TARGET, False) and path in original_paths
+ focused_target = bool(tests.pop(FOCUSED_TARGET, None)) and path in original_paths
tests = dict((key, value) for key, value in tests.items() if value)
@@ -155,18 +156,18 @@ def categorize_changes(args, paths, verbose_command=None): # type: (TestConfig,
if any(target == 'all' for target in targets):
commands[command] = {'all'}
- commands = dict((c, sorted(targets)) for c, targets in commands.items() if targets)
+ sorted_commands = dict((c, sorted(targets)) for c, targets in commands.items() if targets)
focused_commands = dict((c, sorted(targets)) for c, targets in focused_commands.items())
- for command, targets in commands.items():
+ for command, targets in sorted_commands.items():
if targets == ['all']:
- commands[command] = [] # changes require testing all targets, do not filter targets
+ sorted_commands[command] = [] # changes require testing all targets, do not filter targets
changes = ChangeDescription()
changes.command = verbose_command
changes.changed_paths = sorted(original_paths)
changes.deleted_paths = sorted(deleted_paths)
- changes.regular_command_targets = commands
+ changes.regular_command_targets = sorted_commands
changes.focused_command_targets = focused_commands
changes.no_integration_paths = sorted(no_integration_paths)
@@ -205,11 +206,11 @@ class PathMapper:
self.prefixes = load_integration_prefixes()
self.integration_dependencies = analyze_integration_target_dependencies(self.integration_targets)
- self.python_module_utils_imports = {} # populated on first use to reduce overhead when not needed
- self.powershell_module_utils_imports = {} # populated on first use to reduce overhead when not needed
- self.csharp_module_utils_imports = {} # populated on first use to reduce overhead when not needed
+ self.python_module_utils_imports = {} # type: t.Dict[str, t.Set[str]] # populated on first use to reduce overhead when not needed
+ self.powershell_module_utils_imports = {} # type: t.Dict[str, t.Set[str]] # populated on first use to reduce overhead when not needed
+ self.csharp_module_utils_imports = {} # type: t.Dict[str, t.Set[str]] # populated on first use to reduce overhead when not needed
- self.paths_to_dependent_targets = {}
+ self.paths_to_dependent_targets = {} # type: t.Dict[str, t.Set[IntegrationTarget]]
for target in self.integration_targets:
for path in target.needs_file:
@@ -341,7 +342,7 @@ class PathMapper:
filename = os.path.basename(path)
name, ext = os.path.splitext(filename)
- minimal = {}
+ minimal = {} # type: t.Dict[str, str]
if os.path.sep not in path:
if filename in (
@@ -372,7 +373,7 @@ class PathMapper:
'integration': target.name if 'posix/' in target.aliases else None,
'windows-integration': target.name if 'windows/' in target.aliases else None,
'network-integration': target.name if 'network/' in target.aliases else None,
- FOCUSED_TARGET: True,
+ FOCUSED_TARGET: target.name,
}
if is_subdir(path, data_context().content.integration_path):
@@ -430,7 +431,7 @@ class PathMapper:
'integration': self.posix_integration_by_module.get(module_name) if ext == '.py' else None,
'windows-integration': self.windows_integration_by_module.get(module_name) if ext in ['.cs', '.ps1'] else None,
'network-integration': self.network_integration_by_module.get(module_name),
- FOCUSED_TARGET: True,
+ FOCUSED_TARGET: module_name,
}
return minimal
@@ -582,7 +583,7 @@ class PathMapper:
'windows-integration': target.name if target and 'windows/' in target.aliases else None,
'network-integration': target.name if target and 'network/' in target.aliases else None,
'units': units_path,
- FOCUSED_TARGET: target is not None,
+ FOCUSED_TARGET: target.name if target else None,
}
if is_subdir(path, data_context().content.plugin_paths['filter']):
@@ -630,7 +631,7 @@ class PathMapper:
filename = os.path.basename(path)
dummy, ext = os.path.splitext(filename)
- minimal = {}
+ minimal = {} # type: t.Dict[str, str]
if path.startswith('changelogs/'):
return minimal
@@ -674,7 +675,7 @@ class PathMapper:
filename = os.path.basename(path)
name, ext = os.path.splitext(filename)
- minimal = {}
+ minimal = {} # type: t.Dict[str, str]
if path.startswith('bin/'):
return all_tests(self.args) # broad impact, run all tests
@@ -721,7 +722,6 @@ class PathMapper:
if path.startswith('test/lib/ansible_test/config/'):
if name.startswith('cloud-config-'):
- # noinspection PyTypeChecker
cloud_target = 'cloud/%s/' % name.split('-')[2].split('.')[0]
if cloud_target in self.integration_targets_by_alias:
diff --git a/test/lib/ansible_test/_internal/classification/powershell.py b/test/lib/ansible_test/_internal/classification/powershell.py
index 72715de0..bc73b748 100644
--- a/test/lib/ansible_test/_internal/classification/powershell.py
+++ b/test/lib/ansible_test/_internal/classification/powershell.py
@@ -83,7 +83,7 @@ def extract_powershell_module_utils_imports(path, module_utils): # type: (str,
for line in lines:
line_number += 1
- match = re.search(r'(?i)^#\s*(?:requires\s+-module(?:s?)|ansiblerequires\s+-powershell)\s*((?:Ansible|ansible_collections|\.)\..+)', line)
+ match = re.search(r'(?i)^#\s*(?:requires\s+-modules?|ansiblerequires\s+-powershell)\s*((?:Ansible|ansible_collections|\.)\..+)', line)
if not match:
continue
diff --git a/test/lib/ansible_test/_internal/classification/python.py b/test/lib/ansible_test/_internal/classification/python.py
index ac2d99a7..6141bb80 100644
--- a/test/lib/ansible_test/_internal/classification/python.py
+++ b/test/lib/ansible_test/_internal/classification/python.py
@@ -236,7 +236,7 @@ class ModuleUtilFinder(ast.NodeVisitor):
def __init__(self, path, module_utils): # type: (str, t.Set[str]) -> None
self.path = path
self.module_utils = module_utils
- self.imports = set()
+ self.imports = set() # type: t.Set[str]
# implicitly import parent package
@@ -260,7 +260,6 @@ class ModuleUtilFinder(ast.NodeVisitor):
('^hacking/build_library/build_ansible/', 'build_ansible/'),
('^lib/ansible/', 'ansible/'),
('^test/lib/ansible_test/_util/controller/sanity/validate-modules/', 'validate_modules/'),
- ('^test/lib/ansible_test/_util/target/legacy_collection_loader/', 'legacy_collection_loader/'),
('^test/units/', 'test/units/'),
('^test/lib/ansible_test/_internal/', 'ansible_test/_internal/'),
('^test/integration/targets/.*/ansible_collections/(?P<ns>[^/]*)/(?P<col>[^/]*)/', r'ansible_collections/\g<ns>/\g<col>/'),
@@ -277,7 +276,6 @@ class ModuleUtilFinder(ast.NodeVisitor):
# While that will usually be true, there are exceptions which will result in this resolution being incorrect.
self.module = path_to_module(os.path.join(data_context().content.collection.directory, self.path))
- # noinspection PyPep8Naming
# pylint: disable=locally-disabled, invalid-name
def visit_Import(self, node): # type: (ast.Import) -> None
"""Visit an import node."""
@@ -287,7 +285,6 @@ class ModuleUtilFinder(ast.NodeVisitor):
# import ansible_collections.{ns}.{col}.plugins.module_utils.module_utils.MODULE[.MODULE]
self.add_imports([alias.name for alias in node.names], node.lineno)
- # noinspection PyPep8Naming
# pylint: disable=locally-disabled, invalid-name
def visit_ImportFrom(self, node): # type: (ast.ImportFrom) -> None
"""Visit an import from node."""
diff --git a/test/lib/ansible_test/_internal/cli/__init__.py b/test/lib/ansible_test/_internal/cli/__init__.py
index 21c45b6e..64280e82 100644
--- a/test/lib/ansible_test/_internal/cli/__init__.py
+++ b/test/lib/ansible_test/_internal/cli/__init__.py
@@ -4,6 +4,7 @@ from __future__ import annotations
import argparse
import os
import sys
+import typing as t
from .argparsing import (
CompositeActionCompletionFinder,
@@ -13,23 +14,26 @@ from .commands import (
do_commands,
)
+from .epilog import (
+ get_epilog,
+)
from .compat import (
HostSettings,
convert_legacy_args,
)
+from ..util import (
+ get_ansible_version,
+)
+
-def parse_args(): # type: () -> argparse.Namespace
+def parse_args(argv=None): # type: (t.Optional[t.List[str]]) -> argparse.Namespace
"""Parse command line arguments."""
completer = CompositeActionCompletionFinder()
- if completer.enabled:
- epilog = 'Tab completion available using the "argcomplete" python package.'
- else:
- epilog = 'Install the "argcomplete" python package to enable tab completion.'
-
- parser = argparse.ArgumentParser(epilog=epilog)
+ parser = argparse.ArgumentParser(prog='ansible-test', epilog=get_epilog(completer), formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument('--version', action='version', version=f'%(prog)s version {get_ansible_version()}')
do_commands(parser, completer)
@@ -38,7 +42,11 @@ def parse_args(): # type: () -> argparse.Namespace
always_complete_options=False,
)
- argv = sys.argv[1:]
+ if argv is None:
+ argv = sys.argv[1:]
+ else:
+ argv = argv[1:]
+
args = parser.parse_args(argv)
if args.explain and not args.verbosity:
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
index 8a087ebf..66dfc4e4 100644
--- a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
+++ b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
@@ -37,7 +37,7 @@ class RegisteredCompletionFinder(OptionCompletionFinder):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.registered_completions = None # type: t.Optional[str]
+ self.registered_completions = None # type: t.Optional[t.List[str]]
def completer(
self,
@@ -88,20 +88,18 @@ class CompositeAction(argparse.Action, metaclass=abc.ABCMeta):
"""Base class for actions that parse composite arguments."""
documentation_state = {} # type: t.Dict[t.Type[CompositeAction], DocumentationState]
- # noinspection PyUnusedLocal
def __init__(
self,
*args,
- dest, # type: str
**kwargs,
):
- del dest
-
self.definition = self.create_parser()
self.documentation_state[type(self)] = documentation_state = DocumentationState()
self.definition.document(documentation_state)
- super().__init__(*args, dest=self.definition.dest, **kwargs)
+ kwargs.update(dest=self.definition.dest)
+
+ super().__init__(*args, **kwargs)
register_safe_action(type(self))
@@ -139,10 +137,12 @@ class CompositeActionCompletionFinder(RegisteredCompletionFinder):
def get_completions(
self,
prefix, # type: str
- action, # type: CompositeAction
+ action, # type: argparse.Action
parsed_args, # type: argparse.Namespace
): # type: (...) -> t.List[str]
"""Return a list of completions appropriate for the given prefix and action, taking into account the arguments that have already been parsed."""
+ assert isinstance(action, CompositeAction)
+
state = ParserState(
mode=ParserMode.LIST if self.list_mode else ParserMode.COMPLETE,
remainder=prefix,
@@ -238,6 +238,8 @@ def complete(
"""Perform argument completion using the given completer and return the completion result."""
value = state.remainder
+ answer: Completion
+
try:
completer.parse(state)
raise ParserError('completion expected')
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/actions.py b/test/lib/ansible_test/_internal/cli/argparsing/actions.py
index c2b573e6..e3d0fd1c 100644
--- a/test/lib/ansible_test/_internal/cli/argparsing/actions.py
+++ b/test/lib/ansible_test/_internal/cli/argparsing/actions.py
@@ -7,8 +7,8 @@ import typing as t
class EnumAction(argparse.Action):
- """Parse an enum using the lowercases enum names."""
- def __init__(self, **kwargs): # type: (t.Dict[str, t.Any]) -> None
+ """Parse an enum using the lowercase enum names."""
+ def __init__(self, **kwargs: t.Any) -> None:
self.enum_type = kwargs.pop('type', None) # type: t.Type[enum.Enum]
kwargs.setdefault('choices', tuple(e.name.lower() for e in self.enum_type))
super().__init__(**kwargs)
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
index fe80a68e..cdd9956b 100644
--- a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
+++ b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
@@ -173,7 +173,7 @@ class ParserState:
self.namespaces.append(namespace)
@contextlib.contextmanager
- def delimit(self, delimiters, required=True): # type: (str, bool) -> t.ContextManager[ParserBoundary]
+ def delimit(self, delimiters, required=True): # type: (str, bool) -> t.Iterator[ParserBoundary]
"""Context manager for delimiting parsing of input."""
boundary = ParserBoundary(delimiters=delimiters, required=required)
@@ -394,7 +394,7 @@ class FileParser(Parser):
else:
path = ''
- with state.delimit(PATH_DELIMITER, required=False) as boundary:
+ with state.delimit(PATH_DELIMITER, required=False) as boundary: # type: ParserBoundary
while boundary.ready:
directory = path or '.'
@@ -420,7 +420,7 @@ class AbsolutePathParser(Parser):
"""Parse the input from the given state and return the result."""
path = ''
- with state.delimit(PATH_DELIMITER, required=False) as boundary:
+ with state.delimit(PATH_DELIMITER, required=False) as boundary: # type: ParserBoundary
while boundary.ready:
if path:
path += AnyParser(nothing=True).parse(state)
@@ -506,7 +506,7 @@ class KeyValueParser(Parser, metaclass=abc.ABCMeta):
parsers = self.get_parsers(state)
keys = list(parsers)
- with state.delimit(PAIR_DELIMITER, required=False) as pair:
+ with state.delimit(PAIR_DELIMITER, required=False) as pair: # type: ParserBoundary
while pair.ready:
with state.delimit(ASSIGNMENT_DELIMITER):
key = ChoicesParser(keys).parse(state)
@@ -528,7 +528,7 @@ class PairParser(Parser, metaclass=abc.ABCMeta):
state.set_namespace(namespace)
- with state.delimit(self.delimiter, self.required) as boundary:
+ with state.delimit(self.delimiter, self.required) as boundary: # type: ParserBoundary
choice = self.get_left_parser(state).parse(state)
if boundary.match:
diff --git a/test/lib/ansible_test/_internal/cli/commands/__init__.py b/test/lib/ansible_test/_internal/cli/commands/__init__.py
index 5cd37f4f..81bb4653 100644
--- a/test/lib/ansible_test/_internal/cli/commands/__init__.py
+++ b/test/lib/ansible_test/_internal/cli/commands/__init__.py
@@ -11,6 +11,7 @@ from ...util import (
from ..completers import (
complete_target,
+ register_completer,
)
from ..environments import (
@@ -110,33 +111,33 @@ def do_commands(
testing = test.add_argument_group(title='common testing arguments')
- testing.add_argument(
+ register_completer(testing.add_argument(
'include',
metavar='TARGET',
nargs='*',
help='test the specified target',
- ).completer = functools.partial(complete_target, completer)
+ ), functools.partial(complete_target, completer))
- testing.add_argument(
+ register_completer(testing.add_argument(
'--include',
metavar='TARGET',
action='append',
help='include the specified target',
- ).completer = functools.partial(complete_target, completer)
+ ), functools.partial(complete_target, completer))
- testing.add_argument(
+ register_completer(testing.add_argument(
'--exclude',
metavar='TARGET',
action='append',
help='exclude the specified target',
- ).completer = functools.partial(complete_target, completer)
+ ), functools.partial(complete_target, completer))
- testing.add_argument(
+ register_completer(testing.add_argument(
'--require',
metavar='TARGET',
action='append',
help='require the specified target',
- ).completer = functools.partial(complete_target, completer)
+ ), functools.partial(complete_target, completer))
testing.add_argument(
'--coverage',
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py b/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py
index f79fb1cf..7ef28919 100644
--- a/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py
@@ -5,6 +5,7 @@ import argparse
from ...completers import (
complete_target,
+ register_completer,
)
from ...environments import (
@@ -43,12 +44,12 @@ def do_integration(
def add_integration_common(
parser, # type: argparse.ArgumentParser
):
- """Add common integration argumetns."""
- parser.add_argument(
+ """Add common integration arguments."""
+ register_completer(parser.add_argument(
'--start-at',
metavar='TARGET',
help='start at the specified target',
- ).completer = complete_target
+ ), complete_target)
parser.add_argument(
'--start-at-task',
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/network.py b/test/lib/ansible_test/_internal/cli/commands/integration/network.py
index d070afda..86729195 100644
--- a/test/lib/ansible_test/_internal/cli/commands/integration/network.py
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/network.py
@@ -28,6 +28,10 @@ from ...environments import (
add_environments,
)
+from ...completers import (
+ register_completer,
+)
+
def do_network_integration(
subparsers,
@@ -51,16 +55,16 @@ def do_network_integration(
add_integration_common(network_integration)
- network_integration.add_argument(
+ register_completer(network_integration.add_argument(
'--testcase',
metavar='TESTCASE',
help='limit a test to a specified testcase',
- ).completer = complete_network_testcase
+ ), complete_network_testcase)
add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NETWORK_INTEGRATION) # network-integration
-def complete_network_testcase(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+def complete_network_testcase(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
"""Return a list of test cases matching the given prefix if only one target was parsed from the command line, otherwise return an empty list."""
testcases = []
diff --git a/test/lib/ansible_test/_internal/cli/compat.py b/test/lib/ansible_test/_internal/cli/compat.py
index 2090aac7..dfa7cfa6 100644
--- a/test/lib/ansible_test/_internal/cli/compat.py
+++ b/test/lib/ansible_test/_internal/cli/compat.py
@@ -55,7 +55,7 @@ from ..data import (
)
-def filter_python(version, versions): # type: (t.Optional[str], t.Optional[t.List[str]]) -> t.Optional[str]
+def filter_python(version, versions): # type: (t.Optional[str], t.Optional[t.Sequence[str]]) -> t.Optional[str]
"""If a Python version is given and is in the given version list, return that Python version, otherwise return None."""
return version if version in versions else None
@@ -237,8 +237,8 @@ def convert_legacy_args(
args.targets = targets
if used_default_pythons:
- targets = t.cast(t.List[ControllerConfig], targets)
- skipped_python_versions = sorted_versions(list(set(SUPPORTED_PYTHON_VERSIONS) - {target.python.version for target in targets}))
+ control_targets = t.cast(t.List[ControllerConfig], targets)
+ skipped_python_versions = sorted_versions(list(set(SUPPORTED_PYTHON_VERSIONS) - {target.python.version for target in control_targets}))
else:
skipped_python_versions = []
@@ -260,10 +260,12 @@ def controller_targets(
mode, # type: TargetMode
options, # type: LegacyHostOptions
controller, # type: ControllerHostConfig
-): # type: (...) -> t.List[ControllerConfig]
+): # type: (...) -> t.List[HostConfig]
"""Return the configuration for controller targets."""
python = native_python(options)
+ targets: t.List[HostConfig]
+
if python:
targets = [ControllerConfig(python=python)]
else:
@@ -283,7 +285,7 @@ def native_python(options): # type: (LegacyHostOptions) -> t.Optional[NativePyt
def get_legacy_host_config(
mode, # type: TargetMode
options, # type: LegacyHostOptions
-): # type: (...) -> t.Tuple[HostConfig, t.List[HostConfig], t.Optional[FallbackDetail]]
+): # type: (...) -> t.Tuple[ControllerHostConfig, t.List[HostConfig], t.Optional[FallbackDetail]]
"""
Returns controller and target host configs derived from the provided legacy host options.
The goal is to match the original behavior, by using non-split testing whenever possible.
@@ -296,6 +298,9 @@ def get_legacy_host_config(
controller_fallback = None # type: t.Optional[t.Tuple[str, str, FallbackReason]]
+ controller: t.Optional[ControllerHostConfig]
+ targets: t.List[HostConfig]
+
if options.venv:
if controller_python(options.python) or not options.python:
controller = OriginConfig(python=VirtualPythonConfig(version=options.python or 'default', system_site_packages=options.venv_system_site_packages))
@@ -304,14 +309,21 @@ def get_legacy_host_config(
controller = OriginConfig(python=VirtualPythonConfig(version='default', system_site_packages=options.venv_system_site_packages))
if mode in (TargetMode.SANITY, TargetMode.UNITS):
- targets = controller_targets(mode, options, controller)
+ python = native_python(options)
+
+ if python:
+ control_targets = [ControllerConfig(python=python)]
+ else:
+ control_targets = controller.get_default_targets(HostContext(controller_config=controller))
# Target sanity tests either have no Python requirements or manage their own virtual environments.
- # Thus there is no point in setting up virtual environments ahead of time for them.
+ # Thus, there is no point in setting up virtual environments ahead of time for them.
if mode == TargetMode.UNITS:
targets = [ControllerConfig(python=VirtualPythonConfig(version=target.python.version, path=target.python.path,
- system_site_packages=options.venv_system_site_packages)) for target in targets]
+ system_site_packages=options.venv_system_site_packages)) for target in control_targets]
+ else:
+ targets = t.cast(t.List[HostConfig], control_targets)
else:
targets = [ControllerConfig(python=VirtualPythonConfig(version=options.python or 'default',
system_site_packages=options.venv_system_site_packages))]
@@ -448,17 +460,19 @@ def handle_non_posix_targets(
targets = [WindowsInventoryConfig(path=options.inventory)]
elif mode == TargetMode.NETWORK_INTEGRATION:
if options.platform:
- targets = [NetworkRemoteConfig(name=platform, provider=options.remote_provider) for platform in options.platform]
+ network_targets = [NetworkRemoteConfig(name=platform, provider=options.remote_provider) for platform in options.platform]
for platform, collection in options.platform_collection or []:
- for entry in targets:
+ for entry in network_targets:
if entry.platform == platform:
entry.collection = collection
for platform, connection in options.platform_connection or []:
- for entry in targets:
+ for entry in network_targets:
if entry.platform == platform:
entry.connection = connection
+
+ targets = t.cast(t.List[HostConfig], network_targets)
else:
targets = [NetworkInventoryConfig(path=options.inventory)]
@@ -470,12 +484,14 @@ def default_targets(
controller, # type: ControllerHostConfig
): # type: (...) -> t.List[HostConfig]
"""Return a list of default targets for the given target mode."""
+ targets: t.List[HostConfig]
+
if mode == TargetMode.WINDOWS_INTEGRATION:
targets = [WindowsInventoryConfig(path=os.path.abspath(os.path.join(data_context().content.integration_path, 'inventory.winrm')))]
elif mode == TargetMode.NETWORK_INTEGRATION:
targets = [NetworkInventoryConfig(path=os.path.abspath(os.path.join(data_context().content.integration_path, 'inventory.networking')))]
elif mode.multiple_pythons:
- targets = controller.get_default_targets(HostContext(controller_config=controller))
+ targets = t.cast(t.List[HostConfig], controller.get_default_targets(HostContext(controller_config=controller)))
else:
targets = [ControllerConfig()]
diff --git a/test/lib/ansible_test/_internal/cli/completers.py b/test/lib/ansible_test/_internal/cli/completers.py
index a4b9c04f..278b1062 100644
--- a/test/lib/ansible_test/_internal/cli/completers.py
+++ b/test/lib/ansible_test/_internal/cli/completers.py
@@ -13,14 +13,19 @@ from .argparsing.argcompletion import (
)
-def complete_target(completer, prefix, parsed_args, **_): # type: (OptionCompletionFinder, str, argparse.Namespace, ...) -> t.List[str]
+def complete_target(completer: OptionCompletionFinder, prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
"""Perform completion for the targets configured for the command being parsed."""
matches = find_target_completion(parsed_args.targets_func, prefix, completer.list_mode)
completer.disable_completion_mangling = completer.list_mode and len(matches) > 1
return matches
-def complete_choices(choices, prefix, **_): # type: (t.List[str], str, ...) -> t.List[str]
+def complete_choices(choices: t.List[str], prefix: str, **_) -> t.List[str]:
"""Perform completion using the provided choices."""
matches = [choice for choice in choices if choice.startswith(prefix)]
return matches
+
+
+def register_completer(action: argparse.Action, completer) -> None:
+ """Register the given completer with the specified action."""
+ action.completer = completer # type: ignore[attr-defined] # intentionally using an attribute that does not exist
diff --git a/test/lib/ansible_test/_internal/cli/environments.py b/test/lib/ansible_test/_internal/cli/environments.py
index 3c0230ca..5709c7c1 100644
--- a/test/lib/ansible_test/_internal/cli/environments.py
+++ b/test/lib/ansible_test/_internal/cli/environments.py
@@ -53,12 +53,17 @@ from ..config import (
from .completers import (
complete_choices,
+ register_completer,
)
from .converters import (
key_value_type,
)
+from .epilog import (
+ get_epilog,
+)
+
from ..ci import (
get_ci_provider,
)
@@ -98,6 +103,8 @@ def add_environments(
if not get_ci_provider().supports_core_ci_auth():
sections.append('Remote provisioning options have been hidden since no Ansible Core CI API key was found.')
+ sections.append(get_epilog(completer))
+
parser.formatter_class = argparse.RawDescriptionHelpFormatter
parser.epilog = '\n\n'.join(sections)
@@ -169,40 +176,40 @@ def add_composite_environment_options(
if controller_mode == ControllerMode.NO_DELEGATION:
composite_parser.set_defaults(controller=None)
else:
- composite_parser.add_argument(
+ register_completer(composite_parser.add_argument(
'--controller',
metavar='OPT',
action=register_action_type(DelegatedControllerAction if controller_mode == ControllerMode.DELEGATED else OriginControllerAction),
help='configuration for the controller',
- ).completer = completer.completer
+ ), completer.completer)
if target_mode == TargetMode.NO_TARGETS:
composite_parser.set_defaults(targets=[])
elif target_mode == TargetMode.SHELL:
group = composite_parser.add_mutually_exclusive_group()
- group.add_argument(
+ register_completer(group.add_argument(
'--target-posix',
metavar='OPT',
action=register_action_type(PosixSshTargetAction),
help='configuration for the target',
- ).completer = completer.completer
+ ), completer.completer)
suppress = None if get_ci_provider().supports_core_ci_auth() else argparse.SUPPRESS
- group.add_argument(
+ register_completer(group.add_argument(
'--target-windows',
metavar='OPT',
action=WindowsSshTargetAction if suppress else register_action_type(WindowsSshTargetAction),
help=suppress or 'configuration for the target',
- ).completer = completer.completer
+ ), completer.completer)
- group.add_argument(
+ register_completer(group.add_argument(
'--target-network',
metavar='OPT',
action=NetworkSshTargetAction if suppress else register_action_type(NetworkSshTargetAction),
help=suppress or 'configuration for the target',
- ).completer = completer.completer
+ ), completer.completer)
else:
if target_mode.multiple_pythons:
target_option = '--target-python'
@@ -224,12 +231,12 @@ def add_composite_environment_options(
target_action = target_actions[target_mode]
- composite_parser.add_argument(
+ register_completer(composite_parser.add_argument(
target_option,
metavar='OPT',
action=register_action_type(target_action),
help=target_help,
- ).completer = completer.completer
+ ), completer.completer)
return action_types
@@ -240,9 +247,8 @@ def add_legacy_environment_options(
target_mode, # type: TargetMode
):
"""Add legacy options for controlling the test environment."""
- # noinspection PyTypeChecker
- environment = parser.add_argument_group(
- title='environment arguments (mutually exclusive with "composite environment arguments" below)') # type: argparse.ArgumentParser
+ environment: argparse.ArgumentParser = parser.add_argument_group( # type: ignore[assignment] # real type private
+ title='environment arguments (mutually exclusive with "composite environment arguments" below)')
add_environments_python(environment, target_mode)
add_environments_host(environment, controller_mode, target_mode)
@@ -253,6 +259,8 @@ def add_environments_python(
target_mode, # type: TargetMode
): # type: (...) -> None
"""Add environment arguments to control the Python version(s) used."""
+ python_versions: t.Tuple[str, ...]
+
if target_mode.has_python:
python_versions = SUPPORTED_PYTHON_VERSIONS
else:
@@ -278,8 +286,7 @@ def add_environments_host(
target_mode # type: TargetMode
): # type: (...) -> None
"""Add environment arguments for the given host and argument modes."""
- # noinspection PyTypeChecker
- environments_exclusive_group = environments_parser.add_mutually_exclusive_group() # type: argparse.ArgumentParser
+ environments_exclusive_group: argparse.ArgumentParser = environments_parser.add_mutually_exclusive_group() # type: ignore[assignment] # real type private
add_environment_local(environments_exclusive_group)
add_environment_venv(environments_exclusive_group, environments_parser)
@@ -299,28 +306,28 @@ def add_environment_network(
environments_parser, # type: argparse.ArgumentParser
): # type: (...) -> None
"""Add environment arguments for running on a windows host."""
- environments_parser.add_argument(
+ register_completer(environments_parser.add_argument(
'--platform',
metavar='PLATFORM',
action='append',
help='network platform/version',
- ).completer = complete_network_platform
+ ), complete_network_platform)
- environments_parser.add_argument(
+ register_completer(environments_parser.add_argument(
'--platform-collection',
type=key_value_type,
metavar='PLATFORM=COLLECTION',
action='append',
help='collection used to test platform',
- ).completer = complete_network_platform_collection
+ ), complete_network_platform_collection)
- environments_parser.add_argument(
+ register_completer(environments_parser.add_argument(
'--platform-connection',
type=key_value_type,
metavar='PLATFORM=CONNECTION',
action='append',
help='connection used to test platform',
- ).completer = complete_network_platform_connection
+ ), complete_network_platform_connection)
environments_parser.add_argument(
'--inventory',
@@ -333,12 +340,12 @@ def add_environment_windows(
environments_parser, # type: argparse.ArgumentParser
): # type: (...) -> None
"""Add environment arguments for running on a windows host."""
- environments_parser.add_argument(
+ register_completer(environments_parser.add_argument(
'--windows',
metavar='VERSION',
action='append',
help='windows version',
- ).completer = complete_windows
+ ), complete_windows)
environments_parser.add_argument(
'--inventory',
@@ -429,13 +436,13 @@ def add_environment_docker(
else:
docker_images = sorted(filter_completion(docker_completion(), controller_only=True))
- exclusive_parser.add_argument(
+ register_completer(exclusive_parser.add_argument(
'--docker',
metavar='IMAGE',
nargs='?',
const='default',
help='run from a docker container',
- ).completer = functools.partial(complete_choices, docker_images)
+ ), functools.partial(complete_choices, docker_images))
environments_parser.add_argument(
'--docker-privileged',
@@ -474,12 +481,12 @@ def add_global_remote(
suppress = None if get_ci_provider().supports_core_ci_auth() else argparse.SUPPRESS
- parser.add_argument(
+ register_completer(parser.add_argument(
'--remote-stage',
metavar='STAGE',
default='prod',
help=suppress or 'remote stage to use: prod, dev',
- ).completer = complete_remote_stage
+ ), complete_remote_stage)
parser.add_argument(
'--remote-endpoint',
@@ -512,11 +519,11 @@ def add_environment_remote(
suppress = None if get_ci_provider().supports_core_ci_auth() else argparse.SUPPRESS
- exclusive_parser.add_argument(
+ register_completer(exclusive_parser.add_argument(
'--remote',
metavar='NAME',
help=suppress or 'run from a remote instance',
- ).completer = functools.partial(complete_choices, remote_platforms)
+ ), functools.partial(complete_choices, remote_platforms))
environments_parser.add_argument(
'--remote-provider',
@@ -526,24 +533,24 @@ def add_environment_remote(
)
-def complete_remote_stage(prefix, **_): # type: (str, ...) -> t.List[str]
+def complete_remote_stage(prefix: str, **_) -> t.List[str]:
"""Return a list of supported stages matching the given prefix."""
return [stage for stage in ('prod', 'dev') if stage.startswith(prefix)]
-def complete_windows(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+def complete_windows(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
"""Return a list of supported Windows versions matching the given prefix, excluding versions already parsed from the command line."""
return [i for i in get_windows_version_choices() if i.startswith(prefix) and (not parsed_args.windows or i not in parsed_args.windows)]
-def complete_network_platform(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+def complete_network_platform(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
"""Return a list of supported network platforms matching the given prefix, excluding platforms already parsed from the command line."""
images = sorted(filter_completion(network_completion()))
return [i for i in images if i.startswith(prefix) and (not parsed_args.platform or i not in parsed_args.platform)]
-def complete_network_platform_collection(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+def complete_network_platform_collection(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
"""Return a list of supported network platforms matching the given prefix, excluding collection platforms already parsed from the command line."""
left = prefix.split('=')[0]
images = sorted(set(image.platform for image in filter_completion(network_completion()).values()))
@@ -551,7 +558,7 @@ def complete_network_platform_collection(prefix, parsed_args, **_): # type: (st
return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_collection or i not in [x[0] for x in parsed_args.platform_collection])]
-def complete_network_platform_connection(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+def complete_network_platform_connection(prefix: str, parsed_args: argparse.Namespace, **_) -> t.List[str]:
"""Return a list of supported network platforms matching the given prefix, excluding connection platforms already parsed from the command line."""
left = prefix.split('=')[0]
images = sorted(set(image.platform for image in filter_completion(network_completion()).values()))
diff --git a/test/lib/ansible_test/_internal/cli/epilog.py b/test/lib/ansible_test/_internal/cli/epilog.py
new file mode 100644
index 00000000..3800ff1c
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/epilog.py
@@ -0,0 +1,23 @@
+"""Argument parsing epilog generation."""
+from __future__ import annotations
+
+from .argparsing import (
+ CompositeActionCompletionFinder,
+)
+
+from ..data import (
+ data_context,
+)
+
+
+def get_epilog(completer: CompositeActionCompletionFinder) -> str:
+ """Generate and return the epilog to use for help output."""
+ if completer.enabled:
+ epilog = 'Tab completion available using the "argcomplete" python package.'
+ else:
+ epilog = 'Install the "argcomplete" python package to enable tab completion.'
+
+ if data_context().content.unsupported:
+ epilog += '\n\n' + data_context().explain_working_directory()
+
+ return epilog
diff --git a/test/lib/ansible_test/_internal/cli/parsers/__init__.py b/test/lib/ansible_test/_internal/cli/parsers/__init__.py
index 25bac916..e870d9f8 100644
--- a/test/lib/ansible_test/_internal/cli/parsers/__init__.py
+++ b/test/lib/ansible_test/_internal/cli/parsers/__init__.py
@@ -73,7 +73,7 @@ class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
"""Composite argument parser for the controller when delegation is supported."""
def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
"""Return a dictionary of type names and type parsers."""
- parsers = dict(
+ parsers: t.Dict[str, Parser] = dict(
origin=OriginParser(),
docker=DockerParser(controller=True),
)
@@ -99,7 +99,7 @@ class PosixTargetParser(TargetNamespaceParser, TypeParser):
"""Composite argument parser for a POSIX target."""
def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
"""Return a dictionary of type names and type parsers."""
- parsers = dict(
+ parsers: t.Dict[str, Parser] = dict(
controller=ControllerParser(),
docker=DockerParser(controller=False),
)
@@ -142,7 +142,7 @@ class WindowsTargetParser(TargetsNamespaceParser, TypeParser):
def get_internal_parsers(self, targets): # type: (t.List[WindowsConfig]) -> t.Dict[str, Parser]
"""Return a dictionary of type names and type parsers."""
- parsers = {}
+ parsers = {} # type: t.Dict[str, Parser]
if self.allow_inventory and not targets:
parsers.update(
@@ -184,7 +184,7 @@ class NetworkTargetParser(TargetsNamespaceParser, TypeParser):
def get_internal_parsers(self, targets): # type: (t.List[NetworkConfig]) -> t.Dict[str, Parser]
"""Return a dictionary of type names and type parsers."""
- parsers = {}
+ parsers = {} # type: t.Dict[str, Parser]
if self.allow_inventory and not targets:
parsers.update(
diff --git a/test/lib/ansible_test/_internal/cli/parsers/helpers.py b/test/lib/ansible_test/_internal/cli/parsers/helpers.py
index 8dc7a65c..03f3cb79 100644
--- a/test/lib/ansible_test/_internal/cli/parsers/helpers.py
+++ b/test/lib/ansible_test/_internal/cli/parsers/helpers.py
@@ -27,7 +27,7 @@ def get_docker_pythons(name, controller, strict): # type: (str, bool, bool) ->
available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
if not image_config:
- return [] if strict else available_pythons
+ return [] if strict else list(available_pythons)
supported_pythons = [python for python in image_config.supported_pythons if python in available_pythons]
@@ -40,7 +40,7 @@ def get_remote_pythons(name, controller, strict): # type: (str, bool, bool) ->
available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
if not platform_config:
- return [] if strict else available_pythons
+ return [] if strict else list(available_pythons)
supported_pythons = [python for python in platform_config.supported_pythons if python in available_pythons]
@@ -54,6 +54,6 @@ def get_controller_pythons(controller_config, strict): # type: (HostConfig, boo
elif isinstance(controller_config, PosixRemoteConfig):
pythons = get_remote_pythons(controller_config.name, False, strict)
else:
- pythons = SUPPORTED_PYTHON_VERSIONS
+ pythons = list(SUPPORTED_PYTHON_VERSIONS)
return pythons
diff --git a/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
index 1aae8821..d09ab7cc 100644
--- a/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
+++ b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
@@ -5,6 +5,7 @@ import typing as t
from ...host_configs import (
NativePythonConfig,
+ PythonConfig,
VirtualPythonConfig,
)
@@ -18,6 +19,7 @@ from ..argparsing.parsers import (
Parser,
ParserError,
ParserState,
+ ParserBoundary,
)
@@ -58,7 +60,7 @@ class PythonParser(Parser):
The origin host and unknown environments assume all relevant Python versions are available.
"""
def __init__(self,
- versions, # type: t.List[str]
+ versions, # type: t.Sequence[str]
*,
allow_default, # type: bool
allow_venv, # type: bool
@@ -85,9 +87,13 @@ class PythonParser(Parser):
def parse(self, state): # type: (ParserState) -> t.Any
"""Parse the input from the given state and return the result."""
+ boundary: ParserBoundary
+
with state.delimit('@/', required=False) as boundary:
version = ChoicesParser(self.first_choices).parse(state)
+ python: PythonConfig
+
if version == 'venv':
with state.delimit('@/', required=False) as boundary:
version = ChoicesParser(self.venv_choices).parse(state)
@@ -156,7 +162,7 @@ class SshConnectionParser(Parser):
setattr(namespace, 'user', user)
- with state.delimit(':', required=False) as colon:
+ with state.delimit(':', required=False) as colon: # type: ParserBoundary
host = AnyParser(no_match_message=f'Expected {{host}} from: {self.EXPECTED_FORMAT}').parse(state)
setattr(namespace, 'host', host)
diff --git a/test/lib/ansible_test/_internal/commands/coverage/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/__init__.py
index 50bc8263..1e59ac6f 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/__init__.py
@@ -152,7 +152,7 @@ def enumerate_python_arcs(
modules, # type: t.Dict[str, str]
collection_search_re, # type: t.Optional[t.Pattern]
collection_sub_re, # type: t.Optional[t.Pattern]
-): # type: (...) -> t.Generator[t.Tuple[str, t.Set[t.Tuple[int, int]]]]
+): # type: (...) -> t.Generator[t.Tuple[str, t.Set[t.Tuple[int, int]]], None, None]
"""Enumerate Python code coverage arcs in the given file."""
if os.path.getsize(path) == 0:
display.warning('Empty coverage file: %s' % path, verbosity=2)
@@ -193,7 +193,7 @@ def enumerate_powershell_lines(
path, # type: str
collection_search_re, # type: t.Optional[t.Pattern]
collection_sub_re, # type: t.Optional[t.Pattern]
-): # type: (...) -> t.Generator[t.Tuple[str, t.Dict[int, int]]]
+): # type: (...) -> t.Generator[t.Tuple[str, t.Dict[int, int]], None, None]
"""Enumerate PowerShell code coverage lines in the given file."""
if os.path.getsize(path) == 0:
display.warning('Empty coverage file: %s' % path, verbosity=2)
@@ -298,7 +298,7 @@ class PathChecker:
def __init__(self, args, collection_search_re=None): # type: (CoverageConfig, t.Optional[t.Pattern]) -> None
self.args = args
self.collection_search_re = collection_search_re
- self.invalid_paths = []
+ self.invalid_paths = [] # type: t.List[str]
self.invalid_path_chars = 0
def check_path(self, path): # type: (str) -> bool
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
index a39d12c8..f94b7360 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
@@ -18,14 +18,13 @@ from .. import (
CoverageAnalyzeConfig,
)
-if t.TYPE_CHECKING:
- TargetKey = t.TypeVar('TargetKey', int, t.Tuple[int, int])
- NamedPoints = t.Dict[str, t.Dict[TargetKey, t.Set[str]]]
- IndexedPoints = t.Dict[str, t.Dict[TargetKey, t.Set[int]]]
- Arcs = t.Dict[str, t.Dict[t.Tuple[int, int], t.Set[int]]]
- Lines = t.Dict[str, t.Dict[int, t.Set[int]]]
- TargetIndexes = t.Dict[str, int]
- TargetSetIndexes = t.Dict[t.FrozenSet[int], int]
+TargetKey = t.TypeVar('TargetKey', int, t.Tuple[int, int])
+NamedPoints = t.Dict[str, t.Dict[TargetKey, t.Set[str]]]
+IndexedPoints = t.Dict[str, t.Dict[TargetKey, t.Set[int]]]
+Arcs = t.Dict[str, t.Dict[t.Tuple[int, int], t.Set[int]]]
+Lines = t.Dict[str, t.Dict[int, t.Set[int]]]
+TargetIndexes = t.Dict[str, int]
+TargetSetIndexes = t.Dict[t.FrozenSet[int], int]
class CoverageAnalyzeTargetsConfig(CoverageAnalyzeConfig):
@@ -38,7 +37,7 @@ class CoverageAnalyzeTargetsConfig(CoverageAnalyzeConfig):
def make_report(target_indexes, arcs, lines): # type: (TargetIndexes, Arcs, Lines) -> t.Dict[str, t.Any]
"""Condense target indexes, arcs and lines into a compact report."""
- set_indexes = {}
+ set_indexes = {} # type: TargetSetIndexes
arc_refs = dict((path, dict((format_arc(arc), get_target_set_index(indexes, set_indexes)) for arc, indexes in data.items())) for path, data in arcs.items())
line_refs = dict((path, dict((line, get_target_set_index(indexes, set_indexes)) for line, indexes in data.items())) for path, data in lines.items())
@@ -95,6 +94,11 @@ def write_report(args, report, path): # type: (CoverageAnalyzeTargetsConfig, t.
), verbosity=1)
+def format_line(value): # type: (int) -> str
+ """Format line as a string."""
+ return str(value) # putting this in a function keeps both pylint and mypy happy
+
+
def format_arc(value): # type: (t.Tuple[int, int]) -> str
"""Format an arc tuple as a string."""
return '%d:%d' % value
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
index d68edc02..1ea9d59e 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
@@ -18,13 +18,12 @@ from . import (
write_report,
)
-if t.TYPE_CHECKING:
- from . import (
- Arcs,
- IndexedPoints,
- Lines,
- TargetIndexes,
- )
+from . import (
+ Arcs,
+ IndexedPoints,
+ Lines,
+ TargetIndexes,
+)
class CoverageAnalyzeTargetsCombineConfig(CoverageAnalyzeTargetsConfig):
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
index 6ca6e6d3..d9283424 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
@@ -19,6 +19,7 @@ from . import (
CoverageAnalyzeTargetsConfig,
expand_indexes,
format_arc,
+ format_line,
read_report,
)
@@ -43,7 +44,7 @@ def command_coverage_analyze_targets_expand(args): # type: (CoverageAnalyzeTarg
report = dict(
arcs=expand_indexes(covered_path_arcs, covered_targets, format_arc),
- lines=expand_indexes(covered_path_lines, covered_targets, str),
+ lines=expand_indexes(covered_path_lines, covered_targets, format_line),
)
if not args.explain:
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
index e5d2f500..e5e0dff7 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
@@ -21,11 +21,10 @@ from . import (
write_report,
)
-if t.TYPE_CHECKING:
- from . import (
- NamedPoints,
- TargetIndexes,
- )
+from . import (
+ NamedPoints,
+ TargetIndexes,
+)
class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig):
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
index 3f9bca74..54b2516f 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
@@ -43,12 +43,11 @@ from . import (
write_report,
)
-if t.TYPE_CHECKING:
- from . import (
- Arcs,
- Lines,
- TargetIndexes,
- )
+from . import (
+ Arcs,
+ Lines,
+ TargetIndexes,
+)
class CoverageAnalyzeTargetsGenerateConfig(CoverageAnalyzeTargetsConfig):
@@ -68,7 +67,7 @@ def command_coverage_analyze_targets_generate(args): # type: (CoverageAnalyzeTa
raise Delegate(host_state)
root = data_context().content.root
- target_indexes = {}
+ target_indexes = {} # type: TargetIndexes
arcs = dict((os.path.relpath(path, root), data) for path, data in analyze_python_coverage(args, host_state, args.input_dir, target_indexes).items())
lines = dict((os.path.relpath(path, root), data) for path, data in analyze_powershell_coverage(args, args.input_dir, target_indexes).items())
report = make_report(target_indexes, arcs, lines)
@@ -139,7 +138,7 @@ def analyze_powershell_coverage(
def prune_invalid_filenames(
args, # type: CoverageAnalyzeTargetsGenerateConfig
results, # type: t.Dict[str, t.Any]
- collection_search_re=None, # type: t.Optional[str]
+ collection_search_re=None, # type: t.Optional[t.Pattern]
): # type: (...) -> None
"""Remove invalid filenames from the given result set."""
path_checker = PathChecker(args, collection_search_re)
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
index 9b6d696d..f3cdfe5b 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
@@ -24,11 +24,10 @@ from . import (
write_report,
)
-if t.TYPE_CHECKING:
- from . import (
- TargetIndexes,
- IndexedPoints,
- )
+from . import (
+ TargetIndexes,
+ IndexedPoints,
+)
class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig):
@@ -53,7 +52,7 @@ def command_coverage_analyze_targets_missing(args): # type: (CoverageAnalyzeTar
from_targets, from_path_arcs, from_path_lines = read_report(args.from_file)
to_targets, to_path_arcs, to_path_lines = read_report(args.to_file)
- target_indexes = {}
+ target_indexes = {} # type: TargetIndexes
if args.only_gaps:
arcs = find_gaps(from_path_arcs, from_targets, to_path_arcs, target_indexes, args.only_exists)
@@ -74,7 +73,7 @@ def find_gaps(
only_exists, # type: bool
): # type: (...) -> IndexedPoints
"""Find gaps in coverage between the from and to data sets."""
- target_data = {}
+ target_data = {} # type: IndexedPoints
for from_path, from_points in from_data.items():
if only_exists and not os.path.isfile(to_bytes(from_path)):
@@ -100,7 +99,7 @@ def find_missing(
only_exists, # type: bool
): # type: (...) -> IndexedPoints
"""Find coverage in from_data not present in to_data (arcs or lines)."""
- target_data = {}
+ target_data = {} # type: IndexedPoints
for from_path, from_points in from_data.items():
if only_exists and not os.path.isfile(to_bytes(from_path)):
diff --git a/test/lib/ansible_test/_internal/commands/coverage/combine.py b/test/lib/ansible_test/_internal/commands/coverage/combine.py
index b240df46..8cf4c105 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/combine.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/combine.py
@@ -60,6 +60,8 @@ from . import (
PathChecker,
)
+TValue = t.TypeVar('TValue')
+
def command_coverage_combine(args): # type: (CoverageCombineConfig) -> None
"""Patch paths in coverage files and merge into a single file."""
@@ -113,9 +115,12 @@ def _command_coverage_combine_python(args, host_state): # type: (CoverageCombin
coverage_files = get_python_coverage_files()
+ def _default_stub_value(source_paths: list[str]) -> dict[str, set[tuple[int, int]]]:
+ return {path: set() for path in source_paths}
+
counter = 0
sources = _get_coverage_targets(args, walk_compile_targets)
- groups = _build_stub_groups(args, sources, lambda s: dict((name, set()) for name in s))
+ groups = _build_stub_groups(args, sources, _default_stub_value)
collection_search_re, collection_sub_re = get_collection_path_regexes()
@@ -185,7 +190,7 @@ def _command_coverage_combine_powershell(args): # type: (CoverageCombineConfig)
"""Combine PowerShell coverage files and return a list of the output files."""
coverage_files = get_powershell_coverage_files()
- def _default_stub_value(source_paths):
+ def _default_stub_value(source_paths: list[str]) -> dict[str, dict[int, int]]:
cmd = ['pwsh', os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'coverage_stub.ps1')]
cmd.extend(source_paths)
@@ -234,12 +239,9 @@ def _command_coverage_combine_powershell(args): # type: (CoverageCombineConfig)
coverage_data = dict((filename, data) for filename, data in groups[group].items() if path_checker.check_path(filename))
if args.all:
- # Add 0 line entries for files not in coverage_data
- for source, source_line_count in sources:
- if source in coverage_data:
- continue
-
- coverage_data[source] = _default_stub_value(source_line_count)
+ missing_sources = [source for source, _source_line_count in sources if source not in coverage_data]
+ stubs = _default_stub_value(missing_sources)
+ coverage_data.update(stubs)
if not args.explain:
if args.export:
@@ -278,17 +280,19 @@ def _get_coverage_targets(args, walk_func): # type: (CoverageCombineConfig, t.C
return sources
-def _build_stub_groups(args, sources, default_stub_value):
+def _build_stub_groups(
+ args: CoverageCombineConfig,
+ sources: list[tuple[str, int]],
+ default_stub_value: t.Callable[[list[str]], dict[str, TValue]],
+) -> dict[str, dict[str, TValue]]:
"""
- :type args: CoverageCombineConfig
- :type sources: List[tuple[str, int]]
- :type default_stub_value: Func[List[str]]
- :rtype: dict
+ Split the given list of sources with line counts into groups, maintaining a maximum line count for each group.
+ Each group consists of a dictionary of sources and default coverage stubs generated by the provided default_stub_value function.
"""
groups = {}
if args.stub:
- stub_group = []
+ stub_group: list[str] = []
stub_groups = [stub_group]
stub_line_limit = 500000
stub_line_count = 0
@@ -315,7 +319,6 @@ def get_coverage_group(args, coverage_file): # type: (CoverageCombineConfig, st
"""Return the name of the coverage group for the specified coverage file, or None if no group was found."""
parts = os.path.basename(coverage_file).split('=', 4)
- # noinspection PyTypeChecker
if len(parts) != 5 or not parts[4].startswith('coverage.'):
return None
diff --git a/test/lib/ansible_test/_internal/commands/coverage/xml.py b/test/lib/ansible_test/_internal/commands/coverage/xml.py
index ed9603c2..c498d1c2 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/xml.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/xml.py
@@ -76,7 +76,7 @@ def _generate_powershell_xml(coverage_file): # type: (str) -> Element
content_root = data_context().content.root
is_ansible = data_context().content.is_ansible
- packages = {}
+ packages = {} # type: t.Dict[str, t.Dict[str, t.Dict[str, int]]]
for path, results in coverage_info.items():
filename = os.path.splitext(os.path.basename(path))[0]
@@ -131,7 +131,7 @@ def _generate_powershell_xml(coverage_file): # type: (str) -> Element
return elem_coverage
-def _add_cobertura_package(packages, package_name, package_data): # type: (SubElement, str, t.Dict[str, t.Dict[str, int]]) -> t.Tuple[int, int]
+def _add_cobertura_package(packages, package_name, package_data): # type: (Element, str, t.Dict[str, t.Dict[str, int]]) -> t.Tuple[int, int]
"""Add a package element to the given packages element."""
elem_package = SubElement(packages, 'package')
elem_classes = SubElement(elem_package, 'classes')
diff --git a/test/lib/ansible_test/_internal/commands/env/__init__.py b/test/lib/ansible_test/_internal/commands/env/__init__.py
index c625209c..d8f11b87 100644
--- a/test/lib/ansible_test/_internal/commands/env/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/env/__init__.py
@@ -166,7 +166,7 @@ def show_dict(data, verbose, root_verbosity=0, path=None): # type: (t.Dict[str,
display.info(indent + '%s: %s' % (key, value), verbosity=verbosity)
-def get_docker_details(args): # type: (EnvConfig) -> t.Dict[str, str]
+def get_docker_details(args): # type: (EnvConfig) -> t.Dict[str, t.Any]
"""Return details about docker."""
docker = get_docker_command()
diff --git a/test/lib/ansible_test/_internal/commands/integration/__init__.py b/test/lib/ansible_test/_internal/commands/integration/__init__.py
index a9a49aa1..247bce08 100644
--- a/test/lib/ansible_test/_internal/commands/integration/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/integration/__init__.py
@@ -134,7 +134,7 @@ def generate_dependency_map(integration_targets): # type: (t.List[IntegrationTa
"""Analyze the given list of integration test targets and return a dictionary expressing target names and the targets on which they depend."""
targets_dict = dict((target.name, target) for target in integration_targets)
target_dependencies = analyze_integration_target_dependencies(integration_targets)
- dependency_map = {}
+ dependency_map = {} # type: t.Dict[str, t.Set[IntegrationTarget]]
invalid_targets = set()
@@ -159,7 +159,7 @@ def generate_dependency_map(integration_targets): # type: (t.List[IntegrationTa
def get_files_needed(target_dependencies): # type: (t.List[IntegrationTarget]) -> t.List[str]
"""Return a list of files needed by the given list of target dependencies."""
- files_needed = []
+ files_needed = [] # type: t.List[str]
for target_dependency in target_dependencies:
files_needed += target_dependency.needs_file
@@ -241,7 +241,7 @@ def integration_test_environment(
args, # type: IntegrationConfig
target, # type: IntegrationTarget
inventory_path_src, # type: str
-): # type: (...) -> t.ContextManager[IntegrationEnvironment]
+): # type: (...) -> t.Iterator[IntegrationEnvironment]
"""Context manager that prepares the integration test environment and cleans it up."""
ansible_config_src = args.get_ansible_config()
ansible_config_relative = os.path.join(data_context().content.integration_path, '%s.cfg' % args.command)
@@ -324,7 +324,7 @@ def integration_test_environment(
display.info('Copying %s/ to %s/' % (dir_src, dir_dst), verbosity=2)
if not args.explain:
- shutil.copytree(to_bytes(dir_src), to_bytes(dir_dst), symlinks=True)
+ shutil.copytree(to_bytes(dir_src), to_bytes(dir_dst), symlinks=True) # type: ignore[arg-type] # incorrect type stub omits bytes path support
for file_src, file_dst in file_copies:
display.info('Copying %s to %s' % (file_src, file_dst), verbosity=2)
@@ -344,7 +344,7 @@ def integration_test_config_file(
args, # type: IntegrationConfig
env_config, # type: CloudEnvironmentConfig
integration_dir, # type: str
-): # type: (...) -> t.ContextManager[t.Optional[str]]
+): # type: (...) -> t.Iterator[t.Optional[str]]
"""Context manager that provides a config file for integration tests, if needed."""
if not env_config:
yield None
@@ -361,7 +361,7 @@ def integration_test_config_file(
config_file = json.dumps(config_vars, indent=4, sort_keys=True)
- with named_temporary_file(args, 'config-file-', '.json', integration_dir, config_file) as path:
+ with named_temporary_file(args, 'config-file-', '.json', integration_dir, config_file) as path: # type: str
filename = os.path.relpath(path, integration_dir)
display.info('>>> Config File: %s\n%s' % (filename, config_file), verbosity=3)
@@ -398,8 +398,8 @@ def create_inventory(
def command_integration_filtered(
args, # type: IntegrationConfig
host_state, # type: HostState
- targets, # type: t.Tuple[IntegrationTarget]
- all_targets, # type: t.Tuple[IntegrationTarget]
+ targets, # type: t.Tuple[IntegrationTarget, ...]
+ all_targets, # type: t.Tuple[IntegrationTarget, ...]
inventory_path, # type: str
pre_target=None, # type: t.Optional[t.Callable[[IntegrationTarget], None]]
post_target=None, # type: t.Optional[t.Callable[[IntegrationTarget], None]]
@@ -413,7 +413,7 @@ def command_integration_filtered(
all_targets_dict = dict((target.name, target) for target in all_targets)
setup_errors = []
- setup_targets_executed = set()
+ setup_targets_executed = set() # type: t.Set[str]
for target in all_targets:
for setup_target in target.setup_once + target.setup_always:
@@ -538,7 +538,7 @@ def command_integration_filtered(
failed.append(target)
if args.continue_on_error:
- display.error(ex)
+ display.error(str(ex))
continue
display.notice('To resume at this test target, use the option: --start-at %s' % target.name)
@@ -597,7 +597,7 @@ def command_integration_script(
module_defaults=env_config.module_defaults,
), indent=4, sort_keys=True), verbosity=3)
- with integration_test_environment(args, target, inventory_path) as test_env:
+ with integration_test_environment(args, target, inventory_path) as test_env: # type: IntegrationEnvironment
cmd = ['./%s' % os.path.basename(target.script_path)]
if args.verbosity:
@@ -614,7 +614,7 @@ def command_integration_script(
if env_config and env_config.env_vars:
env.update(env_config.env_vars)
- with integration_test_config_file(args, env_config, test_env.integration_dir) as config_path:
+ with integration_test_config_file(args, env_config, test_env.integration_dir) as config_path: # type: t.Optional[str]
if config_path:
cmd += ['-e', '@%s' % config_path]
@@ -673,7 +673,7 @@ def command_integration_role(
module_defaults=env_config.module_defaults,
), indent=4, sort_keys=True), verbosity=3)
- with integration_test_environment(args, target, inventory_path) as test_env:
+ with integration_test_environment(args, target, inventory_path) as test_env: # type: IntegrationEnvironment
if os.path.exists(test_env.vars_file):
vars_files.append(os.path.relpath(test_env.vars_file, test_env.integration_dir))
@@ -745,7 +745,7 @@ def run_setup_targets(
args, # type: IntegrationConfig
host_state, # type: HostState
test_dir, # type: str
- target_names, # type: t.List[str]
+ target_names, # type: t.Sequence[str]
targets_dict, # type: t.Dict[str, IntegrationTarget]
targets_executed, # type: t.Set[str]
inventory_path, # type: str
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
index 70f8afaf..5afde048 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
@@ -59,8 +59,8 @@ def get_cloud_plugins(): # type: () -> t.Tuple[t.Dict[str, t.Type[CloudProvider
"""Import cloud plugins and load them into the plugin dictionaries."""
import_plugins('commands/integration/cloud')
- providers = {}
- environments = {}
+ providers = {} # type: t.Dict[str, t.Type[CloudProvider]]
+ environments = {} # type: t.Dict[str, t.Type[CloudEnvironment]]
load_plugins(CloudProvider, providers)
load_plugins(CloudEnvironment, environments)
@@ -134,7 +134,7 @@ def cloud_filter(args, targets): # type: (IntegrationConfig, t.Tuple[Integratio
if args.metadata.cloud_config is not None:
return [] # cloud filter already performed prior to delegation
- exclude = []
+ exclude = [] # type: t.List[str]
for provider in get_cloud_providers(args, targets):
provider.filter(targets, exclude)
@@ -206,7 +206,7 @@ class CloudBase(metaclass=abc.ABCMeta):
@property
def setup_executed(self): # type: () -> bool
"""True if setup has been executed, otherwise False."""
- return self._get_cloud_config(self._SETUP_EXECUTED, False)
+ return t.cast(bool, self._get_cloud_config(self._SETUP_EXECUTED, False))
@setup_executed.setter
def setup_executed(self, value): # type: (bool) -> None
@@ -216,7 +216,7 @@ class CloudBase(metaclass=abc.ABCMeta):
@property
def config_path(self): # type: () -> str
"""Path to the configuration file."""
- return os.path.join(data_context().content.root, self._get_cloud_config(self._CONFIG_PATH))
+ return os.path.join(data_context().content.root, str(self._get_cloud_config(self._CONFIG_PATH)))
@config_path.setter
def config_path(self, value): # type: (str) -> None
@@ -226,7 +226,7 @@ class CloudBase(metaclass=abc.ABCMeta):
@property
def resource_prefix(self): # type: () -> str
"""Resource prefix."""
- return self._get_cloud_config(self._RESOURCE_PREFIX)
+ return str(self._get_cloud_config(self._RESOURCE_PREFIX))
@resource_prefix.setter
def resource_prefix(self, value): # type: (str) -> None
@@ -236,7 +236,7 @@ class CloudBase(metaclass=abc.ABCMeta):
@property
def managed(self): # type: () -> bool
"""True if resources are managed by ansible-test, otherwise False."""
- return self._get_cloud_config(self._MANAGED)
+ return t.cast(bool, self._get_cloud_config(self._MANAGED))
@managed.setter
def managed(self, value): # type: (bool) -> None
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
index a34d714d..b2b02095 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
@@ -104,9 +104,8 @@ class AwsCloudEnvironment(CloudEnvironment):
ansible_vars = dict(
resource_prefix=self.resource_prefix,
tiny_prefix=uuid.uuid4().hex[0:12]
- )
+ ) # type: t.Dict[str, t.Any]
- # noinspection PyTypeChecker
ansible_vars.update(dict(parser.items('default')))
display.sensitive.add(ansible_vars.get('aws_secret_key'))
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
index 002fa581..cf16c7f5 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
@@ -2,14 +2,8 @@
from __future__ import annotations
import configparser
-import os
-import urllib.parse
import typing as t
-from ....io import (
- read_text_file,
-)
-
from ....util import (
ApplicationError,
display,
@@ -23,10 +17,6 @@ from ....target import (
IntegrationTarget,
)
-from ....http import (
- HttpClient,
-)
-
from ....core_ci import (
AnsibleCoreCI,
)
@@ -40,12 +30,10 @@ from . import (
class AzureCloudProvider(CloudProvider):
"""Azure cloud provider plugin. Sets up cloud resources before delegation."""
- SHERLOCK_CONFIG_PATH = os.path.expanduser('~/.ansible-sherlock-ci.cfg')
-
def __init__(self, args): # type: (IntegrationConfig) -> None
super().__init__(args)
- self.aci = None
+ self.aci = None # type: t.Optional[AnsibleCoreCI]
self.uses_config = True
@@ -56,9 +44,6 @@ class AzureCloudProvider(CloudProvider):
if aci.available:
return
- if os.path.isfile(self.SHERLOCK_CONFIG_PATH):
- return
-
super().filter(targets, exclude)
def setup(self): # type: () -> None
@@ -78,40 +63,19 @@ class AzureCloudProvider(CloudProvider):
super().cleanup()
def _setup_dynamic(self): # type: () -> None
- """Request Azure credentials through Sherlock."""
+ """Request Azure credentials through ansible-core-ci."""
display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1)
config = self._read_config_template()
response = {}
- if os.path.isfile(self.SHERLOCK_CONFIG_PATH):
- sherlock_uri = read_text_file(self.SHERLOCK_CONFIG_PATH).splitlines()[0].strip() + '&rgcount=2'
-
- parts = urllib.parse.urlparse(sherlock_uri)
- query_string = urllib.parse.parse_qs(parts.query)
- base_uri = urllib.parse.urlunparse(parts[:4] + ('', ''))
-
- if 'code' not in query_string:
- example_uri = 'https://example.azurewebsites.net/api/sandbox-provisioning'
- raise ApplicationError('The Sherlock URI must include the API key in the query string. Example: %s?code=xxx' % example_uri)
-
- display.info('Initializing azure/sherlock from: %s' % base_uri, verbosity=1)
-
- http = HttpClient(self.args)
- result = http.get(sherlock_uri)
-
- display.info('Started azure/sherlock from: %s' % base_uri, verbosity=1)
-
- if not self.args.explain:
- response = result.json()
- else:
- aci = self._create_ansible_core_ci()
+ aci = self._create_ansible_core_ci()
- aci_result = aci.start()
+ aci_result = aci.start()
- if not self.args.explain:
- response = aci_result['azure']
- self.aci = aci
+ if not self.args.explain:
+ response = aci_result['azure']
+ self.aci = aci
if not self.args.explain:
values = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
index b4ca48f7..86a38fef 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
@@ -85,8 +85,8 @@ class ForemanEnvironment(CloudEnvironment):
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
env_vars = dict(
- FOREMAN_HOST=self._get_cloud_config('FOREMAN_HOST'),
- FOREMAN_PORT=self._get_cloud_config('FOREMAN_PORT'),
+ FOREMAN_HOST=str(self._get_cloud_config('FOREMAN_HOST')),
+ FOREMAN_PORT=str(self._get_cloud_config('FOREMAN_PORT')),
)
return CloudEnvironmentConfig(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
index de58cbf5..302a2919 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
@@ -145,8 +145,8 @@ class GalaxyEnvironment(CloudEnvironment):
"""Galaxy environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- pulp_user = self._get_cloud_config('PULP_USER')
- pulp_password = self._get_cloud_config('PULP_PASSWORD')
+ pulp_user = str(self._get_cloud_config('PULP_USER'))
+ pulp_password = str(self._get_cloud_config('PULP_PASSWORD'))
pulp_host = self._get_cloud_config('PULP_HOST')
galaxy_port = self._get_cloud_config('GALAXY_PORT')
pulp_port = self._get_cloud_config('PULP_PORT')
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
index 2d8217e9..00c62b76 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
@@ -87,6 +87,6 @@ class HttptesterEnvironment(CloudEnvironment):
return CloudEnvironmentConfig(
env_vars=dict(
HTTPTESTER='1', # backwards compatibility for tests intended to work with or without HTTP Tester
- KRB5_PASSWORD=self._get_cloud_config(KRB5_PASSWORD_ENV),
+ KRB5_PASSWORD=str(self._get_cloud_config(KRB5_PASSWORD_ENV)),
)
)
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
index 4c695fc6..dee73aa6 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
@@ -30,7 +30,7 @@ class NiosProvider(CloudProvider):
#
    # Its source resides at:
# https://github.com/ansible/nios-test-container
- DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:1.3.0'
+ DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:1.4.0'
def __init__(self, args): # type: (IntegrationConfig) -> None
super().__init__(args)
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
index fb69b9b2..2093b461 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
@@ -107,14 +107,14 @@ class VcenterEnvironment(CloudEnvironment):
ansible_vars.update(dict(parser.items('DEFAULT', raw=True)))
except KeyError: # govcsim
env_vars = dict(
- VCENTER_HOSTNAME=self._get_cloud_config('vcenter_hostname'),
+ VCENTER_HOSTNAME=str(self._get_cloud_config('vcenter_hostname')),
VCENTER_USERNAME='user',
VCENTER_PASSWORD='pass',
)
ansible_vars = dict(
- vcsim=self._get_cloud_config('vcenter_hostname'),
- vcenter_hostname=self._get_cloud_config('vcenter_hostname'),
+ vcsim=str(self._get_cloud_config('vcenter_hostname')),
+ vcenter_hostname=str(self._get_cloud_config('vcenter_hostname')),
vcenter_username='user',
vcenter_password='pass',
)
diff --git a/test/lib/ansible_test/_internal/commands/integration/coverage.py b/test/lib/ansible_test/_internal/commands/integration/coverage.py
index c36b4403..6b8a0a6e 100644
--- a/test/lib/ansible_test/_internal/commands/integration/coverage.py
+++ b/test/lib/ansible_test/_internal/commands/integration/coverage.py
@@ -271,7 +271,7 @@ class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
@property
def is_active(self): # type: () -> bool
"""True if the handler should be used, otherwise False."""
- return self.profiles and not self.args.coverage_check
+ return bool(self.profiles) and not self.args.coverage_check
def setup(self): # type: () -> None
"""Perform setup for code coverage."""
diff --git a/test/lib/ansible_test/_internal/commands/integration/filters.py b/test/lib/ansible_test/_internal/commands/integration/filters.py
index 9854de57..0396ce92 100644
--- a/test/lib/ansible_test/_internal/commands/integration/filters.py
+++ b/test/lib/ansible_test/_internal/commands/integration/filters.py
@@ -221,7 +221,7 @@ class NetworkInventoryTargetFilter(TargetFilter[NetworkInventoryConfig]):
"""Target filter for network inventory."""
-class OriginTargetFilter(TargetFilter[OriginConfig]):
+class OriginTargetFilter(PosixTargetFilter[OriginConfig]):
"""Target filter for localhost."""
diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
index 8c1340f2..d819c37e 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
@@ -123,6 +123,11 @@ COMMAND = 'sanity'
SANITY_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'sanity')
TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity')
+# NOTE: must match ansible.constants.DOCUMENTABLE_PLUGINS, but with 'module' replaced by 'modules'!
+DOCUMENTABLE_PLUGINS = (
+ 'become', 'cache', 'callback', 'cliconf', 'connection', 'httpapi', 'inventory', 'lookup', 'netconf', 'modules', 'shell', 'strategy', 'vars'
+)
+
created_venvs = [] # type: t.List[str]
@@ -142,7 +147,7 @@ def command_sanity(args): # type: (SanityConfig) -> None
if not targets.include:
raise AllTargetsSkipped()
- tests = sanity_get_tests()
+ tests = list(sanity_get_tests())
if args.test:
disabled = []
@@ -170,6 +175,8 @@ def command_sanity(args): # type: (SanityConfig) -> None
total = 0
failed = []
+ result: t.Optional[TestResult]
+
for test in tests:
if args.list_tests:
display.info(test.name)
@@ -201,14 +208,14 @@ def command_sanity(args): # type: (SanityConfig) -> None
else:
raise Exception('Unsupported test type: %s' % type(test))
- all_targets = targets.targets
+ all_targets = list(targets.targets)
if test.all_targets:
- usable_targets = targets.targets
+ usable_targets = list(targets.targets)
elif test.no_targets:
- usable_targets = tuple()
+ usable_targets = []
else:
- usable_targets = targets.include
+ usable_targets = list(targets.include)
all_targets = SanityTargets.filter_and_inject_targets(test, all_targets)
usable_targets = SanityTargets.filter_and_inject_targets(test, usable_targets)
@@ -503,12 +510,15 @@ class SanityIgnoreParser:
def load(args): # type: (SanityConfig) -> SanityIgnoreParser
"""Return the current SanityIgnore instance, initializing it if needed."""
try:
- return SanityIgnoreParser.instance
+ return SanityIgnoreParser.instance # type: ignore[attr-defined]
except AttributeError:
pass
- SanityIgnoreParser.instance = SanityIgnoreParser(args)
- return SanityIgnoreParser.instance
+ instance = SanityIgnoreParser(args)
+
+ SanityIgnoreParser.instance = instance # type: ignore[attr-defined]
+
+ return instance
class SanityIgnoreProcessor:
@@ -571,7 +581,7 @@ class SanityIgnoreProcessor:
def get_errors(self, paths): # type: (t.List[str]) -> t.List[SanityMessage]
"""Return error messages related to issues with the file."""
- messages = []
+ messages = [] # type: t.List[SanityMessage]
# unused errors
@@ -621,7 +631,7 @@ class SanityFailure(TestFailure):
self,
test, # type: str
python_version=None, # type: t.Optional[str]
- messages=None, # type: t.Optional[t.List[SanityMessage]]
+ messages=None, # type: t.Optional[t.Sequence[SanityMessage]]
summary=None, # type: t.Optional[str]
): # type: (...) -> None
super().__init__(COMMAND, test, python_version, messages, summary)
@@ -633,7 +643,7 @@ class SanityMessage(TestMessage):
class SanityTargets:
"""Sanity test target information."""
- def __init__(self, targets, include): # type: (t.Tuple[TestTarget], t.Tuple[TestTarget]) -> None
+ def __init__(self, targets, include): # type: (t.Tuple[TestTarget, ...], t.Tuple[TestTarget, ...]) -> None
self.targets = targets
self.include = include
@@ -671,11 +681,13 @@ class SanityTargets:
def get_targets(): # type: () -> t.Tuple[TestTarget, ...]
"""Return a tuple of sanity test targets. Uses a cached version when available."""
try:
- return SanityTargets.get_targets.targets
+ return SanityTargets.get_targets.targets # type: ignore[attr-defined]
except AttributeError:
- SanityTargets.get_targets.targets = tuple(sorted(walk_sanity_targets()))
+ targets = tuple(sorted(walk_sanity_targets()))
+
+ SanityTargets.get_targets.targets = targets # type: ignore[attr-defined]
- return SanityTargets.get_targets.targets
+ return targets
class SanityTest(metaclass=abc.ABCMeta):
@@ -695,7 +707,7 @@ class SanityTest(metaclass=abc.ABCMeta):
# Because these errors can be unpredictable they behave differently than normal error codes:
# * They are not reported by default. The `--enable-optional-errors` option must be used to display these errors.
# * They cannot be ignored. This is done to maintain the integrity of the ignore system.
- self.optional_error_codes = set()
+ self.optional_error_codes = set() # type: t.Set[str]
@property
def error_code(self): # type: () -> t.Optional[str]
@@ -785,7 +797,9 @@ class SanityTest(metaclass=abc.ABCMeta):
# utility code that runs in target environments and requires support for remote-only Python versions
is_subdir(target.path, 'test/lib/ansible_test/_util/target/') or
# integration test support modules/module_utils continue to require support for remote-only Python versions
- re.search('^test/support/integration/.*/(modules|module_utils)/', target.path)
+ re.search('^test/support/integration/.*/(modules|module_utils)/', target.path) or
+ # collection loader requires support for remote-only Python versions
+ re.search('^lib/ansible/utils/collection_loader/', target.path)
))
)]
@@ -952,7 +966,7 @@ class SanityCodeSmellTest(SanitySingleVersion):
elif self.output == 'path-message':
pattern = '^(?P<path>[^:]*): (?P<message>.*)$'
else:
- pattern = ApplicationError('Unsupported output type: %s' % self.output)
+ raise ApplicationError('Unsupported output type: %s' % self.output)
if not self.no_targets:
data = '\n'.join(paths)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
index 82d9f751..0b421ed3 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
@@ -7,10 +7,12 @@ import re
import typing as t
from . import (
+ DOCUMENTABLE_PLUGINS,
SanitySingleVersion,
SanityFailure,
SanitySuccess,
SanityTargets,
+ SanityMessage,
)
from ...test import (
@@ -49,22 +51,7 @@ class AnsibleDocTest(SanitySingleVersion):
"""Sanity test for ansible-doc."""
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
- # This should use documentable plugins from constants instead
- unsupported_plugin_types = {
- # not supported by ansible-doc
- 'action',
- 'doc_fragments',
- 'filter',
- 'module_utils',
- 'terminal',
- 'test',
- # The following are plugin directories not directly supported by ansible-core (and thus also not by ansible-doc)
- # (https://github.com/ansible-collections/overview/blob/main/collection_requirements.rst#modules--plugins)
- 'plugin_utils',
- 'sub_plugins',
- }
-
- plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type not in unsupported_plugin_types]
+ plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type in DOCUMENTABLE_PLUGINS]
return [target for target in targets
if os.path.splitext(target.path)[1] == '.py'
@@ -77,8 +64,8 @@ class AnsibleDocTest(SanitySingleVersion):
paths = [target.path for target in targets.include]
- doc_targets = collections.defaultdict(list)
- target_paths = collections.defaultdict(dict)
+ doc_targets = collections.defaultdict(list) # type: t.Dict[str, t.List[str]]
+ target_paths = collections.defaultdict(dict) # type: t.Dict[str, t.Dict[str, str]]
remap_types = dict(
modules='module',
@@ -97,7 +84,7 @@ class AnsibleDocTest(SanitySingleVersion):
target_paths[plugin_type][data_context().content.prefix + plugin_name] = plugin_file_path
env = ansible_environment(args, color=False)
- error_messages = []
+ error_messages = [] # type: t.List[SanityMessage]
for doc_type in sorted(doc_targets):
for format_option in [None, '--json']:
diff --git a/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py
deleted file mode 100644
index 5dc582fa..00000000
--- a/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""Sanity test for symlinks in the bin directory."""
-from __future__ import annotations
-
-import os
-import typing as t
-
-from . import (
- SanityVersionNeutral,
- SanityMessage,
- SanityFailure,
- SanitySuccess,
- SanityTargets,
-)
-
-from ...constants import (
- __file__ as symlink_map_full_path,
-)
-
-from ...test import (
- TestResult,
-)
-
-from ...config import (
- SanityConfig,
-)
-
-from ...data import (
- data_context,
-)
-
-from ...payload import (
- ANSIBLE_BIN_SYMLINK_MAP,
-)
-
-from ...util import (
- ANSIBLE_BIN_PATH,
-)
-
-
-class BinSymlinksTest(SanityVersionNeutral):
- """Sanity test for symlinks in the bin directory."""
- ansible_only = True
-
- @property
- def can_ignore(self): # type: () -> bool
- """True if the test supports ignore entries."""
- return False
-
- @property
- def no_targets(self): # type: () -> bool
- """True if the test does not use test targets. Mutually exclusive with all_targets."""
- return True
-
- def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
- bin_root = ANSIBLE_BIN_PATH
- bin_names = os.listdir(bin_root)
- bin_paths = sorted(os.path.join(bin_root, path) for path in bin_names)
-
- errors = [] # type: t.List[t.Tuple[str, str]]
-
- symlink_map_path = os.path.relpath(symlink_map_full_path, data_context().content.root)
-
- for bin_path in bin_paths:
- if not os.path.islink(bin_path):
- errors.append((bin_path, 'not a symbolic link'))
- continue
-
- dest = os.readlink(bin_path)
-
- if not os.path.exists(bin_path):
- errors.append((bin_path, 'points to non-existent path "%s"' % dest))
- continue
-
- if not os.path.isfile(bin_path):
- errors.append((bin_path, 'points to non-file "%s"' % dest))
- continue
-
- map_dest = ANSIBLE_BIN_SYMLINK_MAP.get(os.path.basename(bin_path))
-
- if not map_dest:
- errors.append((bin_path, 'missing from ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % symlink_map_path))
- continue
-
- if dest != map_dest:
- errors.append((bin_path, 'points to "%s" instead of "%s" from ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % (dest, map_dest, symlink_map_path)))
- continue
-
- if not os.access(bin_path, os.X_OK):
- errors.append((bin_path, 'points to non-executable file "%s"' % dest))
- continue
-
- for bin_name, dest in ANSIBLE_BIN_SYMLINK_MAP.items():
- if bin_name not in bin_names:
- bin_path = os.path.join(bin_root, bin_name)
- errors.append((bin_path, 'missing symlink to "%s" defined in ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % (dest, symlink_map_path)))
-
- messages = [SanityMessage(message=message, path=os.path.relpath(path, data_context().content.root), confidence=100) for path, message in errors]
-
- if errors:
- return SanityFailure(self.name, messages=messages)
-
- return SanitySuccess(self.name)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/ignores.py b/test/lib/ansible_test/_internal/commands/sanity/ignores.py
index 9a39955a..867243ad 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/ignores.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/ignores.py
@@ -2,6 +2,7 @@
from __future__ import annotations
import os
+import typing as t
from . import (
SanityFailure,
@@ -38,7 +39,7 @@ class IgnoresTest(SanityVersionNeutral):
def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
sanity_ignore = SanityIgnoreParser.load(args)
- messages = []
+ messages = [] # type: t.List[SanityMessage]
# parse errors
diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py
index aa0239d5..9098c5e6 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/import.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/import.py
@@ -21,10 +21,6 @@ from ...constants import (
REMOTE_ONLY_PYTHON_VERSIONS,
)
-from ...io import (
- write_text_file,
-)
-
from ...test import (
TestResult,
)
@@ -40,7 +36,6 @@ from ...util import (
parse_to_list_of_dict,
is_subdir,
ANSIBLE_TEST_TOOLS_ROOT,
- ANSIBLE_TEST_TARGET_ROOT,
)
from ...util_common import (
@@ -91,8 +86,17 @@ class ImportTest(SanityMultipleVersion):
"""Sanity test for proper import exception handling."""
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
+ if data_context().content.is_ansible:
+ # all of ansible-core must pass the import test, not just plugins/modules
+ # modules/module_utils will be tested using the module context
+ # everything else will be tested using the plugin context
+ paths = ['lib/ansible']
+ else:
+ # only plugins/modules must pass the import test for collections
+ paths = list(data_context().content.plugin_paths.values())
+
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and
- any(is_subdir(target.path, path) for path in data_context().content.plugin_paths.values())]
+ any(is_subdir(target.path, path) for path in paths)]
@property
def needs_pypi(self): # type: () -> bool
@@ -111,7 +115,7 @@ class ImportTest(SanityMultipleVersion):
try:
install_requirements(args, python, virtualenv=True, controller=False) # sanity (import)
except PipUnavailableError as ex:
- display.warning(ex)
+ display.warning(str(ex))
temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import')
@@ -210,11 +214,4 @@ def get_ansible_test_python_path(): # type: () -> str
The temporary directory created will be cached for the lifetime of the process and cleaned up at exit.
"""
python_path = create_temp_dir(prefix='ansible-test-')
- ansible_test_path = os.path.join(python_path, 'ansible_test')
-
- # legacy collection loader required by all python versions not supported by the controller
- write_text_file(os.path.join(ansible_test_path, '__init__.py'), '', True)
- write_text_file(os.path.join(ansible_test_path, '_internal', '__init__.py'), '', True)
- os.symlink(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'legacy_collection_loader'), os.path.join(ansible_test_path, '_internal', 'legacy_collection_loader'))
-
return python_path
diff --git a/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py
deleted file mode 100644
index 6d29968b..00000000
--- a/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py
+++ /dev/null
@@ -1,389 +0,0 @@
-"""Sanity test to check integration test aliases."""
-from __future__ import annotations
-
-import json
-import textwrap
-import os
-import typing as t
-
-from . import (
- SanitySingleVersion,
- SanityMessage,
- SanityFailure,
- SanitySuccess,
- SanityTargets,
- SANITY_ROOT,
-)
-
-from ...test import (
- TestResult,
-)
-
-from ...config import (
- SanityConfig,
-)
-
-from ...target import (
- filter_targets,
- walk_posix_integration_targets,
- walk_windows_integration_targets,
- walk_integration_targets,
- walk_module_targets,
- CompletionTarget,
-)
-
-from ..integration.cloud import (
- get_cloud_platforms,
-)
-
-from ...io import (
- read_text_file,
-)
-
-from ...util import (
- display,
- raw_command,
-)
-
-from ...util_common import (
- write_json_test_results,
- ResultType,
-)
-
-from ...host_configs import (
- PythonConfig,
-)
-
-
-class IntegrationAliasesTest(SanitySingleVersion):
- """Sanity test to evaluate integration test aliases."""
- CI_YML = '.azure-pipelines/azure-pipelines.yml'
- TEST_ALIAS_PREFIX = 'shippable' # this will be changed at some point in the future
-
- DISABLED = 'disabled/'
- UNSTABLE = 'unstable/'
- UNSUPPORTED = 'unsupported/'
-
- EXPLAIN_URL = 'https://docs.ansible.com/ansible-core/devel/dev_guide/testing/sanity/integration-aliases.html'
-
- TEMPLATE_DISABLED = """
- The following integration tests are **disabled** [[explain]({explain_url}#disabled)]:
-
- {tests}
-
- Consider fixing the integration tests before or alongside changes.
- """
-
- TEMPLATE_UNSTABLE = """
- The following integration tests are **unstable** [[explain]({explain_url}#unstable)]:
-
- {tests}
-
- Tests may need to be restarted due to failures unrelated to changes.
- """
-
- TEMPLATE_UNSUPPORTED = """
- The following integration tests are **unsupported** [[explain]({explain_url}#unsupported)]:
-
- {tests}
-
- Consider running the tests manually or extending test infrastructure to add support.
- """
-
- TEMPLATE_UNTESTED = """
- The following modules have **no integration tests** [[explain]({explain_url}#untested)]:
-
- {tests}
-
- Consider adding integration tests before or alongside changes.
- """
-
- ansible_only = True
-
- def __init__(self):
- super().__init__()
-
- self._ci_config = {} # type: t.Dict[str, t.Any]
- self._ci_test_groups = {} # type: t.Dict[str, t.List[int]]
-
- @property
- def can_ignore(self): # type: () -> bool
- """True if the test supports ignore entries."""
- return False
-
- @property
- def no_targets(self): # type: () -> bool
- """True if the test does not use test targets. Mutually exclusive with all_targets."""
- return True
-
- def load_ci_config(self, python): # type: (PythonConfig) -> t.Dict[str, t.Any]
- """Load and return the CI YAML configuration."""
- if not self._ci_config:
- self._ci_config = self.load_yaml(python, self.CI_YML)
-
- return self._ci_config
-
- @property
- def ci_test_groups(self): # type: () -> t.Dict[str, t.List[int]]
- """Return a dictionary of CI test names and their group(s)."""
- if not self._ci_test_groups:
- test_groups = {}
-
- for stage in self._ci_config['stages']:
- for job in stage['jobs']:
- if job.get('template') != 'templates/matrix.yml':
- continue
-
- parameters = job['parameters']
-
- groups = parameters.get('groups', [])
- test_format = parameters.get('testFormat', '{0}')
- test_group_format = parameters.get('groupFormat', '{0}/{{1}}')
-
- for target in parameters['targets']:
- test = target.get('test') or target.get('name')
-
- if groups:
- tests_formatted = [test_group_format.format(test_format).format(test, group) for group in groups]
- else:
- tests_formatted = [test_format.format(test)]
-
- for test_formatted in tests_formatted:
- parts = test_formatted.split('/')
- key = parts[0]
-
- if key in ('sanity', 'units'):
- continue
-
- try:
- group = int(parts[-1])
- except ValueError:
- continue
-
- if group < 1 or group > 99:
- continue
-
- group_set = test_groups.setdefault(key, set())
- group_set.add(group)
-
- self._ci_test_groups = dict((key, sorted(value)) for key, value in test_groups.items())
-
- return self._ci_test_groups
-
- def format_test_group_alias(self, name, fallback=''): # type: (str, str) -> str
- """Return a test group alias using the given name and fallback."""
- group_numbers = self.ci_test_groups.get(name, None)
-
- if group_numbers:
- if min(group_numbers) != 1:
- display.warning('Min test group "%s" in %s is %d instead of 1.' % (name, self.CI_YML, min(group_numbers)), unique=True)
-
- if max(group_numbers) != len(group_numbers):
- display.warning('Max test group "%s" in %s is %d instead of %d.' % (name, self.CI_YML, max(group_numbers), len(group_numbers)), unique=True)
-
- if max(group_numbers) > 9:
- alias = '%s/%s/group(%s)/' % (self.TEST_ALIAS_PREFIX, name, '|'.join(str(i) for i in range(min(group_numbers), max(group_numbers) + 1)))
- elif len(group_numbers) > 1:
- alias = '%s/%s/group[%d-%d]/' % (self.TEST_ALIAS_PREFIX, name, min(group_numbers), max(group_numbers))
- else:
- alias = '%s/%s/group%d/' % (self.TEST_ALIAS_PREFIX, name, min(group_numbers))
- elif fallback:
- alias = '%s/%s/group%d/' % (self.TEST_ALIAS_PREFIX, fallback, 1)
- else:
- raise Exception('cannot find test group "%s" in %s' % (name, self.CI_YML))
-
- return alias
-
- def load_yaml(self, python, path): # type: (PythonConfig, str) -> t.Dict[str, t.Any]
- """Load the specified YAML file and return the contents."""
- yaml_to_json_path = os.path.join(SANITY_ROOT, self.name, 'yaml_to_json.py')
- return json.loads(raw_command([python.path, yaml_to_json_path], data=read_text_file(path), capture=True)[0])
-
- def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
- if args.explain:
- return SanitySuccess(self.name)
-
- if not os.path.isfile(self.CI_YML):
- return SanityFailure(self.name, messages=[SanityMessage(
- message='file missing',
- path=self.CI_YML,
- )])
-
- results = dict(
- comments=[],
- labels={},
- )
-
- self.load_ci_config(python)
- self.check_changes(args, results)
-
- write_json_test_results(ResultType.BOT, 'data-sanity-ci.json', results)
-
- messages = []
-
- messages += self.check_posix_targets(args)
- messages += self.check_windows_targets()
-
- if messages:
- return SanityFailure(self.name, messages=messages)
-
- return SanitySuccess(self.name)
-
- def check_posix_targets(self, args): # type: (SanityConfig) -> t.List[SanityMessage]
- """Check POSIX integration test targets and return messages with any issues found."""
- posix_targets = tuple(walk_posix_integration_targets())
-
- clouds = get_cloud_platforms(args, posix_targets)
- cloud_targets = ['cloud/%s/' % cloud for cloud in clouds]
-
- all_cloud_targets = tuple(filter_targets(posix_targets, ['cloud/'], directories=False, errors=False))
- invalid_cloud_targets = tuple(filter_targets(all_cloud_targets, cloud_targets, include=False, directories=False, errors=False))
-
- messages = []
-
- for target in invalid_cloud_targets:
- for alias in target.aliases:
- if alias.startswith('cloud/') and alias != 'cloud/':
- if any(alias.startswith(cloud_target) for cloud_target in cloud_targets):
- continue
-
- messages.append(SanityMessage('invalid alias `%s`' % alias, '%s/aliases' % target.path))
-
- messages += self.check_ci_group(
- targets=tuple(filter_targets(posix_targets, ['cloud/', '%s/generic/' % self.TEST_ALIAS_PREFIX], include=False, directories=False, errors=False)),
- find=self.format_test_group_alias('linux').replace('linux', 'posix'),
- find_incidental=['%s/posix/incidental/' % self.TEST_ALIAS_PREFIX],
- )
-
- messages += self.check_ci_group(
- targets=tuple(filter_targets(posix_targets, ['%s/generic/' % self.TEST_ALIAS_PREFIX], directories=False, errors=False)),
- find=self.format_test_group_alias('generic'),
- )
-
- for cloud in clouds:
- if cloud == 'httptester':
- find = self.format_test_group_alias('linux').replace('linux', 'posix')
- find_incidental = ['%s/posix/incidental/' % self.TEST_ALIAS_PREFIX]
- else:
- find = self.format_test_group_alias(cloud, 'generic')
- find_incidental = ['%s/%s/incidental/' % (self.TEST_ALIAS_PREFIX, cloud), '%s/cloud/incidental/' % self.TEST_ALIAS_PREFIX]
-
- messages += self.check_ci_group(
- targets=tuple(filter_targets(posix_targets, ['cloud/%s/' % cloud], directories=False, errors=False)),
- find=find,
- find_incidental=find_incidental,
- )
-
- return messages
-
- def check_windows_targets(self):
- """
- :rtype: list[SanityMessage]
- """
- windows_targets = tuple(walk_windows_integration_targets())
-
- messages = []
-
- messages += self.check_ci_group(
- targets=windows_targets,
- find=self.format_test_group_alias('windows'),
- find_incidental=['%s/windows/incidental/' % self.TEST_ALIAS_PREFIX],
- )
-
- return messages
-
- def check_ci_group(
- self,
- targets, # type: t.Tuple[CompletionTarget, ...]
- find, # type: str
- find_incidental=None, # type: t.Optional[t.List[str]]
- ): # type: (...) -> t.List[SanityMessage]
- """Check the CI groups set in the provided targets and return a list of messages with any issues found."""
- all_paths = set(target.path for target in targets)
- supported_paths = set(target.path for target in filter_targets(targets, [find], directories=False, errors=False))
- unsupported_paths = set(target.path for target in filter_targets(targets, [self.UNSUPPORTED], directories=False, errors=False))
-
- if find_incidental:
- incidental_paths = set(target.path for target in filter_targets(targets, find_incidental, directories=False, errors=False))
- else:
- incidental_paths = set()
-
- unassigned_paths = all_paths - supported_paths - unsupported_paths - incidental_paths
- conflicting_paths = supported_paths & unsupported_paths
-
- unassigned_message = 'missing alias `%s` or `%s`' % (find.strip('/'), self.UNSUPPORTED.strip('/'))
- conflicting_message = 'conflicting alias `%s` and `%s`' % (find.strip('/'), self.UNSUPPORTED.strip('/'))
-
- messages = []
-
- for path in unassigned_paths:
- messages.append(SanityMessage(unassigned_message, '%s/aliases' % path))
-
- for path in conflicting_paths:
- messages.append(SanityMessage(conflicting_message, '%s/aliases' % path))
-
- return messages
-
- def check_changes(self, args, results): # type: (SanityConfig, t.Dict[str, t.Any]) -> None
- """Check changes and store results in the provided results dictionary."""
- integration_targets = list(walk_integration_targets())
- module_targets = list(walk_module_targets())
-
- integration_targets_by_name = dict((target.name, target) for target in integration_targets)
- module_names_by_path = dict((target.path, target.module) for target in module_targets)
-
- disabled_targets = []
- unstable_targets = []
- unsupported_targets = []
-
- for command in [command for command in args.metadata.change_description.focused_command_targets if 'integration' in command]:
- for target in args.metadata.change_description.focused_command_targets[command]:
- if self.DISABLED in integration_targets_by_name[target].aliases:
- disabled_targets.append(target)
- elif self.UNSTABLE in integration_targets_by_name[target].aliases:
- unstable_targets.append(target)
- elif self.UNSUPPORTED in integration_targets_by_name[target].aliases:
- unsupported_targets.append(target)
-
- untested_modules = []
-
- for path in args.metadata.change_description.no_integration_paths:
- module = module_names_by_path.get(path)
-
- if module:
- untested_modules.append(module)
-
- comments = [
- self.format_comment(self.TEMPLATE_DISABLED, disabled_targets),
- self.format_comment(self.TEMPLATE_UNSTABLE, unstable_targets),
- self.format_comment(self.TEMPLATE_UNSUPPORTED, unsupported_targets),
- self.format_comment(self.TEMPLATE_UNTESTED, untested_modules),
- ]
-
- comments = [comment for comment in comments if comment]
-
- labels = dict(
- needs_tests=bool(untested_modules),
- disabled_tests=bool(disabled_targets),
- unstable_tests=bool(unstable_targets),
- unsupported_tests=bool(unsupported_targets),
- )
-
- results['comments'] += comments
- results['labels'].update(labels)
-
- def format_comment(self, template, targets): # type: (str, t.List[str]) -> t.Optional[str]
- """Format and return a comment based on the given template and targets, or None if there are no targets."""
- if not targets:
- return None
-
- tests = '\n'.join('- %s' % target for target in targets)
-
- data = dict(
- explain_url=self.EXPLAIN_URL,
- tests=tests,
- )
-
- message = textwrap.dedent(template).strip().format(**data)
-
- return message
diff --git a/test/lib/ansible_test/_internal/commands/sanity/mypy.py b/test/lib/ansible_test/_internal/commands/sanity/mypy.py
new file mode 100644
index 00000000..5b83aa8b
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/sanity/mypy.py
@@ -0,0 +1,250 @@
+"""Sanity test which executes mypy."""
+from __future__ import annotations
+
+import dataclasses
+import os
+import re
+import typing as t
+
+from . import (
+ SanityMultipleVersion,
+ SanityMessage,
+ SanityFailure,
+ SanitySuccess,
+ SanitySkipped,
+ SanityTargets,
+ create_sanity_virtualenv,
+)
+
+from ...constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ REMOTE_ONLY_PYTHON_VERSIONS,
+)
+
+from ...test import (
+ TestResult,
+)
+
+from ...target import (
+ TestTarget,
+)
+
+from ...util import (
+ SubprocessError,
+ display,
+ parse_to_list_of_dict,
+ ANSIBLE_TEST_CONTROLLER_ROOT,
+ ApplicationError,
+ is_subdir,
+)
+
+from ...util_common import (
+ intercept_python,
+)
+
+from ...ansible_util import (
+ ansible_environment,
+)
+
+from ...config import (
+ SanityConfig,
+)
+
+from ...host_configs import (
+ PythonConfig,
+ VirtualPythonConfig,
+)
+
+
+class MypyTest(SanityMultipleVersion):
+ """Sanity test which executes mypy."""
+ ansible_only = True
+
+ def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
+ """Return the given list of test targets, filtered to include only those relevant for the test."""
+ return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and (
+ target.path.startswith('lib/ansible/') or target.path.startswith('test/lib/ansible_test/_internal/')
+ or target.path.startswith('test/lib/ansible_test/_util/target/sanity/import/'))]
+
+ @property
+ def error_code(self): # type: () -> t.Optional[str]
+ """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
+ return 'ansible-test'
+
+ @property
+ def needs_pypi(self): # type: () -> bool
+ """True if the test requires PyPI, otherwise False."""
+ return True
+
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
+ settings = self.load_processor(args, python.version)
+
+ paths = [target.path for target in targets.include]
+
+ virtualenv_python = create_sanity_virtualenv(args, args.controller_python, self.name)
+
+ if args.prime_venvs:
+ return SanitySkipped(self.name, python_version=python.version)
+
+ if not virtualenv_python:
+ display.warning(f'Skipping sanity test "{self.name}" due to missing virtual environment support on Python {args.controller_python.version}.')
+ return SanitySkipped(self.name, python.version)
+
+ contexts = (
+ MyPyContext('ansible-test', ['test/lib/ansible_test/_util/target/sanity/import/'], CONTROLLER_PYTHON_VERSIONS),
+ MyPyContext('ansible-test', ['test/lib/ansible_test/_internal/'], CONTROLLER_PYTHON_VERSIONS),
+ MyPyContext('ansible-core', ['lib/ansible/'], CONTROLLER_PYTHON_VERSIONS),
+ MyPyContext('modules', ['lib/ansible/modules/', 'lib/ansible/module_utils/'], REMOTE_ONLY_PYTHON_VERSIONS),
+ )
+
+ unfiltered_messages = [] # type: t.List[SanityMessage]
+
+ for context in contexts:
+ if python.version not in context.python_versions:
+ continue
+
+ unfiltered_messages.extend(self.test_context(args, virtualenv_python, python, context, paths))
+
+ notices = []
+ messages = []
+
+ for message in unfiltered_messages:
+ if message.level != 'error':
+ notices.append(message)
+ continue
+
+ match = re.search(r'^(?P<message>.*) {2}\[(?P<code>.*)]$', message.message)
+
+ messages.append(SanityMessage(
+ message=match.group('message'),
+ path=message.path,
+ line=message.line,
+ column=message.column,
+ level=message.level,
+ code=match.group('code'),
+ ))
+
+ for notice in notices:
+ display.info(notice.format(), verbosity=3)
+
+ # The following error codes from mypy indicate that results are incomplete.
+ # That prevents the test from completing successfully, just as if mypy were to traceback or generate unexpected output.
+ fatal_error_codes = {
+ 'import',
+ 'syntax',
+ }
+
+ fatal_errors = [message for message in messages if message.code in fatal_error_codes]
+
+ if fatal_errors:
+ error_message = '\n'.join(error.format() for error in fatal_errors)
+ raise ApplicationError(f'Encountered {len(fatal_errors)} fatal errors reported by mypy:\n{error_message}')
+
+ paths_set = set(paths)
+
+ # Only report messages for paths that were specified as targets.
+ # Imports in our code are followed by mypy in order to perform its analysis, which is important for accurate results.
+ # However, it will also report issues on those files, which is not the desired behavior.
+ messages = [message for message in messages if message.path in paths_set]
+
+ results = settings.process_errors(messages, paths)
+
+ if results:
+ return SanityFailure(self.name, messages=results, python_version=python.version)
+
+ return SanitySuccess(self.name, python_version=python.version)
+
+ @staticmethod
+ def test_context(
+ args, # type: SanityConfig
+ virtualenv_python, # type: VirtualPythonConfig
+ python, # type: PythonConfig
+ context, # type: MyPyContext
+ paths, # type: t.List[str]
+ ): # type: (...) -> t.List[SanityMessage]
+ """Run mypy tests for the specified context."""
+ context_paths = [path for path in paths if any(is_subdir(path, match_path) for match_path in context.paths)]
+
+ if not context_paths:
+ return []
+
+ config_path = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'sanity', 'mypy', f'{context.name}.ini')
+
+ display.info(f'Checking context "{context.name}"', verbosity=1)
+
+ env = ansible_environment(args, color=False)
+ env['MYPYPATH'] = env['PYTHONPATH']
+
+ # The --no-site-packages option should not be used, as it will prevent loading of type stubs from the sanity test virtual environment.
+
+ # Enabling the --warn-unused-configs option would help keep the config files clean.
+ # However, the option can only be used when all files in tested contexts are evaluated.
+ # Unfortunately sanity tests have no way of making that determination currently.
+ # The option is also incompatible with incremental mode and caching.
+
+ cmd = [
+ # Below are arguments common to all contexts.
+ # They are kept here to avoid repetition in each config file.
+ virtualenv_python.path,
+ '-m', 'mypy',
+ '--show-column-numbers',
+ '--show-error-codes',
+ '--no-error-summary',
+ # This is a fairly common pattern in our code, so we'll allow it.
+ '--allow-redefinition',
+ # Since we specify the path(s) to test, it's important that mypy is configured to use the default behavior of following imports.
+ '--follow-imports', 'normal',
+ # Incremental results and caching do not provide significant performance benefits.
+ # It also prevents the use of the --warn-unused-configs option.
+ '--no-incremental',
+ '--cache-dir', '/dev/null',
+ # The platform is specified here so that results are consistent regardless of what platform the tests are run from.
+ # In the future, if testing of other platforms is desired, the platform should become part of the test specification, just like the Python version.
+ '--platform', 'linux',
+ # Despite what the documentation [1] states, the --python-version option does not cause mypy to search for a corresponding Python executable.
+ # It will instead use the Python executable that is used to run mypy itself.
+ # The --python-executable option can be used to specify the Python executable, with the default being the executable used to run mypy.
+ # As a precaution, that option is used in case the behavior of mypy is updated in the future to match the documentation.
+ # That should help guarantee that the Python executable providing type hints is the one used to run mypy.
+ # [1] https://mypy.readthedocs.io/en/stable/command_line.html#cmdoption-mypy-python-version
+ '--python-executable', virtualenv_python.path,
+ '--python-version', python.version,
+ # Below are context specific arguments.
+ # They are primarily useful for listing individual 'ignore_missing_imports' entries instead of using a global ignore.
+ '--config-file', config_path,
+ ]
+
+ cmd.extend(context_paths)
+
+ try:
+ stdout, stderr = intercept_python(args, virtualenv_python, cmd, env, capture=True)
+
+ if stdout or stderr:
+ raise SubprocessError(cmd, stdout=stdout, stderr=stderr)
+ except SubprocessError as ex:
+ if ex.status != 1 or ex.stderr or not ex.stdout:
+ raise
+
+ stdout = ex.stdout
+
+ pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):((?P<column>[0-9]+):)? (?P<level>[^:]+): (?P<message>.*)$'
+
+ parsed = parse_to_list_of_dict(pattern, stdout)
+
+ messages = [SanityMessage(
+ level=r['level'],
+ message=r['message'],
+ path=r['path'],
+ line=int(r['line']),
+ column=int(r.get('column') or '0'),
+ ) for r in parsed]
+
+ return messages
+
+
+@dataclasses.dataclass(frozen=True)
+class MyPyContext:
+ """Context details for a single run of mypy."""
+ name: str
+ paths: t.List[str]
+ python_versions: t.Tuple[str, ...]
diff --git a/test/lib/ansible_test/_internal/commands/sanity/pep8.py b/test/lib/ansible_test/_internal/commands/sanity/pep8.py
index 71241c91..2610e730 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/pep8.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pep8.py
@@ -92,7 +92,7 @@ class Pep8Test(SanitySingleVersion):
else:
results = []
- results = [SanityMessage(
+ messages = [SanityMessage(
message=r['message'],
path=r['path'],
line=int(r['line']),
@@ -101,7 +101,7 @@ class Pep8Test(SanitySingleVersion):
code=r['code'],
) for r in results]
- errors = settings.process_errors(results, paths)
+ errors = settings.process_errors(messages, paths)
if errors:
return SanityFailure(self.name, messages=errors)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/pylint.py b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
index 5bd17878..0e6ace8e 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/pylint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
@@ -128,7 +128,6 @@ class PylintTest(SanitySingleVersion):
add_context(remaining_paths, 'validate-modules', filter_path('test/lib/ansible_test/_util/controller/sanity/validate-modules/'))
add_context(remaining_paths, 'validate-modules-unit', filter_path('test/lib/ansible_test/tests/validate-modules-unit/'))
add_context(remaining_paths, 'code-smell', filter_path('test/lib/ansible_test/_util/controller/sanity/code-smell/'))
- add_context(remaining_paths, 'legacy-collection-loader', filter_path('test/lib/ansible_test/_util/target/legacy_collection_loader/'))
add_context(remaining_paths, 'ansible-test-target', filter_path('test/lib/ansible_test/_util/target/'))
add_context(remaining_paths, 'ansible-test', filter_path('test/lib/'))
add_context(remaining_paths, 'test', filter_path('test/'))
diff --git a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
index 0eccc01f..e0fbac64 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
@@ -1,11 +1,13 @@
"""Sanity test using validate-modules."""
from __future__ import annotations
+import collections
import json
import os
import typing as t
from . import (
+ DOCUMENTABLE_PLUGINS,
SanitySingleVersion,
SanityMessage,
SanityFailure,
@@ -64,28 +66,56 @@ class ValidateModulesTest(SanitySingleVersion):
'deprecated-date',
])
+ self._prefixes = {
+ plugin_type: plugin_path + '/'
+ for plugin_type, plugin_path in data_context().content.plugin_paths.items()
+ if plugin_type in DOCUMENTABLE_PLUGINS
+ }
+
+ self._exclusions = set()
+
+ if not data_context().content.collection:
+ self._exclusions.add('lib/ansible/plugins/cache/base.py')
+
@property
def error_code(self): # type: () -> t.Optional[str]
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'A100'
+ def get_plugin_type(self, target): # type: (TestTarget) -> t.Optional[str]
+ """Return the plugin type of the given target, or None if it is not a plugin or module."""
+ if target.path.endswith('/__init__.py'):
+ return None
+
+ if target.path in self._exclusions:
+ return None
+
+ for plugin_type, prefix in self._prefixes.items():
+ if target.path.startswith(prefix):
+ return plugin_type
+
+ return None
+
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
- return [target for target in targets if target.module]
+ return [target for target in targets if self.get_plugin_type(target) is not None]
def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
env = ansible_environment(args, color=False)
settings = self.load_processor(args)
- paths = [target.path for target in targets.include]
+ target_per_type = collections.defaultdict(list)
+
+ for target in targets.include:
+ target_per_type[self.get_plugin_type(target)].append(target)
cmd = [
python.path,
- os.path.join(SANITY_ROOT, 'validate-modules', 'validate-modules'),
+ os.path.join(SANITY_ROOT, 'validate-modules', 'validate.py'),
'--format', 'json',
'--arg-spec',
- ] + paths
+ ]
if data_context().content.collection:
cmd.extend(['--collection', data_context().content.collection.directory])
@@ -109,39 +139,52 @@ class ValidateModulesTest(SanitySingleVersion):
else:
display.warning('Cannot perform module comparison against the base branch because the base branch was not detected.')
- try:
- stdout, stderr = run_command(args, cmd, env=env, capture=True)
- status = 0
- except SubprocessError as ex:
- stdout = ex.stdout
- stderr = ex.stderr
- status = ex.status
+ errors = []
- if stderr or status not in (0, 3):
- raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
+ for plugin_type, plugin_targets in sorted(target_per_type.items()):
+ paths = [target.path for target in plugin_targets]
+ plugin_cmd = list(cmd)
- if args.explain:
- return SanitySuccess(self.name)
+ if plugin_type != 'modules':
+ plugin_cmd += ['--plugin-type', plugin_type]
- messages = json.loads(stdout)
+ plugin_cmd += paths
- errors = []
+ try:
+ stdout, stderr = run_command(args, plugin_cmd, env=env, capture=True)
+ status = 0
+ except SubprocessError as ex:
+ stdout = ex.stdout
+ stderr = ex.stderr
+ status = ex.status
+
+ if stderr or status not in (0, 3):
+ raise SubprocessError(cmd=plugin_cmd, status=status, stderr=stderr, stdout=stdout)
+
+ if args.explain:
+ continue
- for filename in messages:
- output = messages[filename]
+ messages = json.loads(stdout)
- for item in output['errors']:
- errors.append(SanityMessage(
- path=filename,
- line=int(item['line']) if 'line' in item else 0,
- column=int(item['column']) if 'column' in item else 0,
- code='%s' % item['code'],
- message=item['msg'],
- ))
+ for filename in messages:
+ output = messages[filename]
- errors = settings.process_errors(errors, paths)
+ for item in output['errors']:
+ errors.append(SanityMessage(
+ path=filename,
+ line=int(item['line']) if 'line' in item else 0,
+ column=int(item['column']) if 'column' in item else 0,
+ code='%s' % item['code'],
+ message=item['msg'],
+ ))
+
+ all_paths = [target.path for target in targets.include]
+ all_errors = settings.process_errors(errors, all_paths)
+
+ if args.explain:
+ return SanitySuccess(self.name)
- if errors:
- return SanityFailure(self.name, messages=errors)
+ if all_errors:
+ return SanityFailure(self.name, messages=all_errors)
return SanitySuccess(self.name)
diff --git a/test/lib/ansible_test/_internal/commands/shell/__init__.py b/test/lib/ansible_test/_internal/commands/shell/__init__.py
index 7364819e..4b205171 100644
--- a/test/lib/ansible_test/_internal/commands/shell/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/shell/__init__.py
@@ -18,6 +18,7 @@ from ...executor import (
)
from ...connections import (
+ Connection,
LocalConnection,
SshConnection,
)
@@ -55,13 +56,13 @@ def command_shell(args): # type: (ShellConfig) -> None
if isinstance(target_profile, ControllerProfile):
# run the shell locally unless a target was requested
- con = LocalConnection(args)
+ con = LocalConnection(args) # type: Connection
else:
# a target was requested, connect to it over SSH
con = target_profile.get_controller_target_connections()[0]
if isinstance(con, SshConnection) and args.raw:
- cmd = []
+ cmd = [] # type: t.List[str]
elif isinstance(target_profile, PosixProfile):
cmd = []
diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py
index 9a2b1827..02fae8dd 100644
--- a/test/lib/ansible_test/_internal/commands/units/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/units/__init__.py
@@ -245,18 +245,13 @@ def command_units(args): # type: (UnitsConfig) -> None
'-p', 'no:cacheprovider',
'-c', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest.ini'),
'--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-%s-units.xml' % (python.version, test_context)),
+ '--strict-markers', # added in pytest 4.5.0
+ '--rootdir', data_context().content.root,
]
if not data_context().content.collection:
cmd.append('--durations=25')
- if python.version == '2.6':
- # same as --strict-markers in older versions of pytest which still support python 2.6
- cmd.append('--strict')
- else:
- # added in pytest 4.5.0, which requires python 2.7+
- cmd.append('--strict-markers')
-
plugins = []
if args.coverage:
@@ -296,9 +291,9 @@ def get_units_ansible_python_path(args, test_context): # type: (UnitsConfig, st
return get_ansible_python_path(args)
try:
- cache = get_units_ansible_python_path.cache
+ cache = get_units_ansible_python_path.cache # type: ignore[attr-defined]
except AttributeError:
- cache = get_units_ansible_python_path.cache = {}
+ cache = get_units_ansible_python_path.cache = {} # type: ignore[attr-defined]
python_path = cache.get(test_context)
@@ -324,7 +319,6 @@ def get_units_ansible_python_path(args, test_context): # type: (UnitsConfig, st
# legacy collection loader required by all python versions not supported by the controller
write_text_file(os.path.join(ansible_test_path, '__init__.py'), '', True)
write_text_file(os.path.join(ansible_test_path, '_internal', '__init__.py'), '', True)
- os.symlink(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'legacy_collection_loader'), os.path.join(ansible_test_path, '_internal', 'legacy_collection_loader'))
elif test_context == TestContext.modules:
# only non-collection ansible module tests should have access to ansible built-in modules
os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'modules'), os.path.join(ansible_path, 'modules'))
diff --git a/test/lib/ansible_test/_internal/compat/packaging.py b/test/lib/ansible_test/_internal/compat/packaging.py
index a38e1abc..44c2bdbb 100644
--- a/test/lib/ansible_test/_internal/compat/packaging.py
+++ b/test/lib/ansible_test/_internal/compat/packaging.py
@@ -1,14 +1,16 @@
"""Packaging compatibility."""
from __future__ import annotations
+import typing as t
+
try:
from packaging import (
specifiers,
version,
)
- SpecifierSet = specifiers.SpecifierSet
- Version = version.Version
+ SpecifierSet = specifiers.SpecifierSet # type: t.Optional[t.Type[specifiers.SpecifierSet]]
+ Version = version.Version # type: t.Optional[t.Type[version.Version]]
PACKAGING_IMPORT_ERROR = None
except ImportError as ex:
SpecifierSet = None # pylint: disable=invalid-name
diff --git a/test/lib/ansible_test/_internal/compat/yaml.py b/test/lib/ansible_test/_internal/compat/yaml.py
index daa5ef0e..e4dbb651 100644
--- a/test/lib/ansible_test/_internal/compat/yaml.py
+++ b/test/lib/ansible_test/_internal/compat/yaml.py
@@ -1,6 +1,8 @@
"""PyYAML compatibility."""
from __future__ import annotations
+import typing as t
+
from functools import (
partial,
)
@@ -13,7 +15,7 @@ except ImportError as ex:
YAML_IMPORT_ERROR = ex
else:
try:
- _SafeLoader = _yaml.CSafeLoader
+ _SafeLoader = _yaml.CSafeLoader # type: t.Union[t.Type[_yaml.CSafeLoader], t.Type[_yaml.SafeLoader]]
except AttributeError:
_SafeLoader = _yaml.SafeLoader
diff --git a/test/lib/ansible_test/_internal/completion.py b/test/lib/ansible_test/_internal/completion.py
index 86674cb2..7aee99ed 100644
--- a/test/lib/ansible_test/_internal/completion.py
+++ b/test/lib/ansible_test/_internal/completion.py
@@ -211,9 +211,9 @@ def filter_completion(
controller_only=False, # type: bool
include_defaults=False, # type: bool
): # type: (...) -> t.Dict[str, TCompletionConfig]
- """Return a the given completion dictionary, filtering out configs which do not support the controller if controller_only is specified."""
+ """Return the given completion dictionary, filtering out configs which do not support the controller if controller_only is specified."""
if controller_only:
- completion = {name: config for name, config in completion.items() if config.controller_supported}
+ completion = {name: config for name, config in completion.items() if isinstance(config, PosixCompletionConfig) and config.controller_supported}
if not include_defaults:
completion = {name: config for name, config in completion.items() if not config.is_default}
diff --git a/test/lib/ansible_test/_internal/config.py b/test/lib/ansible_test/_internal/config.py
index 5b276eb1..0a14a806 100644
--- a/test/lib/ansible_test/_internal/config.py
+++ b/test/lib/ansible_test/_internal/config.py
@@ -10,6 +10,7 @@ from .util import (
display,
verify_sys_executable,
version_to_str,
+ type_guard,
)
from .util_common import (
@@ -96,7 +97,7 @@ class EnvironmentConfig(CommonConfig):
not isinstance(self.controller, OriginConfig)
or isinstance(self.controller.python, VirtualPythonConfig)
or self.controller.python.version != version_to_str(sys.version_info[:2])
- or verify_sys_executable(self.controller.python.path)
+ or bool(verify_sys_executable(self.controller.python.path))
)
self.docker_network = args.docker_network # type: t.Optional[str]
@@ -161,16 +162,14 @@ class EnvironmentConfig(CommonConfig):
def only_targets(self, target_type): # type: (t.Type[THostConfig]) -> t.List[THostConfig]
"""
Return a list of target host configurations.
- Requires that there are one or more targets, all of the specified type.
+ Requires that there are one or more targets, all of the specified type.
"""
if not self.targets:
raise Exception('There must be one or more targets.')
- for target in self.targets:
- if not isinstance(target, target_type):
- raise Exception(f'Target is {type(target_type)} instead of {target_type}.')
+ assert type_guard(self.targets, target_type)
- return self.targets
+ return t.cast(t.List[THostConfig], self.targets)
@property
def target_type(self): # type: () -> t.Type[HostConfig]
@@ -218,7 +217,7 @@ class TestConfig(EnvironmentConfig):
self.failure_ok = getattr(args, 'failure_ok', False) # type: bool
self.metadata = Metadata.from_file(args.metadata) if args.metadata else Metadata()
- self.metadata_path = None
+ self.metadata_path = None # type: t.Optional[str]
if self.coverage_check:
self.coverage = True
diff --git a/test/lib/ansible_test/_internal/connections.py b/test/lib/ansible_test/_internal/connections.py
index ddf4e8df..14234b2d 100644
--- a/test/lib/ansible_test/_internal/connections.py
+++ b/test/lib/ansible_test/_internal/connections.py
@@ -58,8 +58,6 @@ class Connection(metaclass=abc.ABCMeta):
src, # type: t.IO[bytes]
):
"""Extract the given archive file stream in the specified directory."""
- # This will not work on AIX.
- # However, AIX isn't supported as a controller, which is where this would be needed.
tar_cmd = ['tar', 'oxzf', '-', '-C', chdir]
retry(lambda: self.run(tar_cmd, stdin=src))
@@ -75,13 +73,11 @@ class Connection(metaclass=abc.ABCMeta):
gzip_cmd = ['gzip']
if exclude:
- # This will not work on AIX.
- # However, AIX isn't supported as a controller, which is where this would be needed.
tar_cmd += ['--exclude', exclude]
tar_cmd.append(name)
- # Using gzip to compress the archive allows this to work on all POSIX systems we support, including AIX.
+ # Using gzip to compress the archive allows this to work on all POSIX systems we support.
commands = [tar_cmd, gzip_cmd]
sh_cmd = ['sh', '-c', ' | '.join(' '.join(shlex.quote(cmd) for cmd in command) for command in commands)]
diff --git a/test/lib/ansible_test/_internal/constants.py b/test/lib/ansible_test/_internal/constants.py
index cac72408..84667851 100644
--- a/test/lib/ansible_test/_internal/constants.py
+++ b/test/lib/ansible_test/_internal/constants.py
@@ -35,15 +35,15 @@ SECCOMP_CHOICES = [
# It is necessary for payload creation to reconstruct the bin directory when running ansible-test from an installed version of ansible.
# It is also used to construct the injector directory at runtime.
ANSIBLE_BIN_SYMLINK_MAP = {
- 'ansible': '../lib/ansible/cli/scripts/ansible_cli_stub.py',
- 'ansible-config': 'ansible',
+ 'ansible': '../lib/ansible/cli/adhoc.py',
+ 'ansible-config': '../lib/ansible/cli/config.py',
'ansible-connection': '../lib/ansible/cli/scripts/ansible_connection_cli_stub.py',
- 'ansible-console': 'ansible',
- 'ansible-doc': 'ansible',
- 'ansible-galaxy': 'ansible',
- 'ansible-inventory': 'ansible',
- 'ansible-playbook': 'ansible',
- 'ansible-pull': 'ansible',
+ 'ansible-console': '../lib/ansible/cli/console.py',
+ 'ansible-doc': '../lib/ansible/cli/doc.py',
+ 'ansible-galaxy': '../lib/ansible/cli/galaxy.py',
+ 'ansible-inventory': '../lib/ansible/cli/inventory.py',
+ 'ansible-playbook': '../lib/ansible/cli/playbook.py',
+ 'ansible-pull': '../lib/ansible/cli/pull.py',
'ansible-test': '../test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py',
- 'ansible-vault': 'ansible',
+ 'ansible-vault': '../lib/ansible/cli/vault.py',
}
diff --git a/test/lib/ansible_test/_internal/containers.py b/test/lib/ansible_test/_internal/containers.py
index 7ffbfb4c..5e29c6ac 100644
--- a/test/lib/ansible_test/_internal/containers.py
+++ b/test/lib/ansible_test/_internal/containers.py
@@ -15,7 +15,6 @@ from .util import (
ApplicationError,
SubprocessError,
display,
- get_host_ip,
sanitize_host_name,
)
@@ -43,6 +42,7 @@ from .docker_util import (
docker_start,
get_docker_container_id,
get_docker_host_ip,
+ get_podman_host_ip,
require_docker,
)
@@ -103,7 +103,7 @@ def run_support_container(
args, # type: EnvironmentConfig
context, # type: str
image, # type: str
- name, # type: name
+ name, # type: str
ports, # type: t.List[int]
aliases=None, # type: t.Optional[t.List[str]]
start=True, # type: bool
@@ -223,7 +223,7 @@ def run_support_container(
def get_container_database(args): # type: (EnvironmentConfig) -> ContainerDatabase
"""Return the current container database, creating it as needed, or returning the one provided on the command line through delegation."""
try:
- return get_container_database.database
+ return get_container_database.database # type: ignore[attr-defined]
except AttributeError:
pass
@@ -236,9 +236,9 @@ def get_container_database(args): # type: (EnvironmentConfig) -> ContainerDatab
display.info('>>> Container Database\n%s' % json.dumps(database.to_dict(), indent=4, sort_keys=True), verbosity=3)
- get_container_database.database = database
+ get_container_database.database = database # type: ignore[attr-defined]
- return get_container_database.database
+ return database
class ContainerAccess:
@@ -286,7 +286,7 @@ class ContainerAccess:
def to_dict(self): # type: () -> t.Dict[str, t.Any]
"""Return a dict of the current instance."""
- value = dict(
+ value: t.Dict[str, t.Any] = dict(
host_ip=self.host_ip,
names=self.names,
)
@@ -350,8 +350,12 @@ def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDa
for name, container in support_containers.items():
if container.details.published_ports:
+ if require_docker().command == 'podman':
+ host_ip_func = get_podman_host_ip
+ else:
+ host_ip_func = get_docker_host_ip
published_access = ContainerAccess(
- host_ip=get_docker_host_ip(),
+ host_ip=host_ip_func(),
names=container.aliases,
ports=None,
forwards=dict((port, published_port) for port, published_port in container.details.published_ports.items()),
@@ -370,7 +374,7 @@ def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDa
elif require_docker().command == 'podman':
# published ports for rootless podman containers should be accessible from the host's IP
container_access = ContainerAccess(
- host_ip=get_host_ip(),
+ host_ip=get_podman_host_ip(),
names=container.aliases,
ports=None,
forwards=dict((port, published_port) for port, published_port in container.details.published_ports.items()),
@@ -457,7 +461,7 @@ class SupportContainerContext:
def support_container_context(
args, # type: EnvironmentConfig
ssh, # type: t.Optional[SshConnectionDetail]
-): # type: (...) -> t.Optional[ContainerDatabase]
+): # type: (...) -> t.Iterator[t.Optional[ContainerDatabase]]
"""Create a context manager for integration tests that use support containers."""
if not isinstance(args, (IntegrationConfig, UnitsConfig, SanityConfig, ShellConfig)):
yield None # containers are only needed for commands that have targets (hosts or pythons)
@@ -514,7 +518,7 @@ def create_support_container_context(
try:
port_forwards = process.collect_port_forwards()
- contexts = {}
+ contexts = {} # type: t.Dict[str, t.Dict[str, ContainerAccess]]
for forward, forwarded_port in port_forwards.items():
access_host, access_port = forward
@@ -702,8 +706,8 @@ def create_container_hooks(
else:
managed_type = 'posix'
- control_state = {}
- managed_state = {}
+ control_state = {} # type: t.Dict[str, t.Tuple[t.List[str], t.List[SshProcess]]]
+ managed_state = {} # type: t.Dict[str, t.Tuple[t.List[str], t.List[SshProcess]]]
def pre_target(target):
"""Configure hosts for SSH port forwarding required by the specified target."""
@@ -722,7 +726,7 @@ def create_container_hooks(
def create_managed_contexts(control_contexts): # type: (t.Dict[str, t.Dict[str, ContainerAccess]]) -> t.Dict[str, t.Dict[str, ContainerAccess]]
"""Create managed contexts from the given control contexts."""
- managed_contexts = {}
+ managed_contexts = {} # type: t.Dict[str, t.Dict[str, ContainerAccess]]
for context_name, control_context in control_contexts.items():
managed_context = managed_contexts[context_name] = {}
@@ -789,7 +793,7 @@ def forward_ssh_ports(
hosts_entries = create_hosts_entries(test_context)
inventory = generate_ssh_inventory(ssh_connections)
- with named_temporary_file(args, 'ssh-inventory-', '.json', None, inventory) as inventory_path:
+ with named_temporary_file(args, 'ssh-inventory-', '.json', None, inventory) as inventory_path: # type: str
run_playbook(args, inventory_path, playbook, dict(hosts_entries=hosts_entries))
ssh_processes = [] # type: t.List[SshProcess]
@@ -822,7 +826,7 @@ def cleanup_ssh_ports(
inventory = generate_ssh_inventory(ssh_connections)
- with named_temporary_file(args, 'ssh-inventory-', '.json', None, inventory) as inventory_path:
+ with named_temporary_file(args, 'ssh-inventory-', '.json', None, inventory) as inventory_path: # type: str
run_playbook(args, inventory_path, playbook, dict(hosts_entries=hosts_entries))
if ssh_processes:
diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py
index 023b5655..dbb428ae 100644
--- a/test/lib/ansible_test/_internal/core_ci.py
+++ b/test/lib/ansible_test/_internal/core_ci.py
@@ -107,7 +107,7 @@ class AnsibleCoreCI:
self._clear()
if self.instance_id:
- self.started = True
+ self.started = True # type: bool
else:
self.started = False
self.instance_id = str(uuid.uuid4())
diff --git a/test/lib/ansible_test/_internal/coverage_util.py b/test/lib/ansible_test/_internal/coverage_util.py
index e705db76..5c489a02 100644
--- a/test/lib/ansible_test/_internal/coverage_util.py
+++ b/test/lib/ansible_test/_internal/coverage_util.py
@@ -110,7 +110,7 @@ def get_coverage_environment(
def get_coverage_config(args): # type: (TestConfig) -> str
"""Return the path to the coverage config, creating the config if it does not already exist."""
try:
- return get_coverage_config.path
+ return get_coverage_config.path # type: ignore[attr-defined]
except AttributeError:
pass
@@ -122,11 +122,13 @@ def get_coverage_config(args): # type: (TestConfig) -> str
temp_dir = tempfile.mkdtemp()
atexit.register(lambda: remove_tree(temp_dir))
- path = get_coverage_config.path = os.path.join(temp_dir, COVERAGE_CONFIG_NAME)
+ path = os.path.join(temp_dir, COVERAGE_CONFIG_NAME)
if not args.explain:
write_text_file(path, coverage_config)
+ get_coverage_config.path = path # type: ignore[attr-defined]
+
return path
diff --git a/test/lib/ansible_test/_internal/data.py b/test/lib/ansible_test/_internal/data.py
index c3b2187c..42fa5a2a 100644
--- a/test/lib/ansible_test/_internal/data.py
+++ b/test/lib/ansible_test/_internal/data.py
@@ -9,6 +9,7 @@ from .util import (
ApplicationError,
import_plugins,
is_subdir,
+ is_valid_identifier,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_ROOT,
ANSIBLE_SOURCE_ROOT,
@@ -34,11 +35,19 @@ from .provider.source.installed import (
InstalledSource,
)
+from .provider.source.unsupported import (
+ UnsupportedSource,
+)
+
from .provider.layout import (
ContentLayout,
LayoutProvider,
)
+from .provider.layout.unsupported import (
+ UnsupportedLayout,
+)
+
class DataContext:
"""Data context providing details about the current execution environment for ansible-test."""
@@ -109,14 +118,20 @@ class DataContext:
walk, # type: bool
): # type: (...) -> ContentLayout
"""Create a content layout using the given providers and root path."""
- layout_provider = find_path_provider(LayoutProvider, layout_providers, root, walk)
+ try:
+ layout_provider = find_path_provider(LayoutProvider, layout_providers, root, walk)
+ except ProviderNotFoundForPath:
+ layout_provider = UnsupportedLayout(root)
try:
# Begin the search for the source provider at the layout provider root.
# This intentionally ignores version control within subdirectories of the layout root, a condition which was previously an error.
# Doing so allows support for older git versions for which it is difficult to distinguish between a super project and a sub project.
# It also provides a better user experience, since the solution for the user would effectively be the same -- to remove the nested version control.
- source_provider = find_path_provider(SourceProvider, source_providers, layout_provider.root, walk)
+ if isinstance(layout_provider, UnsupportedLayout):
+ source_provider = UnsupportedSource(layout_provider.root) # type: SourceProvider
+ else:
+ source_provider = find_path_provider(SourceProvider, source_providers, layout_provider.root, walk)
except ProviderNotFoundForPath:
source_provider = UnversionedSource(layout_provider.root)
@@ -161,6 +176,48 @@ class DataContext:
"""Register the given payload callback."""
self.payload_callbacks.append(callback)
+ def check_layout(self) -> None:
+ """Report an error if the layout is unsupported."""
+ if self.content.unsupported:
+ raise ApplicationError(self.explain_working_directory())
+
+ def explain_working_directory(self) -> str:
+ """Return a message explaining the working directory requirements."""
+ blocks = [
+ 'The current working directory must be within the source tree being tested.',
+ '',
+ ]
+
+ if ANSIBLE_SOURCE_ROOT:
+ blocks.append(f'Testing Ansible: {ANSIBLE_SOURCE_ROOT}/')
+ blocks.append('')
+
+ cwd = os.getcwd()
+
+ blocks.append('Testing an Ansible collection: {...}/ansible_collections/{namespace}/{collection}/')
+ blocks.append('Example #1: community.general -> ~/code/ansible_collections/community/general/')
+ blocks.append('Example #2: ansible.util -> ~/.ansible/collections/ansible_collections/ansible/util/')
+ blocks.append('')
+ blocks.append(f'Current working directory: {cwd}/')
+
+ if os.path.basename(os.path.dirname(cwd)) == 'ansible_collections':
+ blocks.append(f'Expected parent directory: {os.path.dirname(cwd)}/{{namespace}}/{{collection}}/')
+ elif os.path.basename(cwd) == 'ansible_collections':
+ blocks.append(f'Expected parent directory: {cwd}/{{namespace}}/{{collection}}/')
+ elif 'ansible_collections' not in cwd.split(os.path.sep):
+ blocks.append('No "ansible_collections" parent directory was found.')
+
+ if self.content.collection:
+ if not is_valid_identifier(self.content.collection.namespace):
+ blocks.append(f'The namespace "{self.content.collection.namespace}" is an invalid identifier or a reserved keyword.')
+
+ if not is_valid_identifier(self.content.collection.name):
+ blocks.append(f'The name "{self.content.collection.name}" is an invalid identifier or a reserved keyword.')
+
+ message = '\n'.join(blocks)
+
+ return message
+
@cache
def data_context(): # type: () -> DataContext
@@ -173,21 +230,7 @@ def data_context(): # type: () -> DataContext
for provider_type in provider_types:
import_plugins('provider/%s' % provider_type)
- try:
- context = DataContext()
- except ProviderNotFoundForPath:
- options = [
- ' - an Ansible collection: {...}/ansible_collections/{namespace}/{collection}/',
- ]
-
- if ANSIBLE_SOURCE_ROOT:
- options.insert(0, ' - the Ansible source: %s/' % ANSIBLE_SOURCE_ROOT)
-
- raise ApplicationError('''The current working directory must be at or below:
-
-%s
-
-Current working directory: %s''' % ('\n'.join(options), os.getcwd()))
+ context = DataContext()
return context
diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py
index 83ff24c0..6298bf24 100644
--- a/test/lib/ansible_test/_internal/delegation.py
+++ b/test/lib/ansible_test/_internal/delegation.py
@@ -12,6 +12,7 @@ from .io import (
)
from .config import (
+ CommonConfig,
EnvironmentConfig,
IntegrationConfig,
SanityConfig,
@@ -36,6 +37,7 @@ from .util_common import (
from .containers import (
support_container_context,
+ ContainerDatabase,
)
from .data import (
@@ -68,7 +70,7 @@ from .provisioning import (
@contextlib.contextmanager
-def delegation_context(args, host_state): # type: (EnvironmentConfig, HostState) -> None
+def delegation_context(args, host_state): # type: (EnvironmentConfig, HostState) -> t.Iterator[None]
"""Context manager for serialized host state during delegation."""
make_dirs(ResultType.TMP.path)
@@ -88,8 +90,10 @@ def delegation_context(args, host_state): # type: (EnvironmentConfig, HostState
args.host_path = None
-def delegate(args, host_state, exclude, require): # type: (EnvironmentConfig, HostState, t.List[str], t.List[str]) -> None
+def delegate(args, host_state, exclude, require): # type: (CommonConfig, HostState, t.List[str], t.List[str]) -> None
"""Delegate execution of ansible-test to another environment."""
+ assert isinstance(args, EnvironmentConfig)
+
with delegation_context(args, host_state):
if isinstance(args, TestConfig):
args.metadata.ci_provider = get_ci_provider().code
@@ -142,7 +146,7 @@ def delegate_command(args, host_state, exclude, require): # type: (EnvironmentC
if not args.allow_destructive:
options.append('--allow-destructive')
- with support_container_context(args, ssh) as containers:
+ with support_container_context(args, ssh) as containers: # type: t.Optional[ContainerDatabase]
if containers:
options.extend(['--containers', json.dumps(containers.to_dict())])
@@ -168,7 +172,14 @@ def delegate_command(args, host_state, exclude, require): # type: (EnvironmentC
if networks is not None:
for network in networks:
- con.disconnect_network(network)
+ try:
+ con.disconnect_network(network)
+ except SubprocessError:
+ display.warning(
+ 'Unable to disconnect network "%s" (this is normal under podman). '
+ 'Tests will not be isolated from the network. Network-related tests may '
+ 'misbehave.' % (network,)
+ )
else:
display.warning('Network disconnection is not supported (this is normal under podman). '
'Tests will not be isolated from the network. Network-related tests may misbehave.')
diff --git a/test/lib/ansible_test/_internal/docker_util.py b/test/lib/ansible_test/_internal/docker_util.py
index da113f02..12509076 100644
--- a/test/lib/ansible_test/_internal/docker_util.py
+++ b/test/lib/ansible_test/_internal/docker_util.py
@@ -37,6 +37,10 @@ DOCKER_COMMANDS = [
'podman',
]
+# Max number of open files in a docker container.
+# Passed with --ulimit option to the docker run command.
+MAX_NUM_OPEN_FILES = 10240
+
class DockerCommand:
"""Details about the available docker command."""
@@ -118,6 +122,77 @@ def get_docker_hostname(): # type: () -> str
@cache
+def get_podman_host_ip(): # type: () -> str
+ """Return the IP of the Podman host."""
+ podman_host_ip = socket.gethostbyname(get_podman_hostname())
+
+ display.info('Detected Podman host IP: %s' % podman_host_ip, verbosity=1)
+
+ return podman_host_ip
+
+
+@cache
+def get_podman_default_hostname(): # type: () -> str
+ """Return the default hostname of the Podman service.
+
+ --format was added in podman 3.3.0, this functionality depends on its availability
+ """
+ hostname = None
+ try:
+ stdout = raw_command(['podman', 'system', 'connection', 'list', '--format=json'], capture=True)[0]
+ except SubprocessError:
+ stdout = '[]'
+
+ try:
+ connections = json.loads(stdout)
+ except json.decoder.JSONDecodeError:
+ return hostname
+
+ for connection in connections:
+ # A trailing '*' indicates the default
+ if connection['Name'][-1] == '*':
+ hostname = connection['URI']
+ break
+
+ return hostname
+
+
+@cache
+def _get_podman_remote(): # type: () -> t.Optional[str]
+ # URL value resolution precedence:
+ # - command line value
+ # - environment variable CONTAINER_HOST
+ # - containers.conf
+ # - unix://run/podman/podman.sock
+ hostname = None
+
+ podman_host = os.environ.get('CONTAINER_HOST')
+ if not podman_host:
+ podman_host = get_podman_default_hostname()
+
+ if podman_host and podman_host.startswith('ssh://'):
+ try:
+ hostname = urllib.parse.urlparse(podman_host).hostname
+ except ValueError:
+ display.warning('Could not parse podman URI "%s"' % podman_host)
+ else:
+ display.info('Detected Podman remote: %s' % hostname, verbosity=1)
+ return hostname
+
+
+@cache
+def get_podman_hostname(): # type: () -> str
+ """Return the hostname of the Podman service."""
+ hostname = _get_podman_remote()
+
+ if not hostname:
+ hostname = 'localhost'
+ display.info('Assuming Podman is available on localhost.', verbosity=1)
+
+ return hostname
+
+
+@cache
def get_docker_container_id(): # type: () -> t.Optional[str]
"""Return the current container ID if running in a container, otherwise return None."""
path = '/proc/self/cpuset'
@@ -150,7 +225,7 @@ def get_docker_preferred_network_name(args): # type: (EnvironmentConfig) -> str
- the default docker network (returns None)
"""
try:
- return get_docker_preferred_network_name.network
+ return get_docker_preferred_network_name.network # type: ignore[attr-defined]
except AttributeError:
pass
@@ -167,14 +242,14 @@ def get_docker_preferred_network_name(args): # type: (EnvironmentConfig) -> str
container = docker_inspect(args, current_container_id, always=True)
network = container.get_network_name()
- get_docker_preferred_network_name.network = network
+ get_docker_preferred_network_name.network = network # type: ignore[attr-defined]
return network
def is_docker_user_defined_network(network): # type: (str) -> bool
"""Return True if the network being used is a user-defined network."""
- return network and network != 'bridge'
+ return bool(network) and network != 'bridge'
def docker_pull(args, image): # type: (EnvironmentConfig, str) -> None
@@ -232,6 +307,8 @@ def docker_run(
# Only when the network is not the default bridge network.
options.extend(['--network', network])
+ options.extend(['--ulimit', 'nofile=%s' % MAX_NUM_OPEN_FILES])
+
for _iteration in range(1, 3):
try:
stdout = docker_command(args, [command] + options + [image] + cmd, capture=True)[0]
@@ -241,14 +318,14 @@ def docker_run(
return stdout.strip()
except SubprocessError as ex:
- display.error(ex)
+ display.error(ex.message)
display.warning('Failed to run docker image "%s". Waiting a few seconds before trying again.' % image)
time.sleep(3)
raise ApplicationError('Failed to run docker image "%s".' % image)
-def docker_start(args, container_id, options=None): # type: (EnvironmentConfig, str, t.Optional[t.List[str]]) -> (t.Optional[str], t.Optional[str])
+def docker_start(args, container_id, options=None): # type: (EnvironmentConfig, str, t.Optional[t.List[str]]) -> t.Tuple[t.Optional[str], t.Optional[str]]
"""
Start a docker container by name or ID
"""
@@ -259,7 +336,7 @@ def docker_start(args, container_id, options=None): # type: (EnvironmentConfig,
try:
return docker_command(args, ['start'] + options + [container_id], capture=True)
except SubprocessError as ex:
- display.error(ex)
+ display.error(ex.message)
display.warning('Failed to start docker container "%s". Waiting a few seconds before trying again.' % container_id)
time.sleep(3)
@@ -395,11 +472,11 @@ class DockerInspect:
def docker_inspect(args, identifier, always=False): # type: (EnvironmentConfig, str, bool) -> DockerInspect
"""
- Return the results of `docker inspect` for the specified container.
+ Return the results of `docker container inspect` for the specified container.
Raises a ContainerNotFoundError if the container was not found.
"""
try:
- stdout = docker_command(args, ['inspect', identifier], capture=True, always=always)[0]
+ stdout = docker_command(args, ['container', 'inspect', identifier], capture=True, always=always)[0]
except SubprocessError as ex:
stdout = ex.stdout
@@ -435,8 +512,8 @@ def docker_exec(
cmd, # type: t.List[str]
options=None, # type: t.Optional[t.List[str]]
capture=False, # type: bool
- stdin=None, # type: t.Optional[t.BinaryIO]
- stdout=None, # type: t.Optional[t.BinaryIO]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
data=None, # type: t.Optional[str]
): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
"""Execute the given command in the specified container."""
@@ -465,19 +542,21 @@ def docker_command(
args, # type: CommonConfig
cmd, # type: t.List[str]
capture=False, # type: bool
- stdin=None, # type: t.Optional[t.BinaryIO]
- stdout=None, # type: t.Optional[t.BinaryIO]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
always=False, # type: bool
data=None, # type: t.Optional[str]
): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
"""Run the specified docker command."""
env = docker_environment()
- command = require_docker().command
- return run_command(args, [command] + cmd, env=env, capture=capture, stdin=stdin, stdout=stdout, always=always, data=data)
+ command = [require_docker().command]
+ if command[0] == 'podman' and _get_podman_remote():
+ command.append('--remote')
+ return run_command(args, command + cmd, env=env, capture=capture, stdin=stdin, stdout=stdout, always=always, data=data)
def docker_environment(): # type: () -> t.Dict[str, str]
"""Return a dictionary of docker related environment variables found in the current environment."""
env = common_environment()
- env.update(dict((key, os.environ[key]) for key in os.environ if key.startswith('DOCKER_')))
+ env.update(dict((key, os.environ[key]) for key in os.environ if key.startswith('DOCKER_') or key.startswith('CONTAINER_')))
return env
diff --git a/test/lib/ansible_test/_internal/host_configs.py b/test/lib/ansible_test/_internal/host_configs.py
index 87030ae0..fee741e8 100644
--- a/test/lib/ansible_test/_internal/host_configs.py
+++ b/test/lib/ansible_test/_internal/host_configs.py
@@ -183,8 +183,10 @@ class PosixConfig(HostConfig, metaclass=abc.ABCMeta):
def get_defaults(self, context): # type: (HostContext) -> PosixCompletionConfig
"""Return the default settings."""
- def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
"""Apply default settings."""
+ assert isinstance(defaults, PosixCompletionConfig)
+
super().apply_defaults(context, defaults)
self.python = self.python or NativePythonConfig()
@@ -206,17 +208,19 @@ class RemoteConfig(HostConfig, metaclass=abc.ABCMeta):
provider: t.Optional[str] = None
@property
- def platform(self):
+ def platform(self): # type: () -> str
"""The name of the platform."""
return self.name.partition('/')[0]
@property
- def version(self):
+ def version(self): # type: () -> str
"""The version of the platform."""
return self.name.partition('/')[2]
- def apply_defaults(self, context, defaults): # type: (HostContext, RemoteCompletionConfig) -> None
+ def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
"""Apply default settings."""
+ assert isinstance(defaults, RemoteCompletionConfig)
+
super().apply_defaults(context, defaults)
if self.provider == 'default':
@@ -262,8 +266,9 @@ class InventoryConfig(HostConfig):
"""Return the default settings."""
return InventoryCompletionConfig()
- def apply_defaults(self, context, defaults): # type: (HostContext, InventoryCompletionConfig) -> None
+ def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
"""Apply default settings."""
+ assert isinstance(defaults, InventoryCompletionConfig)
@dataclasses.dataclass
@@ -293,8 +298,10 @@ class DockerConfig(ControllerHostConfig, PosixConfig):
return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in pythons.items()]
- def apply_defaults(self, context, defaults): # type: (HostContext, DockerCompletionConfig) -> None
+ def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
"""Apply default settings."""
+ assert isinstance(defaults, DockerCompletionConfig)
+
super().apply_defaults(context, defaults)
self.name = defaults.name
@@ -358,9 +365,7 @@ class WindowsRemoteConfig(RemoteConfig, WindowsConfig):
"""Configuration for a remoe Windows host."""
def get_defaults(self, context): # type: (HostContext) -> WindowsRemoteCompletionConfig
"""Return the default settings."""
- return filter_completion(windows_completion()).get(self.name) or WindowsRemoteCompletionConfig(
- name=self.name,
- )
+ return filter_completion(windows_completion()).get(self.name) or windows_completion().get(self.platform)
@dataclasses.dataclass
@@ -385,8 +390,10 @@ class NetworkRemoteConfig(RemoteConfig, NetworkConfig):
name=self.name,
)
- def apply_defaults(self, context, defaults): # type: (HostContext, NetworkRemoteCompletionConfig) -> None
+ def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
"""Apply default settings."""
+ assert isinstance(defaults, NetworkRemoteCompletionConfig)
+
super().apply_defaults(context, defaults)
self.collection = self.collection or defaults.collection
@@ -412,7 +419,7 @@ class OriginConfig(ControllerHostConfig, PosixConfig):
@property
def have_root(self): # type: () -> bool
"""True if root is available, otherwise False."""
- return os.getuid() != 0
+ return os.getuid() == 0
@dataclasses.dataclass
@@ -424,8 +431,10 @@ class ControllerConfig(PosixConfig):
"""Return the default settings."""
return context.controller_config.get_defaults(context)
- def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
"""Apply default settings."""
+ assert isinstance(defaults, PosixCompletionConfig)
+
self.controller = context.controller_config
if not self.python and not defaults.supported_pythons:
@@ -449,7 +458,7 @@ class ControllerConfig(PosixConfig):
class FallbackReason(enum.Enum):
- """Reason fallback was peformed."""
+ """Reason fallback was performed."""
ENVIRONMENT = enum.auto()
PYTHON = enum.auto()
diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py
index e3aeeeeb..9079c7e9 100644
--- a/test/lib/ansible_test/_internal/host_profiles.py
+++ b/test/lib/ansible_test/_internal/host_profiles.py
@@ -96,6 +96,7 @@ from .connections import (
)
from .become import (
+ Become,
Su,
Sudo,
)
@@ -109,11 +110,11 @@ TRemoteConfig = t.TypeVar('TRemoteConfig', bound=RemoteConfig)
@dataclasses.dataclass(frozen=True)
class Inventory:
"""Simple representation of an Ansible inventory."""
- host_groups: t.Dict[str, t.Dict[str, t.Dict[str, str]]]
+ host_groups: t.Dict[str, t.Dict[str, t.Dict[str, t.Union[str, int]]]]
extra_groups: t.Optional[t.Dict[str, t.List[str]]] = None
@staticmethod
- def create_single_host(name, variables): # type: (str, t.Dict[str, str]) -> Inventory
+ def create_single_host(name, variables): # type: (str, t.Dict[str, t.Union[str, int]]) -> Inventory
"""Return an inventory instance created from the given hostname and variables."""
return Inventory(host_groups=dict(all={name: variables}))
@@ -448,7 +449,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
- def get_inventory_variables(self):
+ def get_inventory_variables(self): # type: () -> t.Dict[str, t.Optional[t.Union[str, int]]]
"""Return inventory variables for accessing this host."""
core_ci = self.wait_for_instance()
connection = core_ci.connection
@@ -461,7 +462,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
ansible_user=connection.username,
ansible_ssh_private_key_file=core_ci.ssh_key.key,
ansible_network_os=f'{self.config.collection}.{self.config.platform}' if self.config.collection else self.config.platform,
- )
+ ) # type: t.Dict[str, t.Optional[t.Union[str, int]]]
return variables
@@ -562,7 +563,7 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile
)
if settings.user == 'root':
- become = None
+ become = None # type: t.Optional[Become]
elif self.config.platform == 'freebsd':
become = Su()
elif self.config.platform == 'macos':
@@ -672,7 +673,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
- def get_inventory_variables(self):
+ def get_inventory_variables(self): # type: () -> t.Dict[str, t.Optional[t.Union[str, int]]]
"""Return inventory variables for accessing this host."""
core_ci = self.wait_for_instance()
connection = core_ci.connection
@@ -686,7 +687,7 @@ class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
ansible_user=connection.username,
ansible_password=connection.password,
ansible_ssh_private_key_file=core_ci.ssh_key.key,
- )
+ ) # type: t.Dict[str, t.Optional[t.Union[str, int]]]
# HACK: force 2016 to use NTLM + HTTP message encryption
if self.config.version == '2016':
diff --git a/test/lib/ansible_test/_internal/inventory.py b/test/lib/ansible_test/_internal/inventory.py
index 73a9ae9c..f5a245ec 100644
--- a/test/lib/ansible_test/_internal/inventory.py
+++ b/test/lib/ansible_test/_internal/inventory.py
@@ -94,7 +94,7 @@ def create_network_inventory(args, path, target_hosts): # type: (EnvironmentCon
return
target_hosts = t.cast(t.List[NetworkRemoteProfile], target_hosts)
- host_groups = {target_host.config.platform: {} for target_host in target_hosts}
+ host_groups = {target_host.config.platform: {} for target_host in target_hosts} # type: t.Dict[str, t.Dict[str, t.Dict[str, t.Union[str, int]]]]
for target_host in target_hosts:
host_groups[target_host.config.platform][sanitize_host_name(target_host.config.name)] = target_host.get_inventory_variables()
@@ -149,7 +149,7 @@ def create_posix_inventory(args, path, target_hosts, needs_ssh=False): # type:
ansible_port=ssh.settings.port,
ansible_user=ssh.settings.user,
ansible_ssh_private_key_file=ssh.settings.identity_file,
- )
+ ) # type: t.Dict[str, t.Optional[t.Union[str, int]]]
if ssh.become:
testhost.update(
diff --git a/test/lib/ansible_test/_internal/io.py b/test/lib/ansible_test/_internal/io.py
index 9d3301a1..df8c98d4 100644
--- a/test/lib/ansible_test/_internal/io.py
+++ b/test/lib/ansible_test/_internal/io.py
@@ -14,17 +14,17 @@ from .encoding import (
)
-def read_json_file(path): # type: (t.AnyStr) -> t.Any
+def read_json_file(path): # type: (str) -> t.Any
"""Parse and return the json content from the specified path."""
return json.loads(read_text_file(path))
-def read_text_file(path): # type: (t.AnyStr) -> t.Text
+def read_text_file(path): # type: (str) -> t.Text
"""Return the contents of the specified path as text."""
return to_text(read_binary_file(path))
-def read_binary_file(path): # type: (t.AnyStr) -> bytes
+def read_binary_file(path): # type: (str) -> bytes
"""Return the contents of the specified path as bytes."""
with open_binary_file(path) as file_obj:
return file_obj.read()
@@ -43,7 +43,7 @@ def write_json_file(path, # type: str
content, # type: t.Any
create_directories=False, # type: bool
formatted=True, # type: bool
- encoder=None, # type: t.Optional[t.Callable[[t.Any], t.Any]]
+ encoder=None, # type: t.Optional[t.Type[json.JSONEncoder]]
): # type: (...) -> str
"""Write the given json content to the specified path, optionally creating missing directories."""
text_content = json.dumps(content,
@@ -67,21 +67,19 @@ def write_text_file(path, content, create_directories=False): # type: (str, str
file_obj.write(to_bytes(content))
-def open_text_file(path, mode='r'): # type: (str, str) -> t.TextIO
+def open_text_file(path, mode='r'): # type: (str, str) -> t.IO[str]
"""Open the given path for text access."""
if 'b' in mode:
raise Exception('mode cannot include "b" for text files: %s' % mode)
- # noinspection PyTypeChecker
return io.open(to_bytes(path), mode, encoding=ENCODING) # pylint: disable=consider-using-with
-def open_binary_file(path, mode='rb'): # type: (str, str) -> t.BinaryIO
+def open_binary_file(path, mode='rb'): # type: (str, str) -> t.IO[bytes]
"""Open the given path for binary access."""
if 'b' not in mode:
raise Exception('mode must include "b" for binary files: %s' % mode)
- # noinspection PyTypeChecker
return io.open(to_bytes(path), mode) # pylint: disable=consider-using-with
diff --git a/test/lib/ansible_test/_internal/junit_xml.py b/test/lib/ansible_test/_internal/junit_xml.py
index bde5519b..3b958672 120000..100644
--- a/test/lib/ansible_test/_internal/junit_xml.py
+++ b/test/lib/ansible_test/_internal/junit_xml.py
@@ -1 +1,267 @@
-../../../../lib/ansible/utils/_junit_xml.py \ No newline at end of file
+"""
+Dataclasses for creating JUnit XML files.
+See: https://github.com/junit-team/junit5/blob/main/platform-tests/src/test/resources/jenkins-junit.xsd
+"""
+from __future__ import annotations
+
+import abc
+import dataclasses
+import datetime
+import decimal
+
+from xml.dom import minidom
+# noinspection PyPep8Naming
+from xml.etree import ElementTree as ET
+
+
+@dataclasses.dataclass # type: ignore[misc] # https://github.com/python/mypy/issues/5374
+class TestResult(metaclass=abc.ABCMeta):
+ """Base class for the result of a test case."""
+ output: str | None = None
+ message: str | None = None
+ type: str | None = None
+
+ def __post_init__(self):
+ if self.type is None:
+ self.type = self.tag
+
+ @property
+ @abc.abstractmethod
+ def tag(self) -> str:
+ """Tag name for the XML element created by this result type."""
+
+ def get_attributes(self) -> dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ message=self.message,
+ type=self.type,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element(self.tag, self.get_attributes())
+ element.text = self.output
+
+ return element
+
+
+@dataclasses.dataclass
+class TestFailure(TestResult):
+ """Failure info for a test case."""
+ @property
+ def tag(self) -> str:
+ """Tag name for the XML element created by this result type."""
+ return 'failure'
+
+
+@dataclasses.dataclass
+class TestError(TestResult):
+ """Error info for a test case."""
+ @property
+ def tag(self) -> str:
+ """Tag name for the XML element created by this result type."""
+ return 'error'
+
+
+@dataclasses.dataclass
+class TestCase:
+ """An individual test case."""
+ name: str
+ assertions: int | None = None
+ classname: str | None = None
+ status: str | None = None
+ time: decimal.Decimal | None = None
+
+ errors: list[TestError] = dataclasses.field(default_factory=list)
+ failures: list[TestFailure] = dataclasses.field(default_factory=list)
+ skipped: str | None = None
+ system_out: str | None = None
+ system_err: str | None = None
+
+ is_disabled: bool = False
+
+ @property
+ def is_failure(self) -> bool:
+ """True if the test case contains failure info."""
+ return bool(self.failures)
+
+ @property
+ def is_error(self) -> bool:
+ """True if the test case contains error info."""
+ return bool(self.errors)
+
+ @property
+ def is_skipped(self) -> bool:
+ """True if the test case was skipped."""
+ return bool(self.skipped)
+
+ def get_attributes(self) -> dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ assertions=self.assertions,
+ classname=self.classname,
+ name=self.name,
+ status=self.status,
+ time=self.time,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element('testcase', self.get_attributes())
+
+ if self.skipped:
+ ET.SubElement(element, 'skipped').text = self.skipped
+
+ element.extend([error.get_xml_element() for error in self.errors])
+ element.extend([failure.get_xml_element() for failure in self.failures])
+
+ if self.system_out:
+ ET.SubElement(element, 'system-out').text = self.system_out
+
+ if self.system_err:
+ ET.SubElement(element, 'system-err').text = self.system_err
+
+ return element
+
+
+@dataclasses.dataclass
+class TestSuite:
+ """A collection of test cases."""
+ name: str
+ hostname: str | None = None
+ id: str | None = None
+ package: str | None = None
+ timestamp: datetime.datetime | None = None
+
+ properties: dict[str, str] = dataclasses.field(default_factory=dict)
+ cases: list[TestCase] = dataclasses.field(default_factory=list)
+ system_out: str | None = None
+ system_err: str | None = None
+
+ @property
+ def disabled(self) -> int:
+ """The number of disabled test cases."""
+ return sum(case.is_disabled for case in self.cases)
+
+ @property
+ def errors(self) -> int:
+ """The number of test cases containing error info."""
+ return sum(case.is_error for case in self.cases)
+
+ @property
+ def failures(self) -> int:
+ """The number of test cases containing failure info."""
+ return sum(case.is_failure for case in self.cases)
+
+ @property
+ def skipped(self) -> int:
+ """The number of test cases containing skipped info."""
+ return sum(case.is_skipped for case in self.cases)
+
+ @property
+ def tests(self) -> int:
+ """The number of test cases."""
+ return len(self.cases)
+
+ @property
+ def time(self) -> decimal.Decimal:
+ """The total time from all test cases."""
+ return decimal.Decimal(sum(case.time for case in self.cases if case.time))
+
+ def get_attributes(self) -> dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ disabled=self.disabled,
+ errors=self.errors,
+ failures=self.failures,
+ hostname=self.hostname,
+ id=self.id,
+ name=self.name,
+ package=self.package,
+ skipped=self.skipped,
+ tests=self.tests,
+ time=self.time,
+ timestamp=self.timestamp.isoformat(timespec='seconds') if self.timestamp else None,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element('testsuite', self.get_attributes())
+
+ if self.properties:
+ ET.SubElement(element, 'properties').extend([ET.Element('property', dict(name=name, value=value)) for name, value in self.properties.items()])
+
+ element.extend([test_case.get_xml_element() for test_case in self.cases])
+
+ if self.system_out:
+ ET.SubElement(element, 'system-out').text = self.system_out
+
+ if self.system_err:
+ ET.SubElement(element, 'system-err').text = self.system_err
+
+ return element
+
+
+@dataclasses.dataclass
+class TestSuites:
+ """A collection of test suites."""
+ name: str | None = None
+
+ suites: list[TestSuite] = dataclasses.field(default_factory=list)
+
+ @property
+ def disabled(self) -> int:
+ """The number of disabled test cases."""
+ return sum(suite.disabled for suite in self.suites)
+
+ @property
+ def errors(self) -> int:
+ """The number of test cases containing error info."""
+ return sum(suite.errors for suite in self.suites)
+
+ @property
+ def failures(self) -> int:
+ """The number of test cases containing failure info."""
+ return sum(suite.failures for suite in self.suites)
+
+ @property
+ def tests(self) -> int:
+ """The number of test cases."""
+ return sum(suite.tests for suite in self.suites)
+
+ @property
+ def time(self) -> decimal.Decimal:
+ """The total time from all test cases."""
+ return decimal.Decimal(sum(suite.time for suite in self.suites))
+
+ def get_attributes(self) -> dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ disabled=self.disabled,
+ errors=self.errors,
+ failures=self.failures,
+ name=self.name,
+ tests=self.tests,
+ time=self.time,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element('testsuites', self.get_attributes())
+ element.extend([suite.get_xml_element() for suite in self.suites])
+
+ return element
+
+ def to_pretty_xml(self) -> str:
+ """Return a pretty formatted XML string representing this instance."""
+ return _pretty_xml(self.get_xml_element())
+
+
+def _attributes(**kwargs) -> dict[str, str]:
+ """Return the given kwargs as a dictionary with values converted to strings. Items with a value of None will be omitted."""
+ return {key: str(value) for key, value in kwargs.items() if value is not None}
+
+
+def _pretty_xml(element: ET.Element) -> str:
+ """Return a pretty formatted XML string representing the given element."""
+ return minidom.parseString(ET.tostring(element, encoding='unicode')).toprettyxml()
diff --git a/test/lib/ansible_test/_internal/metadata.py b/test/lib/ansible_test/_internal/metadata.py
index 769ec834..e7f82b0a 100644
--- a/test/lib/ansible_test/_internal/metadata.py
+++ b/test/lib/ansible_test/_internal/metadata.py
@@ -21,8 +21,8 @@ class Metadata:
"""Metadata object for passing data to delegated tests."""
def __init__(self):
"""Initialize metadata."""
- self.changes = {} # type: t.Dict[str, t.Tuple[t.Tuple[int, int]]]
- self.cloud_config = None # type: t.Optional[t.Dict[str, str]]
+ self.changes = {} # type: t.Dict[str, t.Tuple[t.Tuple[int, int], ...]]
+ self.cloud_config = None # type: t.Optional[t.Dict[str, t.Dict[str, t.Union[int, str, bool]]]]
self.change_description = None # type: t.Optional[ChangeDescription]
self.ci_provider = None # type: t.Optional[str]
diff --git a/test/lib/ansible_test/_internal/payload.py b/test/lib/ansible_test/_internal/payload.py
index d92f9f65..e6ccc6ed 100644
--- a/test/lib/ansible_test/_internal/payload.py
+++ b/test/lib/ansible_test/_internal/payload.py
@@ -34,8 +34,8 @@ from .util_common import (
)
# improve performance by disabling uid/gid lookups
-tarfile.pwd = None
-tarfile.grp = None
+tarfile.pwd = None # type: ignore[attr-defined] # undocumented attribute
+tarfile.grp = None # type: ignore[attr-defined] # undocumented attribute
def create_payload(args, dst_path): # type: (CommonConfig, str) -> None
@@ -69,8 +69,8 @@ def create_payload(args, dst_path): # type: (CommonConfig, str) -> None
collection_layouts = data_context().create_collection_layouts()
- content_files = []
- extra_files = []
+ content_files = [] # type: t.List[t.Tuple[str, str]]
+ extra_files = [] # type: t.List[t.Tuple[str, str]]
for layout in collection_layouts:
if layout == data_context().content:
diff --git a/test/lib/ansible_test/_internal/provider/__init__.py b/test/lib/ansible_test/_internal/provider/__init__.py
index e8972ac8..78346142 100644
--- a/test/lib/ansible_test/_internal/provider/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/__init__.py
@@ -16,7 +16,7 @@ def get_path_provider_classes(provider_type): # type: (t.Type[TPathProvider]) -
return sorted(get_subclasses(provider_type), key=lambda c: (c.priority, c.__name__))
-def find_path_provider(provider_type, # type: t.Type[TPathProvider],
+def find_path_provider(provider_type, # type: t.Type[TPathProvider]
provider_classes, # type: t.List[t.Type[TPathProvider]]
path, # type: str
walk, # type: bool
diff --git a/test/lib/ansible_test/_internal/provider/layout/__init__.py b/test/lib/ansible_test/_internal/provider/layout/__init__.py
index 147fcbd5..9fd13550 100644
--- a/test/lib/ansible_test/_internal/provider/layout/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/layout/__init__.py
@@ -91,6 +91,7 @@ class ContentLayout(Layout):
unit_module_path, # type: str
unit_module_utils_path, # type: str
unit_messages, # type: t.Optional[LayoutMessages]
+ unsupported=False, # type: bool
): # type: (...) -> None
super().__init__(root, paths)
@@ -108,6 +109,7 @@ class ContentLayout(Layout):
self.unit_module_path = unit_module_path
self.unit_module_utils_path = unit_module_utils_path
self.unit_messages = unit_messages
+ self.unsupported = unsupported
self.is_ansible = root == ANSIBLE_SOURCE_ROOT
@@ -204,7 +206,7 @@ class LayoutProvider(PathProvider):
def paths_to_tree(paths): # type: (t.List[str]) -> t.Tuple[t.Dict[str, t.Any], t.List[str]]
"""Return a filesystem tree from the given list of paths."""
- tree = {}, []
+ tree = {}, [] # type: t.Tuple[t.Dict[str, t.Any], t.List[str]]
for path in paths:
parts = path.split(os.path.sep)
diff --git a/test/lib/ansible_test/_internal/provider/layout/collection.py b/test/lib/ansible_test/_internal/provider/layout/collection.py
index 5dca046f..6b826ee4 100644
--- a/test/lib/ansible_test/_internal/provider/layout/collection.py
+++ b/test/lib/ansible_test/_internal/provider/layout/collection.py
@@ -11,6 +11,10 @@ from . import (
LayoutMessages,
)
+from ...util import (
+ is_valid_identifier,
+)
+
class CollectionLayout(LayoutProvider):
"""Layout provider for Ansible collections."""
@@ -28,6 +32,10 @@ class CollectionLayout(LayoutProvider):
collection_root = os.path.dirname(os.path.dirname(root))
collection_dir = os.path.relpath(root, collection_root)
+
+ collection_namespace: str
+ collection_name: str
+
collection_namespace, collection_name = collection_dir.split(os.path.sep)
collection_root = os.path.dirname(collection_root)
@@ -65,6 +73,7 @@ class CollectionLayout(LayoutProvider):
unit_module_path='tests/unit/plugins/modules',
unit_module_utils_path='tests/unit/plugins/module_utils',
unit_messages=unit_messages,
+ unsupported=not(is_valid_identifier(collection_namespace) and is_valid_identifier(collection_name)),
)
@staticmethod
diff --git a/test/lib/ansible_test/_internal/provider/layout/unsupported.py b/test/lib/ansible_test/_internal/provider/layout/unsupported.py
new file mode 100644
index 00000000..80a91291
--- /dev/null
+++ b/test/lib/ansible_test/_internal/provider/layout/unsupported.py
@@ -0,0 +1,42 @@
+"""Layout provider for an unsupported directory layout."""
+from __future__ import annotations
+
+import typing as t
+
+from . import (
+ ContentLayout,
+ LayoutProvider,
+)
+
+
+class UnsupportedLayout(LayoutProvider):
+ """Layout provider for an unsupported directory layout."""
+ sequence = 0 # disable automatic detection
+
+ @staticmethod
+ def is_content_root(path): # type: (str) -> bool
+ """Return True if the given path is a content root for this provider."""
+ return False
+
+ def create(self, root, paths): # type: (str, t.List[str]) -> ContentLayout
+ """Create a Layout using the given root and paths."""
+ plugin_paths = dict((p, p) for p in self.PLUGIN_TYPES)
+
+ return ContentLayout(root,
+ paths,
+ plugin_paths=plugin_paths,
+ collection=None,
+ test_path='',
+ results_path='',
+ sanity_path='',
+ sanity_messages=None,
+ integration_path='',
+ integration_targets_path='',
+ integration_vars_path='',
+ integration_messages=None,
+ unit_path='',
+ unit_module_path='',
+ unit_module_utils_path='',
+ unit_messages=None,
+ unsupported=True,
+ )
diff --git a/test/lib/ansible_test/_internal/provider/source/unsupported.py b/test/lib/ansible_test/_internal/provider/source/unsupported.py
new file mode 100644
index 00000000..ff5562c6
--- /dev/null
+++ b/test/lib/ansible_test/_internal/provider/source/unsupported.py
@@ -0,0 +1,22 @@
+"""Source provider to use when the layout is unsupported."""
+from __future__ import annotations
+
+import typing as t
+
+from . import (
+ SourceProvider,
+)
+
+
+class UnsupportedSource(SourceProvider):
+ """Source provider to use when the layout is unsupported."""
+ sequence = 0 # disable automatic detection
+
+ @staticmethod
+ def is_content_root(path): # type: (str) -> bool
+ """Return True if the given path is a content root for this provider."""
+ return False
+
+ def get_paths(self, path): # type: (str) -> t.List[str]
+ """Return the list of available content paths under the given path."""
+ return []
diff --git a/test/lib/ansible_test/_internal/provisioning.py b/test/lib/ansible_test/_internal/provisioning.py
index a9536036..85d91326 100644
--- a/test/lib/ansible_test/_internal/provisioning.py
+++ b/test/lib/ansible_test/_internal/provisioning.py
@@ -22,6 +22,7 @@ from .util import (
open_binary_file,
verify_sys_executable,
version_to_str,
+ type_guard,
)
from .thread import (
@@ -88,10 +89,9 @@ class HostState:
if not self.target_profiles:
raise Exception('No target profiles found.')
- if not all(isinstance(target, profile_type) for target in self.target_profiles):
- raise Exception(f'Target profile(s) are not of the required type: {profile_type}')
+ assert type_guard(self.target_profiles, profile_type)
- return self.target_profiles
+ return t.cast(t.List[THostProfile], self.target_profiles)
def prepare_profiles(
diff --git a/test/lib/ansible_test/_internal/pypi_proxy.py b/test/lib/ansible_test/_internal/pypi_proxy.py
index 968794fd..51974d26 100644
--- a/test/lib/ansible_test/_internal/pypi_proxy.py
+++ b/test/lib/ansible_test/_internal/pypi_proxy.py
@@ -3,6 +3,7 @@ from __future__ import annotations
import atexit
import os
+import typing as t
import urllib.parse
from .io import (
@@ -54,8 +55,9 @@ def run_pypi_proxy(args, targets_use_pypi): # type: (EnvironmentConfig, bool) -
if args.pypi_endpoint:
return # user has overridden the proxy endpoint, there is nothing to provision
+ versions_needing_proxy = tuple() # type: t.Tuple[str, ...] # preserved for future use, no versions currently require this
posix_targets = [target for target in args.targets if isinstance(target, PosixConfig)]
- need_proxy = targets_use_pypi and any(target.python.version == '2.6' for target in posix_targets)
+ need_proxy = targets_use_pypi and any(target.python.version in versions_needing_proxy for target in posix_targets)
use_proxy = args.pypi_proxy or need_proxy
if not use_proxy:
diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py
index aaaf44b8..f67f6598 100644
--- a/test/lib/ansible_test/_internal/python_requirements.py
+++ b/test/lib/ansible_test/_internal/python_requirements.py
@@ -142,9 +142,9 @@ def install_requirements(
if ansible:
try:
- ansible_cache = install_requirements.ansible_cache
+ ansible_cache = install_requirements.ansible_cache # type: ignore[attr-defined]
except AttributeError:
- ansible_cache = install_requirements.ansible_cache = {}
+ ansible_cache = install_requirements.ansible_cache = {} # type: ignore[attr-defined]
ansible_installed = ansible_cache.get(python.path)
@@ -211,8 +211,7 @@ def collect_requirements(
if virtualenv:
# sanity tests on Python 2.x install virtualenv when it is too old or is not already installed and the `--requirements` option is given
# the last version of virtualenv with no dependencies is used to minimize the changes made outside a virtual environment
- virtualenv_version = '15.2.0' if python.version == '2.6' else '16.7.12'
- commands.extend(collect_package_install(packages=[f'virtualenv=={virtualenv_version}'], constraints=False))
+ commands.extend(collect_package_install(packages=['virtualenv==16.7.12'], constraints=False))
if coverage:
commands.extend(collect_package_install(packages=[f'coverage=={COVERAGE_REQUIRED_VERSION}'], constraints=False))
@@ -420,11 +419,6 @@ def get_venv_packages(python): # type: (PythonConfig) -> t.Dict[str, str]
)
override_packages = {
- '2.6': dict(
- pip='9.0.3', # 10.0 requires Python 2.7+
- setuptools='36.8.0', # 37.0.0 requires Python 2.7+
- wheel='0.29.0', # 0.30.0 requires Python 2.7+
- ),
'2.7': dict(
pip='20.3.4', # 21.0 requires Python 3.6+
setuptools='44.1.1', # 45.0.0 requires Python 3.5+
@@ -492,7 +486,7 @@ def prepare_pip_script(commands): # type: (t.List[PipCommand]) -> str
def usable_pip_file(path): # type: (t.Optional[str]) -> bool
"""Return True if the specified pip file is usable, otherwise False."""
- return path and os.path.exists(path) and os.path.getsize(path)
+ return bool(path) and os.path.exists(path) and bool(os.path.getsize(path))
# Cryptography
diff --git a/test/lib/ansible_test/_internal/ssh.py b/test/lib/ansible_test/_internal/ssh.py
index 21212dc1..7cee13c4 100644
--- a/test/lib/ansible_test/_internal/ssh.py
+++ b/test/lib/ansible_test/_internal/ssh.py
@@ -47,7 +47,7 @@ class SshProcess:
"""Wrapper around an SSH process."""
def __init__(self, process): # type: (t.Optional[subprocess.Popen]) -> None
self._process = process
- self.pending_forwards = None # type: t.Optional[t.Set[t.Tuple[str, int]]]
+ self.pending_forwards = None # type: t.Optional[t.List[t.Tuple[str, int]]]
self.forwards = {} # type: t.Dict[t.Tuple[str, int], int]
@@ -71,7 +71,7 @@ class SshProcess:
def collect_port_forwards(self): # type: (SshProcess) -> t.Dict[t.Tuple[str, int], int]
"""Collect port assignments for dynamic SSH port forwards."""
- errors = []
+ errors = [] # type: t.List[str]
display.info('Collecting %d SSH port forward(s).' % len(self.pending_forwards), verbosity=2)
@@ -107,7 +107,7 @@ class SshProcess:
dst = (dst_host, dst_port)
else:
# explain mode
- dst = list(self.pending_forwards)[0]
+ dst = self.pending_forwards[0]
src_port = random.randint(40000, 50000)
self.pending_forwards.remove(dst)
@@ -202,7 +202,7 @@ def create_ssh_port_forwards(
"""
options = dict(
LogLevel='INFO', # info level required to get messages on stderr indicating the ports assigned to each forward
- )
+ ) # type: t.Dict[str, t.Union[str, int]]
cli_args = []
@@ -221,7 +221,7 @@ def create_ssh_port_redirects(
redirects, # type: t.List[t.Tuple[int, str, int]]
): # type: (...) -> SshProcess
"""Create SSH port redirections using the provided list of tuples (bind_port, target_host, target_port)."""
- options = {}
+ options = {} # type: t.Dict[str, t.Union[str, int]]
cli_args = []
for bind_port, target_host, target_port in redirects:
diff --git a/test/lib/ansible_test/_internal/target.py b/test/lib/ansible_test/_internal/target.py
index ced111f7..879a7944 100644
--- a/test/lib/ansible_test/_internal/target.py
+++ b/test/lib/ansible_test/_internal/target.py
@@ -155,7 +155,7 @@ def walk_units_targets(): # type: () -> t.Iterable[TestTarget]
return walk_test_targets(path=data_context().content.unit_path, module_path=data_context().content.unit_module_path, extensions=('.py',), prefix='test_')
-def walk_compile_targets(include_symlinks=True): # type: (bool) -> t.Iterable[TestTarget, ...]
+def walk_compile_targets(include_symlinks=True): # type: (bool) -> t.Iterable[TestTarget]
"""Return an iterable of compile targets."""
return walk_test_targets(module_path=data_context().content.module_path, extensions=('.py',), extra_dirs=('bin',), include_symlinks=include_symlinks)
diff --git a/test/lib/ansible_test/_internal/test.py b/test/lib/ansible_test/_internal/test.py
index b67addc3..3e149b15 100644
--- a/test/lib/ansible_test/_internal/test.py
+++ b/test/lib/ansible_test/_internal/test.py
@@ -219,7 +219,7 @@ class TestFailure(TestResult):
command, # type: str
test, # type: str
python_version=None, # type: t.Optional[str]
- messages=None, # type: t.Optional[t.List[TestMessage]]
+ messages=None, # type: t.Optional[t.Sequence[TestMessage]]
summary=None, # type: t.Optional[str]
):
super().__init__(command, test, python_version)
diff --git a/test/lib/ansible_test/_internal/thread.py b/test/lib/ansible_test/_internal/thread.py
index 1b2fbec2..f74b365d 100644
--- a/test/lib/ansible_test/_internal/thread.py
+++ b/test/lib/ansible_test/_internal/thread.py
@@ -8,14 +8,14 @@ import queue
import typing as t
-TCallable = t.TypeVar('TCallable', bound=t.Callable)
+TCallable = t.TypeVar('TCallable', bound=t.Callable[..., t.Any])
class WrappedThread(threading.Thread):
"""Wrapper around Thread which captures results and exceptions."""
def __init__(self, action): # type: (t.Callable[[], t.Any]) -> None
super().__init__()
- self._result = queue.Queue()
+ self._result = queue.Queue() # type: queue.Queue[t.Any]
self.action = action
self.result = None
@@ -25,8 +25,8 @@ class WrappedThread(threading.Thread):
Do not override. Do not call directly. Executed by the start() method.
"""
# We truly want to catch anything that the worker thread might do including call sys.exit.
- # Therefore we catch *everything* (including old-style class exceptions)
- # noinspection PyBroadException, PyPep8
+ # Therefore, we catch *everything* (including old-style class exceptions)
+ # noinspection PyBroadException
try:
self._result.put((self.action(), None))
# pylint: disable=locally-disabled, bare-except
@@ -41,10 +41,7 @@ class WrappedThread(threading.Thread):
result, exception = self._result.get()
if exception:
- if sys.version_info[0] > 2:
- raise exception[1].with_traceback(exception[2])
- # noinspection PyRedundantParentheses
- exec('raise exception[0], exception[1], exception[2]') # pylint: disable=locally-disabled, exec-used
+ raise exception[1].with_traceback(exception[2])
self.result = result
@@ -61,4 +58,4 @@ def mutex(func): # type: (TCallable) -> TCallable
with lock:
return func(*args, **kwargs)
- return wrapper
+ return wrapper # type: ignore[return-value] # requires https://www.python.org/dev/peps/pep-0612/ support
diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py
index fdd921e1..0ad78882 100644
--- a/test/lib/ansible_test/_internal/util.py
+++ b/test/lib/ansible_test/_internal/util.py
@@ -3,13 +3,14 @@ from __future__ import annotations
import errno
import fcntl
+import importlib.util
import inspect
+import keyword
import os
import pkgutil
import random
import re
import shutil
-import socket
import stat
import string
import subprocess
@@ -22,6 +23,11 @@ import typing as t
from struct import unpack, pack
from termios import TIOCGWINSZ
+try:
+ from typing_extensions import TypeGuard # TypeGuard was added in Python 3.9
+except ImportError:
+ TypeGuard = None
+
from .encoding import (
to_bytes,
to_optional_bytes,
@@ -48,12 +54,6 @@ TValue = t.TypeVar('TValue')
PYTHON_PATHS = {} # type: t.Dict[str, str]
-try:
- # noinspection PyUnresolvedReferences
- MAXFD = subprocess.MAXFD
-except AttributeError:
- MAXFD = -1
-
COVERAGE_CONFIG_NAME = 'coveragerc'
ANSIBLE_TEST_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -79,6 +79,7 @@ ANSIBLE_TEST_CONTROLLER_ROOT = os.path.join(ANSIBLE_TEST_UTIL_ROOT, 'controller'
ANSIBLE_TEST_TARGET_ROOT = os.path.join(ANSIBLE_TEST_UTIL_ROOT, 'target')
ANSIBLE_TEST_TOOLS_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'tools')
+ANSIBLE_TEST_TARGET_TOOLS_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'tools')
# Modes are set to allow all users the same level of access.
# This permits files to be used in tests that change users.
@@ -95,6 +96,11 @@ MODE_DIRECTORY = MODE_READ | stat.S_IWUSR | stat.S_IXUSR | stat.S_IXGRP | stat.S
MODE_DIRECTORY_WRITE = MODE_DIRECTORY | stat.S_IWGRP | stat.S_IWOTH
+def is_valid_identifier(value: str) -> bool:
+ """Return True if the given value is a valid non-keyword Python identifier, otherwise return False."""
+ return value.isidentifier() and not keyword.iskeyword(value)
+
+
def cache(func): # type: (t.Callable[[], TValue]) -> t.Callable[[], TValue]
"""Enforce exclusive access on a decorated function and cache the result."""
storage = {} # type: t.Dict[None, TValue]
@@ -253,8 +259,8 @@ def raw_command(
data=None, # type: t.Optional[str]
cwd=None, # type: t.Optional[str]
explain=False, # type: bool
- stdin=None, # type: t.Optional[t.BinaryIO]
- stdout=None, # type: t.Optional[t.BinaryIO]
+ stdin=None, # type: t.Optional[t.Union[t.IO[bytes], int]]
+ stdout=None, # type: t.Optional[t.Union[t.IO[bytes], int]]
cmd_verbosity=1, # type: int
str_errors='strict', # type: str
error_callback=None, # type: t.Optional[t.Callable[[SubprocessError], None]]
@@ -367,9 +373,6 @@ def common_environment():
# Example configuration for brew on macOS:
# export LDFLAGS="-L$(brew --prefix openssl)/lib/ -L$(brew --prefix libyaml)/lib/"
# export CFLAGS="-I$(brew --prefix openssl)/include/ -I$(brew --prefix libyaml)/include/"
- # However, this is not adequate for PyYAML 3.13, which is the latest version supported on Python 2.6.
- # For that version the standard location must be used, or `pip install` must be invoked with additional options:
- # --global-option=build_ext --global-option=-L{path_to_lib_dir}
'LDFLAGS',
'CFLAGS',
)
@@ -466,7 +469,6 @@ def is_binary_file(path): # type: (str) -> bool
return True
with open_binary_file(path) as path_fd:
- # noinspection PyTypeChecker
return b'\0' in path_fd.read(4096)
@@ -570,7 +572,7 @@ class Display:
self,
message, # type: str
color=None, # type: t.Optional[str]
- fd=sys.stdout, # type: t.TextIO
+ fd=sys.stdout, # type: t.IO[str]
truncate=False, # type: bool
): # type: (...) -> None
"""Display a message."""
@@ -590,9 +592,6 @@ class Display:
message = message.replace(self.clear, color)
message = '%s%s%s' % (color, message, self.clear)
- if sys.version_info[0] == 2:
- message = to_bytes(message)
-
print(message, file=fd)
fd.flush()
@@ -771,23 +770,10 @@ def load_module(path, name): # type: (str, str) -> None
if name in sys.modules:
return
- if sys.version_info >= (3, 4):
- import importlib.util
-
- spec = importlib.util.spec_from_file_location(name, path)
- module = importlib.util.module_from_spec(spec)
- # noinspection PyUnresolvedReferences
- spec.loader.exec_module(module)
-
- sys.modules[name] = module
- else:
- # noinspection PyDeprecation
- import imp # pylint: disable=deprecated-module
-
- # load_source (and thus load_module) require a file opened with `open` in text mode
- with open(to_bytes(path)) as module_file:
- # noinspection PyDeprecation
- imp.load_module(name, module_file, path, ('.py', 'r', imp.PY_SOURCE))
+ spec = importlib.util.spec_from_file_location(name, path)
+ module = importlib.util.module_from_spec(spec)
+ sys.modules[name] = module
+ spec.loader.exec_module(module)
def sanitize_host_name(name):
@@ -795,18 +781,6 @@ def sanitize_host_name(name):
return re.sub('[^A-Za-z0-9]+', '-', name)[:63].strip('-')
-@cache
-def get_host_ip():
- """Return the host's IP address."""
- with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
- sock.connect(('10.255.255.255', 22))
- host_ip = get_host_ip.ip = sock.getsockname()[0]
-
- display.info('Detected host IP: %s' % host_ip, verbosity=1)
-
- return host_ip
-
-
def get_generic_type(base_type, generic_base_type): # type: (t.Type, t.Type[TType]) -> t.Optional[t.Type[TType]]
"""Return the generic type arg derived from the generic_base_type type that is associated with the base_type type, if any, otherwise return None."""
# noinspection PyUnresolvedReferences
@@ -840,4 +814,19 @@ def verify_sys_executable(path): # type: (str) -> t.Optional[str]
return expected_executable
+def type_guard(sequence: t.Sequence[t.Any], guard_type: t.Type[C]) -> TypeGuard[t.Sequence[C]]:
+ """
+ Raises an exception if any item in the given sequence does not match the specified guard type.
+ Use with assert so that type checkers are aware of the type guard.
+ """
+ invalid_types = set(type(item) for item in sequence if not isinstance(item, guard_type))
+
+ if not invalid_types:
+ return True
+
+ invalid_type_names = sorted(str(item) for item in invalid_types)
+
+ raise Exception(f'Sequence required to contain only {guard_type} includes: {", ".join(invalid_type_names)}')
+
+
display = Display() # pylint: disable=locally-disabled, invalid-name
diff --git a/test/lib/ansible_test/_internal/util_common.py b/test/lib/ansible_test/_internal/util_common.py
index b4d42420..99d22c2b 100644
--- a/test/lib/ansible_test/_internal/util_common.py
+++ b/test/lib/ansible_test/_internal/util_common.py
@@ -31,7 +31,7 @@ from .util import (
raw_command,
ANSIBLE_TEST_DATA_ROOT,
ANSIBLE_TEST_TARGET_ROOT,
- ANSIBLE_TEST_TOOLS_ROOT,
+ ANSIBLE_TEST_TARGET_TOOLS_ROOT,
ApplicationError,
SubprocessError,
generate_name,
@@ -57,7 +57,7 @@ from .host_configs import (
VirtualPythonConfig,
)
-CHECK_YAML_VERSIONS = {}
+CHECK_YAML_VERSIONS = {} # type: t.Dict[str, t.Any]
class ShellScriptTemplate:
@@ -65,7 +65,7 @@ class ShellScriptTemplate:
def __init__(self, template): # type: (t.Text) -> None
self.template = template
- def substitute(self, **kwargs): # type: (t.Dict[str, t.Union[str, t.List[str]]]) -> str
+ def substitute(self, **kwargs: t.Union[str, t.List[str]]) -> str:
"""Return a string templated with the given arguments."""
kvp = dict((k, self.quote(v)) for k, v in kwargs.items())
pattern = re.compile(r'#{(?P<name>[^}]+)}')
@@ -139,7 +139,7 @@ class CommonConfig:
self.session_name = generate_name()
- self.cache = {}
+ self.cache = {} # type: t.Dict[str, t.Any]
def get_ansible_config(self): # type: () -> str
"""Return the path to the Ansible config for the given config."""
@@ -194,15 +194,8 @@ def process_scoped_temporary_directory(args, prefix='ansible-test-', suffix=None
@contextlib.contextmanager
-def named_temporary_file(args, prefix, suffix, directory, content):
- """
- :param args: CommonConfig
- :param prefix: str
- :param suffix: str
- :param directory: str
- :param content: str | bytes | unicode
- :rtype: str
- """
+def named_temporary_file(args, prefix, suffix, directory, content): # type: (CommonConfig, str, str, t.Optional[str], str) -> t.Iterator[str]
+ """Context manager for a named temporary file."""
if args.explain:
yield os.path.join(directory or '/tmp', '%stemp%s' % (prefix, suffix))
else:
@@ -217,7 +210,7 @@ def write_json_test_results(category, # type: ResultType
name, # type: str
content, # type: t.Union[t.List[t.Any], t.Dict[str, t.Any]]
formatted=True, # type: bool
- encoder=None, # type: t.Optional[t.Callable[[t.Any], t.Any]]
+ encoder=None, # type: t.Optional[t.Type[json.JSONEncoder]]
): # type: (...) -> None
"""Write the given json content to the specified test results path, creating directories as needed."""
path = os.path.join(category.path, name)
@@ -411,8 +404,8 @@ def run_command(
data=None, # type: t.Optional[str]
cwd=None, # type: t.Optional[str]
always=False, # type: bool
- stdin=None, # type: t.Optional[t.BinaryIO]
- stdout=None, # type: t.Optional[t.BinaryIO]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
cmd_verbosity=1, # type: int
str_errors='strict', # type: str
error_callback=None, # type: t.Optional[t.Callable[[SubprocessError], None]]
@@ -425,7 +418,7 @@ def run_command(
def yamlcheck(python):
"""Return True if PyYAML has libyaml support, False if it does not and None if it was not found."""
- result = json.loads(raw_command([python.path, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'yamlcheck.py')], capture=True)[0])
+ result = json.loads(raw_command([python.path, os.path.join(ANSIBLE_TEST_TARGET_TOOLS_ROOT, 'yamlcheck.py')], capture=True)[0])
if not result['yaml']:
return None
diff --git a/test/lib/ansible_test/_internal/venv.py b/test/lib/ansible_test/_internal/venv.py
index cf436775..64d8d04c 100644
--- a/test/lib/ansible_test/_internal/venv.py
+++ b/test/lib/ansible_test/_internal/venv.py
@@ -15,7 +15,7 @@ from .util import (
find_python,
SubprocessError,
get_available_python_versions,
- ANSIBLE_TEST_TOOLS_ROOT,
+ ANSIBLE_TEST_TARGET_TOOLS_ROOT,
display,
remove_tree,
ApplicationError,
@@ -118,10 +118,6 @@ def create_virtual_environment(args, # type: EnvironmentConfig
# virtualenv not available for this Python or we were unable to detect the version
continue
- if python.version == '2.6' and virtualenv_version >= (16, 0, 0):
- # virtualenv 16.0.0 dropped python 2.6 support: https://virtualenv.pypa.io/en/latest/changes/#v16-0-0-2018-05-16
- continue
-
# try using 'virtualenv' from another Python to setup the desired version
if run_virtualenv(args, available_python_interpreter, python.path, system_site_packages, pip, path):
display.info('Created Python %s virtual environment using "virtualenv" on Python %s: %s' % (python.version, available_python_version, path),
@@ -176,7 +172,7 @@ def get_python_real_prefix(args, python_path): # type: (EnvironmentConfig, str)
"""
Return the real prefix of the specified interpreter or None if the interpreter is not a virtual environment created by 'virtualenv'.
"""
- cmd = [python_path, os.path.join(os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'virtualenvcheck.py'))]
+ cmd = [python_path, os.path.join(os.path.join(ANSIBLE_TEST_TARGET_TOOLS_ROOT, 'virtualenvcheck.py'))]
check_result = json.loads(run_command(args, cmd, capture=True, always=True)[0])
real_prefix = check_result['real_prefix']
return real_prefix
@@ -205,7 +201,7 @@ def run_venv(args, # type: EnvironmentConfig
remove_tree(path)
if args.verbosity > 1:
- display.error(ex)
+ display.error(ex.message)
return False
@@ -241,7 +237,7 @@ def run_virtualenv(args, # type: EnvironmentConfig
remove_tree(path)
if args.verbosity > 1:
- display.error(ex)
+ display.error(ex.message)
return False
@@ -249,11 +245,11 @@ def run_virtualenv(args, # type: EnvironmentConfig
def get_virtualenv_version(args, python): # type: (EnvironmentConfig, str) -> t.Optional[t.Tuple[int, ...]]
- """Get the virtualenv version for the given python intepreter, if available, otherwise return None."""
+ """Get the virtualenv version for the given python interpreter, if available, otherwise return None."""
try:
- cache = get_virtualenv_version.cache
+ cache = get_virtualenv_version.cache # type: ignore[attr-defined]
except AttributeError:
- cache = get_virtualenv_version.cache = {}
+ cache = get_virtualenv_version.cache = {} # type: ignore[attr-defined]
if python not in cache:
try:
@@ -262,7 +258,7 @@ def get_virtualenv_version(args, python): # type: (EnvironmentConfig, str) -> t
stdout = ''
if args.verbosity > 1:
- display.error(ex)
+ display.error(ex.message)
version = None
diff --git a/test/lib/ansible_test/_util/__init__.py b/test/lib/ansible_test/_util/__init__.py
index d6fc0a86..527d413a 100644
--- a/test/lib/ansible_test/_util/__init__.py
+++ b/test/lib/ansible_test/_util/__init__.py
@@ -1,3 +1,2 @@
-"""Nearly empty __init__.py to allow importing under Python 2.x."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+# Empty __init__.py to allow importing of `ansible_test._util.target.common` under Python 2.x.
+# This allows the ansible-test entry point to report supported Python versions before exiting.
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py
index e19b4d98..a319d1a1 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py
@@ -1,6 +1,5 @@
"""Test to verify action plugins have an associated module to provide documentation."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py
index 1875ab3a..fe2ba5e3 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py
index 000c29e4..7eab0f57 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py
@@ -1,5 +1,4 @@
"""Block the sphinx module from being loaded."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
raise ImportError('The sphinx module has been prevented from loading to maintain consistent test results.')
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py
index 806c0e6e..e0dd41cd 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py
index cdad9655..dcb02e7a 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import ast
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py
index 660b0fce..99417b6e 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py
index e3fba1f5..21cb0017 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import ast
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py
index d6d710ae..71883c9f 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py
index 18a3f6d1..bb564564 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py
index 7dfd5b26..1b728de6 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py
index 8925e831..9fe6e9ff 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py
index 18134154..8a955636 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py
index 5a267ba0..bf50a4d9 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py
index 421bbd62..36793f3f 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py
@@ -1,8 +1,7 @@
# a script to check for illegal filenames on various Operating Systems. The
# main rules are derived from restrictions on Windows
# https://msdn.microsoft.com/en-us/library/aa365247#naming_conventions
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import struct
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py
index e5abd64d..020c95d6 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py
index 8399a36e..1ac51710 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py
index bb8c8f01..2a341387 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py
index 87575f51..8484047c 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
index 929f371f..74a2b936 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
@@ -1,6 +1,5 @@
"""Schema validation of ansible-core's ansible_builtin_runtime.yml and collection's meta/runtime.yml"""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import datetime
import os
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py
index 401af1ae..73b45ac2 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import re
@@ -70,6 +69,10 @@ def main():
is_module = True
elif path == 'test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py':
pass # ansible-test entry point must be executable and have a shebang
+ elif re.search(r'^lib/ansible/cli/[^/]+\.py', path):
+ pass # cli entry points must be executable and have a shebang
+ elif path.startswith('examples/'):
+ continue # examples trigger some false positives due to location
elif path.startswith('lib/') or path.startswith('test/lib/'):
if executable:
print('%s:%d:%d: should not be executable' % (path, 0, 0))
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py
index 5603051a..4bd9d4bf 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py
index 68f380b0..0e2fcfa6 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py
index a8f0b879..d0997484 100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py b/test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py
index 74a45f00..af11dd8a 100644
--- a/test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py
+++ b/test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py
@@ -1,6 +1,5 @@
"""Read YAML from stdin and write JSON to stdout."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import sys
diff --git a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini
new file mode 100644
index 00000000..4d93f359
--- /dev/null
+++ b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini
@@ -0,0 +1,119 @@
+# IMPORTANT
+# Set "ignore_missing_imports" per package below, rather than globally.
+# That will help identify missing type stubs that should be added to the sanity test environment.
+
+[mypy]
+# There are ~20 errors reported in ansible-core when strict optional checking is enabled.
+# Until the number of occurrences are reduced, it's better to disable strict checking.
+strict_optional = False
+# There are ~70 errors reported in ansible-core when checking attributes.
+# Until the number of occurrences are reduced, it's better to disable the check.
+disable_error_code = attr-defined
+
+[mypy-ansible.module_utils.six.moves.*]
+ignore_missing_imports = True
+
+[mypy-passlib.*]
+ignore_missing_imports = True
+
+[mypy-pexpect.*]
+ignore_missing_imports = True
+
+[mypy-pypsrp.*]
+ignore_missing_imports = True
+
+[mypy-winrm.*]
+ignore_missing_imports = True
+
+[mypy-kerberos.*]
+ignore_missing_imports = True
+
+[mypy-xmltodict.*]
+ignore_missing_imports = True
+
+[mypy-md5.*]
+ignore_missing_imports = True
+
+[mypy-scp.*]
+ignore_missing_imports = True
+
+[mypy-ncclient.*]
+ignore_missing_imports = True
+
+[mypy-lxml.*]
+ignore_missing_imports = True
+
+[mypy-yum.*]
+ignore_missing_imports = True
+
+[mypy-rpmUtils.*]
+ignore_missing_imports = True
+
+[mypy-rpm.*]
+ignore_missing_imports = True
+
+[mypy-psutil.*]
+ignore_missing_imports = True
+
+[mypy-dnf.*]
+ignore_missing_imports = True
+
+[mypy-apt.*]
+ignore_missing_imports = True
+
+[mypy-apt_pkg.*]
+ignore_missing_imports = True
+
+[mypy-gssapi.*]
+ignore_missing_imports = True
+
+[mypy-_ssl.*]
+ignore_missing_imports = True
+
+[mypy-urllib_gssapi.*]
+ignore_missing_imports = True
+
+[mypy-systemd.*]
+ignore_missing_imports = True
+
+[mypy-sha.*]
+ignore_missing_imports = True
+
+[mypy-distro.*]
+ignore_missing_imports = True
+
+[mypy-selectors2.*]
+ignore_missing_imports = True
+
+[mypy-resolvelib.*]
+ignore_missing_imports = True
+
+[mypy-urlparse.*]
+ignore_missing_imports = True
+
+[mypy-argcomplete.*]
+ignore_missing_imports = True
+
+[mypy-selinux.*]
+ignore_missing_imports = True
+
+[mypy-urllib2.*]
+ignore_missing_imports = True
+
+[mypy-httplib.*]
+ignore_missing_imports = True
+
+[mypy-compiler.*]
+ignore_missing_imports = True
+
+[mypy-aptsources.*]
+ignore_missing_imports = True
+
+[mypy-urllib3.*]
+ignore_missing_imports = True
+
+[mypy-requests.*]
+ignore_missing_imports = True
+
+[mypy-jinja2.nativetypes]
+ignore_missing_imports = True
diff --git a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini
new file mode 100644
index 00000000..190e9529
--- /dev/null
+++ b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini
@@ -0,0 +1,24 @@
+# IMPORTANT
+# Set "ignore_missing_imports" per package below, rather than globally.
+# That will help identify missing type stubs that should be added to the sanity test environment.
+
+[mypy]
+# There are ~350 errors reported in ansible-test when strict optional checking is enabled.
+# Until the number of occurrences are greatly reduced, it's better to disable strict checking.
+strict_optional = False
+# There are ~25 errors reported in ansible-test under the 'misc' code.
+# The majority of those errors are "Only concrete class can be given", which is due to a limitation of mypy.
+# See: https://github.com/python/mypy/issues/5374
+disable_error_code = misc
+
+[mypy-argcomplete]
+ignore_missing_imports = True
+
+[mypy-coverage]
+ignore_missing_imports = True
+
+[mypy-ansible_release]
+ignore_missing_imports = True
+
+[mypy-StringIO]
+ignore_missing_imports = True
diff --git a/test/lib/ansible_test/_util/controller/sanity/mypy/modules.ini b/test/lib/ansible_test/_util/controller/sanity/mypy/modules.ini
new file mode 100644
index 00000000..d6a608f6
--- /dev/null
+++ b/test/lib/ansible_test/_util/controller/sanity/mypy/modules.ini
@@ -0,0 +1,98 @@
+# IMPORTANT
+# Set "ignore_missing_imports" per package below, rather than globally.
+# That will help identify missing type stubs that should be added to the sanity test environment.
+
+[mypy]
+
+[mypy-ansible.module_utils.six.moves.*]
+ignore_missing_imports = True
+
+[mypy-pexpect.*]
+ignore_missing_imports = True
+
+[mypy-md5.*]
+ignore_missing_imports = True
+
+[mypy-yum.*]
+ignore_missing_imports = True
+
+[mypy-rpmUtils.*]
+ignore_missing_imports = True
+
+[mypy-rpm.*]
+ignore_missing_imports = True
+
+[mypy-psutil.*]
+ignore_missing_imports = True
+
+[mypy-dnf.*]
+ignore_missing_imports = True
+
+[mypy-apt.*]
+ignore_missing_imports = True
+
+[mypy-apt_pkg.*]
+ignore_missing_imports = True
+
+[mypy-gssapi.*]
+ignore_missing_imports = True
+
+[mypy-_ssl.*]
+ignore_missing_imports = True
+
+[mypy-urllib_gssapi.*]
+ignore_missing_imports = True
+
+[mypy-systemd.*]
+ignore_missing_imports = True
+
+[mypy-sha.*]
+ignore_missing_imports = True
+
+[mypy-distro.*]
+ignore_missing_imports = True
+
+[mypy-selectors2.*]
+ignore_missing_imports = True
+
+[mypy-selinux.*]
+ignore_missing_imports = True
+
+[mypy-urllib2.*]
+ignore_missing_imports = True
+
+[mypy-httplib.*]
+ignore_missing_imports = True
+
+[mypy-compiler.*]
+ignore_missing_imports = True
+
+[mypy-aptsources.*]
+ignore_missing_imports = True
+
+[mypy-urllib3.*]
+ignore_missing_imports = True
+
+[mypy-requests.*]
+ignore_missing_imports = True
+
+[mypy-pkg_resources.*]
+ignore_missing_imports = True
+
+[mypy-urllib.*]
+ignore_missing_imports = True
+
+[mypy-email.*]
+ignore_missing_imports = True
+
+[mypy-selectors.*]
+ignore_missing_imports = True
+
+[mypy-importlib.*]
+ignore_missing_imports = True
+
+[mypy-collections.*]
+ignore_missing_imports = True
+
+[mypy-http.*]
+ignore_missing_imports = True
diff --git a/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1 b/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1
index f9d11d9d..0cf3c7fc 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1
+++ b/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1
@@ -4,12 +4,6 @@
$ErrorActionPreference = "Stop"
$WarningPreference = "Stop"
-# Until https://github.com/PowerShell/PSScriptAnalyzer/issues/1217 is fixed we need to import Pester if it's
-# available.
-if (Get-Module -Name Pester -ListAvailable -ErrorAction SilentlyContinue) {
- Import-Module -Name Pester
-}
-
$LiteralPathRule = Import-Module -Name PSSA-PSCustomUseLiteralPath -PassThru
$LiteralPathRulePath = Join-Path -Path $LiteralPathRule.ModuleBase -ChildPath $LiteralPathRule.RootModule
@@ -19,22 +13,24 @@ $PSSAParams = @{
Setting = (Join-Path -Path $PSScriptRoot -ChildPath "settings.psd1")
}
-$Results = @(ForEach ($Path in $Args) {
- $Retries = 3
+$Results = @(
+ ForEach ($Path in $Args) {
+ $Retries = 3
- Do {
- Try {
- Invoke-ScriptAnalyzer -Path $Path @PSSAParams 3> $null
- $Retries = 0
- }
- Catch {
- If (--$Retries -le 0) {
- Throw
+ Do {
+ Try {
+ Invoke-ScriptAnalyzer -Path $Path @PSSAParams 3> $null
+ $Retries = 0
+ }
+ Catch {
+ If (--$Retries -le 0) {
+ Throw
+ }
}
}
+ Until ($Retries -le 0)
}
- Until ($Retries -le 0)
-})
+)
# Since pwsh 7.1 results that exceed depth will produce a warning which fails the process.
# Ignore warnings only for this step.
diff --git a/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1
index 7646ec35..2ae13b4c 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1
+++ b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1
@@ -1,5 +1,41 @@
@{
- ExcludeRules=@(
+ Rules = @{
+ PSAvoidLongLines = @{
+ Enable = $true
+ MaximumLineLength = 160
+ }
+ PSPlaceOpenBrace = @{
+ Enable = $true
+ OnSameLine = $true
+ IgnoreOneLineBlock = $true
+ NewLineAfter = $true
+ }
+ PSPlaceCloseBrace = @{
+ Enable = $true
+ IgnoreOneLineBlock = $true
+ NewLineAfter = $true
+ NoEmptyLineBefore = $false
+ }
+ PSUseConsistentIndentation = @{
+ Enable = $true
+ IndentationSize = 4
+ PipelineIndentation = 'IncreaseIndentationForFirstPipeline'
+ Kind = 'space'
+ }
+ PSUseConsistentWhitespace = @{
+ Enable = $true
+ CheckInnerBrace = $true
+ CheckOpenBrace = $true
+ CheckOpenParen = $true
+ CheckOperator = $true
+ CheckPipe = $true
+ CheckPipeForRedundantWhitespace = $false
+ CheckSeparator = $true
+ CheckParameter = $false
+ IgnoreAssignmentOperatorInsideHashTable = $false
+ }
+ }
+ ExcludeRules = @(
'PSUseOutputTypeCorrectly',
'PSUseShouldProcessForStateChangingFunctions',
# We send strings as plaintext so will always come across the 3 issues
@@ -8,6 +44,9 @@
'PSAvoidUsingUserNameAndPassWordParams',
# We send the module as a base64 encoded string and a BOM will cause
# issues here
- 'PSUseBOMForUnicodeEncodedFile'
+ 'PSUseBOMForUnicodeEncodedFile',
+ # Too many false positives, there are many cases where shared utils
+ # invoke user defined code but not all parameters are used.
+ 'PSReviewUnusedParameter'
)
}
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
index 30e40ba1..7390e041 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
@@ -1,18 +1,13 @@
[MESSAGES CONTROL]
disable=
- consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
- consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
cyclic-import, # consistent results require running with --jobs 1 and testing all files
duplicate-code, # consistent results require running with --jobs 1 and testing all files
- import-error, # inconsistent results which depend on the availability of imports
import-outside-toplevel, # common pattern in ansible related code
- no-name-in-module, # inconsistent results which depend on the availability of imports
no-self-use,
raise-missing-from, # Python 2.x does not support raise from
super-with-arguments, # Python 2.x does not support super without arguments
too-few-public-methods,
- too-many-ancestors, # inconsistent results between python 3.6 and 3.7+
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
@@ -52,3 +47,8 @@ function-rgx=[a-z_][a-z0-9_]{1,40}$
preferred-modules =
distutils.version:ansible.module_utils.compat.version,
+
+# These modules are used by ansible-test, but will not be present in the virtual environment running pylint.
+# Listing them here makes it possible to enable the import-error check.
+ignored-modules =
+ py,
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
index 3c60aa77..76200eb8 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
@@ -3,9 +3,7 @@
disable=
cyclic-import, # consistent results require running with --jobs 1 and testing all files
duplicate-code, # consistent results require running with --jobs 1 and testing all files
- import-error, # inconsistent results which depend on the availability of imports
import-outside-toplevel, # common pattern in ansible related code
- no-name-in-module, # inconsistent results which depend on the availability of imports
no-self-use,
raise-missing-from, # Python 2.x does not support raise from
too-few-public-methods,
@@ -48,3 +46,10 @@ function-rgx=[a-z_][a-z0-9_]{1,40}$
preferred-modules =
distutils.version:ansible.module_utils.compat.version,
+
+# These modules are used by ansible-test, but will not be present in the virtual environment running pylint.
+# Listing them here makes it possible to enable the import-error check.
+ignored-modules =
+ cryptography,
+ coverage,
+ yamllint,
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg
index a1275aa9..7e978176 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg
@@ -18,12 +18,10 @@ disable=
comparison-with-callable,
consider-iterating-dictionary,
consider-merging-isinstance,
- consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
consider-using-dict-items,
consider-using-enumerate,
consider-using-get,
consider-using-in,
- consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
consider-using-ternary,
consider-using-with,
cyclic-import, # consistent results require running with --jobs 1 and testing all files
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
index 234ec217..5f9c90fe 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
@@ -2,8 +2,7 @@
# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import datetime
import re
@@ -159,7 +158,7 @@ class AnsibleDeprecatedChecker(BaseChecker):
def _check_date(self, node, date):
if not isinstance(date, str):
- self.add_message('invalid-date', node=node, args=(date,))
+ self.add_message('ansible-invalid-deprecated-date', node=node, args=(date,))
return
try:
@@ -173,7 +172,11 @@ class AnsibleDeprecatedChecker(BaseChecker):
def _check_version(self, node, version, collection_name):
if not isinstance(version, (str, float)):
- self.add_message('invalid-version', node=node, args=(version,))
+ if collection_name == 'ansible.builtin':
+ symbol = 'ansible-invalid-deprecated-version'
+ else:
+ symbol = 'collection-invalid-deprecated-version'
+ self.add_message(symbol, node=node, args=(version,))
return
version_no = str(version)
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
index 3b9a37e5..934a9ae7 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
@@ -2,8 +2,7 @@
# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import astroid
from pylint.interfaces import IAstroidChecker
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py
index 75a8b57f..6ef5dc2a 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py
@@ -1,7 +1,5 @@
"""A plugin for pylint to identify imports and functions which should not be used."""
-from __future__ import (absolute_import, division, print_function)
-
-__metaclass__ = type
+from __future__ import annotations
import os
import typing as t
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules
deleted file mode 120000
index 11a5d8e1..00000000
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules
+++ /dev/null
@@ -1 +0,0 @@
-main.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate.py
new file mode 100644
index 00000000..ee7e832b
--- /dev/null
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+from validate_modules.main import main
+
+if __name__ == '__main__':
+ main()
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py
index d8ff2dc0..1cfd6ace 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py
@@ -15,6 +15,4 @@
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-__version__ = '0.0.1b'
+from __future__ import annotations
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
index f9eaa02a..0bdd9dee 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
@@ -15,8 +15,7 @@
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import argparse
@@ -33,6 +32,7 @@ import traceback
import warnings
from collections import OrderedDict
+from collections.abc import Mapping
from contextlib import contextmanager
from fnmatch import fnmatch
@@ -65,8 +65,7 @@ def setup_collection_loader():
setup_collection_loader()
from ansible import __version__ as ansible_version
-from ansible.executor.module_common import REPLACER_WINDOWS
-from ansible.module_utils.common._collections_compat import Mapping
+from ansible.executor.module_common import REPLACER_WINDOWS, NEW_STYLE_PYTHON_MODULE_RE
from ansible.module_utils.common.parameters import DEFAULT_TYPE_VALIDATORS
from ansible.module_utils.compat.version import StrictVersion, LooseVersion
from ansible.module_utils.basic import to_bytes
@@ -122,6 +121,11 @@ OS_CALL_REGEX = re.compile(r'os\.call.*')
LOOSE_ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version.split('.')[:3]))
+PLUGINS_WITH_RETURN_VALUES = ('module', )
+PLUGINS_WITH_EXAMPLES = ('module', )
+PLUGINS_WITH_YAML_EXAMPLES = ('module', )
+
+
def is_potential_secret_option(option_name):
if not NO_LOG_REGEX.search(option_name):
return False
@@ -304,14 +308,15 @@ class ModuleValidator(Validator):
ACCEPTLIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function'))
def __init__(self, path, analyze_arg_spec=False, collection=None, collection_version=None,
- base_branch=None, git_cache=None, reporter=None, routing=None):
+ base_branch=None, git_cache=None, reporter=None, routing=None, plugin_type='module'):
super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
self.path = path
self.basename = os.path.basename(self.path)
self.name = os.path.splitext(self.basename)[0]
+ self.plugin_type = plugin_type
- self.analyze_arg_spec = analyze_arg_spec
+ self.analyze_arg_spec = analyze_arg_spec and plugin_type == 'module'
self._Version = LooseVersion
self._StrictVersion = StrictVersion
@@ -459,7 +464,15 @@ class ModuleValidator(Validator):
)
return
+ missing_python_interpreter = False
+
if not self.text.startswith('#!/usr/bin/python'):
+ if NEW_STYLE_PYTHON_MODULE_RE.search(to_bytes(self.text)):
+ missing_python_interpreter = self.text.startswith('#!') # shebang optional, but if present must match
+ else:
+ missing_python_interpreter = True # shebang required
+
+ if missing_python_interpreter:
self.reporter.error(
path=self.object_path,
code='missing-python-interpreter',
@@ -907,7 +920,9 @@ class ModuleValidator(Validator):
# We are testing a collection
if self.routing:
- routing_deprecation = self.routing.get('plugin_routing', {}).get('modules', {}).get(self.name, {}).get('deprecation', {})
+ routing_deprecation = self.routing.get('plugin_routing', {})
+ routing_deprecation = routing_deprecation.get('modules' if self.plugin_type == 'module' else self.plugin_type, {})
+ routing_deprecation = routing_deprecation.get(self.name, {}).get('deprecation', {})
if routing_deprecation:
# meta/runtime.yml says this is deprecated
routing_says_deprecated = True
@@ -928,7 +943,8 @@ class ModuleValidator(Validator):
self.name, 'DOCUMENTATION'
)
if doc:
- add_collection_to_versions_and_dates(doc, self.collection_name, is_module=True)
+ add_collection_to_versions_and_dates(doc, self.collection_name,
+ is_module=self.plugin_type == 'module')
for error in errors:
self.reporter.error(
path=self.object_path,
@@ -945,7 +961,8 @@ class ModuleValidator(Validator):
with CaptureStd():
try:
get_docstring(self.path, fragment_loader, verbose=True,
- collection_name=self.collection_name, is_module=True)
+ collection_name=self.collection_name,
+ is_module=self.plugin_type == 'module')
except AssertionError:
fragment = doc['extends_documentation_fragment']
self.reporter.error(
@@ -966,7 +983,8 @@ class ModuleValidator(Validator):
)
if not missing_fragment:
- add_fragments(doc, self.object_path, fragment_loader=fragment_loader, is_module=True)
+ add_fragments(doc, self.object_path, fragment_loader=fragment_loader,
+ is_module=self.plugin_type == 'module')
if 'options' in doc and doc['options'] is None:
self.reporter.error(
@@ -998,6 +1016,7 @@ class ModuleValidator(Validator):
os.readlink(self.object_path).split('.')[0],
for_collection=bool(self.collection),
deprecated_module=deprecated,
+ plugin_type=self.plugin_type,
),
'DOCUMENTATION',
'invalid-documentation',
@@ -1010,6 +1029,7 @@ class ModuleValidator(Validator):
self.object_name.split('.')[0],
for_collection=bool(self.collection),
deprecated_module=deprecated,
+ plugin_type=self.plugin_type,
),
'DOCUMENTATION',
'invalid-documentation',
@@ -1020,12 +1040,13 @@ class ModuleValidator(Validator):
self._check_version_added(doc, existing_doc)
if not bool(doc_info['EXAMPLES']['value']):
- self.reporter.error(
- path=self.object_path,
- code='missing-examples',
- msg='No EXAMPLES provided'
- )
- else:
+ if self.plugin_type in PLUGINS_WITH_EXAMPLES:
+ self.reporter.error(
+ path=self.object_path,
+ code='missing-examples',
+ msg='No EXAMPLES provided'
+ )
+ elif self.plugin_type in PLUGINS_WITH_YAML_EXAMPLES:
_doc, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'],
doc_info['EXAMPLES']['lineno'],
self.name, 'EXAMPLES', load_all=True,
@@ -1043,25 +1064,28 @@ class ModuleValidator(Validator):
)
if not bool(doc_info['RETURN']['value']):
- if self._is_new_module():
- self.reporter.error(
- path=self.object_path,
- code='missing-return',
- msg='No RETURN provided'
- )
- else:
- self.reporter.warning(
- path=self.object_path,
- code='missing-return-legacy',
- msg='No RETURN provided'
- )
+ if self.plugin_type in PLUGINS_WITH_RETURN_VALUES:
+ if self._is_new_module():
+ self.reporter.error(
+ path=self.object_path,
+ code='missing-return',
+ msg='No RETURN provided'
+ )
+ else:
+ self.reporter.warning(
+ path=self.object_path,
+ code='missing-return-legacy',
+ msg='No RETURN provided'
+ )
else:
data, errors, traces = parse_yaml(doc_info['RETURN']['value'],
doc_info['RETURN']['lineno'],
self.name, 'RETURN')
if data:
- add_collection_to_versions_and_dates(data, self.collection_name, is_module=True, return_docs=True)
- self._validate_docs_schema(data, return_schema(for_collection=bool(self.collection)),
+ add_collection_to_versions_and_dates(data, self.collection_name,
+ is_module=self.plugin_type == 'module', return_docs=True)
+ self._validate_docs_schema(data,
+ return_schema(for_collection=bool(self.collection), plugin_type=self.plugin_type),
'RETURN', 'return-syntax-error')
for error in errors:
@@ -1414,7 +1438,8 @@ class ModuleValidator(Validator):
try:
if not context:
- add_fragments(docs, self.object_path, fragment_loader=fragment_loader, is_module=True)
+ add_fragments(docs, self.object_path, fragment_loader=fragment_loader,
+ is_module=self.plugin_type == 'module')
except Exception:
# Cannot merge fragments
return
@@ -1902,16 +1927,6 @@ class ModuleValidator(Validator):
doc_elements = doc_options_arg.get('elements', None)
doc_type = doc_options_arg.get('type', 'str')
data_elements = data.get('elements', None)
- if (doc_elements and not doc_type == 'list'):
- msg = "Argument '%s' " % arg
- if context:
- msg += " found in %s" % " -> ".join(context)
- msg += " defines parameter elements as %s but it is valid only when value of parameter type is list" % doc_elements
- self.reporter.error(
- path=self.object_path,
- code='doc-elements-invalid',
- msg=msg
- )
if (doc_elements or data_elements) and not (doc_elements == data_elements):
msg = "Argument '%s' in argument_spec" % arg
if context:
@@ -1998,7 +2013,8 @@ class ModuleValidator(Validator):
with CaptureStd():
try:
existing_doc, dummy_examples, dummy_return, existing_metadata = get_docstring(
- self.base_module, fragment_loader, verbose=True, collection_name=self.collection_name, is_module=True)
+ self.base_module, fragment_loader, verbose=True, collection_name=self.collection_name,
+ is_module=self.plugin_type == 'module')
existing_options = existing_doc.get('options', {}) or {}
except AssertionError:
fragment = doc['extends_documentation_fragment']
@@ -2200,15 +2216,18 @@ class ModuleValidator(Validator):
pass
if self._python_module() and not self._just_docs() and not end_of_deprecation_should_be_removed_only:
- self._validate_ansible_module_call(docs)
+ if self.plugin_type == 'module':
+ self._validate_ansible_module_call(docs)
self._check_for_sys_exit()
self._find_rejectlist_imports()
- self._find_module_utils()
+ if self.plugin_type == 'module':
+ self._find_module_utils()
self._find_has_import()
first_callable = self._get_first_callable() or 1000000 # use a bogus "high" line number if no callable exists
self._ensure_imports_below_docs(doc_info, first_callable)
- self._check_for_subprocess()
- self._check_for_os_call()
+ if self.plugin_type == 'module':
+ self._check_for_subprocess()
+ self._check_for_os_call()
if self._powershell_module():
if self.basename in self.PS_DOC_REJECTLIST:
@@ -2226,7 +2245,8 @@ class ModuleValidator(Validator):
self._check_gpl3_header()
if not self._just_docs() and not end_of_deprecation_should_be_removed_only:
- self._check_interpreter(powershell=self._powershell_module())
+ if self.plugin_type == 'module':
+ self._check_interpreter(powershell=self._powershell_module())
self._check_type_instead_of_isinstance(
powershell=self._powershell_module()
)
@@ -2281,8 +2301,8 @@ def re_compile(value):
def run():
parser = argparse.ArgumentParser(prog="validate-modules")
- parser.add_argument('modules', nargs='+',
- help='Path to module or module directory')
+ parser.add_argument('plugins', nargs='+',
+ help='Path to module/plugin or module/plugin directory')
parser.add_argument('-w', '--warnings', help='Show warnings',
action='store_true')
parser.add_argument('--exclude', help='RegEx exclusion pattern',
@@ -2304,13 +2324,16 @@ def run():
parser.add_argument('--collection-version',
help='The collection\'s version number used to check '
'deprecations')
+ parser.add_argument('--plugin-type',
+ default='module',
+ help='The plugin type to validate. Defaults to %(default)s')
args = parser.parse_args()
- args.modules = [m.rstrip('/') for m in args.modules]
+ args.plugins = [m.rstrip('/') for m in args.plugins]
reporter = Reporter()
- git_cache = GitCache(args.base_branch)
+ git_cache = GitCache(args.base_branch, args.plugin_type)
check_dirs = set()
@@ -2327,25 +2350,26 @@ def run():
except Exception as ex: # pylint: disable=broad-except
print('%s:%d:%d: YAML load failed: %s' % (routing_file, 0, 0, re.sub(r'\s+', ' ', str(ex))))
- for module in args.modules:
- if os.path.isfile(module):
- path = module
+ for plugin in args.plugins:
+ if os.path.isfile(plugin):
+ path = plugin
if args.exclude and args.exclude.search(path):
continue
if ModuleValidator.is_on_rejectlist(path):
continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
- git_cache=git_cache, reporter=reporter, routing=routing) as mv1:
+ git_cache=git_cache, reporter=reporter, routing=routing,
+ plugin_type=args.plugin_type) as mv1:
mv1.validate()
check_dirs.add(os.path.dirname(path))
- for root, dirs, files in os.walk(module):
- basedir = root[len(module) + 1:].split('/', 1)[0]
+ for root, dirs, files in os.walk(plugin):
+ basedir = root[len(plugin) + 1:].split('/', 1)[0]
if basedir in REJECTLIST_DIRS:
continue
for dirname in dirs:
- if root == module and dirname in REJECTLIST_DIRS:
+ if root == plugin and dirname in REJECTLIST_DIRS:
continue
path = os.path.join(root, dirname)
if args.exclude and args.exclude.search(path):
@@ -2360,10 +2384,11 @@ def run():
continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
- git_cache=git_cache, reporter=reporter, routing=routing) as mv2:
+ git_cache=git_cache, reporter=reporter, routing=routing,
+ plugin_type=args.plugin_type) as mv2:
mv2.validate()
- if not args.collection:
+ if not args.collection and args.plugin_type == 'module':
for path in sorted(check_dirs):
pv = PythonPackageValidator(path, reporter=reporter)
pv.validate()
@@ -2375,16 +2400,21 @@ def run():
class GitCache:
- def __init__(self, base_branch):
+ def __init__(self, base_branch, plugin_type):
self.base_branch = base_branch
+ self.plugin_type = plugin_type
+
+ self.rel_path = 'lib/ansible/modules/'
+ if plugin_type != 'module':
+ self.rel_path = 'lib/ansible/plugins/%s/' % plugin_type
if self.base_branch:
- self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, 'lib/ansible/modules/'])
+ self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, self.rel_path])
else:
self.base_tree = []
try:
- self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', 'lib/ansible/modules/'])
+ self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', self.rel_path])
except GitError as ex:
if ex.status == 128:
# fallback when there is no .git directory
@@ -2398,7 +2428,10 @@ class GitCache:
else:
raise
- self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in ('.py', '.ps1'))
+ allowed_exts = ('.py', '.ps1')
+ if plugin_type != 'module':
+ allowed_exts = ('.py', )
+ self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in allowed_exts)
self.base_module_paths.pop('__init__.py', None)
@@ -2411,11 +2444,10 @@ class GitCache:
if os.path.islink(path):
self.head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
- @staticmethod
- def _get_module_files():
+ def _get_module_files(self):
module_files = []
- for (dir_path, dir_names, file_names) in os.walk('lib/ansible/modules/'):
+ for (dir_path, dir_names, file_names) in os.walk(self.rel_path):
for file_name in file_names:
module_files.append(os.path.join(dir_path, file_name))
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
index 3846ee5d..ee938142 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
@@ -15,8 +15,7 @@
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import runpy
import inspect
@@ -117,9 +116,9 @@ def get_ps_argument_spec(filename, collection):
ps_dep_finder._add_module(name=b"Ansible.ModuleUtils.AddType", ext=".psm1", fqn=None, optional=False, wrapper=False)
util_manifest = json.dumps({
- 'module_path': to_text(module_path, errors='surrogiate_or_strict'),
+ 'module_path': to_text(module_path, errors='surrogate_or_strict'),
'ansible_basic': ps_dep_finder.cs_utils_module["Ansible.Basic"]['path'],
- 'ps_utils': dict([(name, info['path']) for name, info in ps_dep_finder.ps_modules.items()]),
+ 'ps_utils': {name: info['path'] for name, info in ps_dep_finder.ps_modules.items()}
})
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ps_argspec.ps1')
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1 b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1
index fb4a6174..23610e3e 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1
@@ -14,7 +14,7 @@ Function Resolve-CircularReference {
#>
[CmdletBinding()]
param (
- [Parameter(Mandatory=$true)]
+ [Parameter(Mandatory = $true)]
[System.Collections.IDictionary]
$Hash
)
@@ -23,22 +23,26 @@ Function Resolve-CircularReference {
$value = $Hash[$key]
if ($value -is [System.Collections.IDictionary]) {
Resolve-CircularReference -Hash $value
- } elseif ($value -is [Array] -or $value -is [System.Collections.IList]) {
+ }
+ elseif ($value -is [Array] -or $value -is [System.Collections.IList]) {
$values = @(foreach ($v in $value) {
- if ($v -is [System.Collections.IDictionary]) {
- Resolve-CircularReference -Hash $v
- }
- ,$v
- })
+ if ($v -is [System.Collections.IDictionary]) {
+ Resolve-CircularReference -Hash $v
+ }
+ , $v
+ })
$Hash[$key] = $values
- } elseif ($value -is [DateTime]) {
+ }
+ elseif ($value -is [DateTime]) {
$Hash[$key] = $value.ToString("yyyy-MM-dd")
- } elseif ($value -is [delegate]) {
+ }
+ elseif ($value -is [delegate]) {
# Type can be set to a delegate function which defines it's own type. For the documentation we just
# reflection that as raw
if ($key -eq 'type') {
$Hash[$key] = 'raw'
- } else {
+ }
+ else {
$Hash[$key] = $value.ToString() # Shouldn't ever happen but just in case.
}
}
@@ -81,9 +85,9 @@ if ($manifest.Contains('ps_utils')) {
$util_sb = [ScriptBlock]::Create((Get-Content -LiteralPath $util_path -Raw))
$powershell.AddCommand('New-Module').AddParameters(@{
- Name = $util_name
- ScriptBlock = $util_sb
- }) > $null
+ Name = $util_name
+ ScriptBlock = $util_sb
+ }) > $null
$powershell.AddCommand('Import-Module').AddParameter('WarningAction', 'SilentlyContinue') > $null
$powershell.AddCommand('Out-Null').AddStatement() > $null
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py
index ed098cbc..67e44b79 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py
@@ -3,17 +3,19 @@
# Copyright: (c) 2015, Matt Martz <matt@sivel.net>
# Copyright: (c) 2015, Rackspace US, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
from ansible.module_utils.compat.version import StrictVersion
from functools import partial
+from urllib.parse import urlparse
-from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid
+from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid, Exclusive
from ansible.module_utils.six import string_types
from ansible.module_utils.common.collections import is_iterable
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.parsing.quoting import unquote
from ansible.utils.version import SemanticVersion
from ansible.release import __version__
@@ -46,7 +48,8 @@ def isodate(v, error_code=None):
return v
-COLLECTION_NAME_RE = re.compile(r'^([^.]+(\.[^.]+)+)$')
+COLLECTION_NAME_RE = re.compile(r'^\w+(?:\.\w+)+$')
+FULLY_QUALIFIED_COLLECTION_RESOURCE_RE = re.compile(r'^\w+(?:\.\w+){2,}$')
def collection_name(v, error_code=None):
@@ -77,6 +80,70 @@ def date(error_code=None):
return Any(isodate, error_code=error_code)
+_MODULE = re.compile(r"\bM\(([^)]+)\)")
+_LINK = re.compile(r"\bL\(([^)]+)\)")
+_URL = re.compile(r"\bU\(([^)]+)\)")
+_REF = re.compile(r"\bR\(([^)]+)\)")
+
+
+def _check_module_link(directive, content):
+ if not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(content):
+ raise _add_ansible_error_code(
+ Invalid('Directive "%s" must contain a FQCN' % directive), 'invalid-documentation-markup')
+
+
+def _check_link(directive, content):
+ if ',' not in content:
+ raise _add_ansible_error_code(
+ Invalid('Directive "%s" must contain a comma' % directive), 'invalid-documentation-markup')
+ idx = content.rindex(',')
+ title = content[:idx]
+ url = content[idx + 1:].lstrip(' ')
+ _check_url(directive, url)
+
+
+def _check_url(directive, content):
+ try:
+ parsed_url = urlparse(content)
+ if parsed_url.scheme not in ('', 'http', 'https'):
+ raise ValueError('Scheme must be HTTP, HTTPS, or not specified')
+ except ValueError as exc:
+ raise _add_ansible_error_code(
+ Invalid('Directive "%s" must contain a URL' % directive), 'invalid-documentation-markup')
+
+
+def _check_ref(directive, content):
+ if ',' not in content:
+ raise _add_ansible_error_code(
+ Invalid('Directive "%s" must contain a comma' % directive), 'invalid-documentation-markup')
+
+
+def doc_string(v):
+ """Match a documentation string."""
+ if not isinstance(v, string_types):
+ raise _add_ansible_error_code(
+ Invalid('Must be a string'), 'invalid-documentation')
+ for m in _MODULE.finditer(v):
+ _check_module_link(m.group(0), m.group(1))
+ for m in _LINK.finditer(v):
+ _check_link(m.group(0), m.group(1))
+ for m in _URL.finditer(v):
+ _check_url(m.group(0), m.group(1))
+ for m in _REF.finditer(v):
+ _check_ref(m.group(0), m.group(1))
+ return v
+
+
+def doc_string_or_strings(v):
+ """Match a documentation string, or list of strings."""
+ if isinstance(v, string_types):
+ return doc_string(v)
+ if isinstance(v, (list, tuple)):
+ return [doc_string(vv) for vv in v]
+ raise _add_ansible_error_code(
+ Invalid('Must be a string or list of strings'), 'invalid-documentation')
+
+
def is_callable(v):
if not callable(v):
raise ValueInvalid('not a valid value')
@@ -103,16 +170,16 @@ seealso_schema = Schema(
Any(
{
Required('module'): Any(*string_types),
- 'description': Any(*string_types),
+ 'description': doc_string,
},
{
Required('ref'): Any(*string_types),
- Required('description'): Any(*string_types),
+ Required('description'): doc_string,
},
{
Required('name'): Any(*string_types),
Required('link'): Any(*string_types),
- Required('description'): Any(*string_types),
+ Required('description'): doc_string,
},
),
]
@@ -319,47 +386,267 @@ def version_added(v, error_code='version-added-invalid', accept_historical=False
return v
-def list_dict_option_schema(for_collection):
- suboption_schema = Schema(
- {
- Required('description'): Any(list_string_types, *string_types),
- 'required': bool,
- 'choices': list,
- 'aliases': Any(list_string_types),
- 'version_added': version(for_collection),
- 'version_added_collection': collection_name,
- 'default': json_value,
- # Note: Types are strings, not literal bools, such as True or False
- 'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- # in case of type='list' elements define type of individual item in list
- 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- # Recursive suboptions
- 'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)),
- },
- extra=PREVENT_EXTRA
- )
+def check_option_elements(v):
+ # Check whether elements is there iff type == 'list'
+ v_type = v.get('type')
+ v_elements = v.get('elements')
+ if v_type == 'list' and v_elements is None:
+ raise _add_ansible_error_code(
+ Invalid('Argument defines type as list but elements is not defined'),
+ error_code='parameter-list-no-elements') # FIXME: adjust error code?
+ if v_type != 'list' and v_elements is not None:
+ raise _add_ansible_error_code(
+ Invalid('Argument defines parameter elements as %s but it is valid only when value of parameter type is list' % (v_elements, )),
+ error_code='doc-elements-invalid')
+ return v
+
+
+def get_type_checker(v):
+ v_type = v.get('type')
+ if v_type == 'list':
+ elt_checker, elt_name = get_type_checker({'type': v.get('elements')})
+
+ def list_checker(value):
+ if isinstance(value, string_types):
+ value = [unquote(x.strip()) for x in value.split(',')]
+ if not isinstance(value, list):
+ raise ValueError('Value must be a list')
+ if elt_checker:
+ for elt in value:
+ try:
+ elt_checker(elt)
+ except Exception as exc:
+ raise ValueError('Entry %r is not of type %s: %s' % (elt, elt_name, exc))
+
+ return list_checker, ('list of %s' % elt_name) if elt_checker else 'list'
+
+ if v_type in ('boolean', 'bool'):
+ return partial(boolean, strict=False), v_type
+
+ if v_type in ('integer', 'int'):
+ return int, v_type
+
+ if v_type == 'float':
+ return float, v_type
+
+ if v_type == 'none':
+ def none_checker(value):
+ if value not in ('None', None):
+ raise ValueError('Value must be "None" or none')
+
+ return none_checker, v_type
+
+ if v_type in ('str', 'string', 'path', 'tmp', 'temppath', 'tmppath'):
+ def str_checker(value):
+ if not isinstance(value, string_types):
+ raise ValueError('Value must be string')
+
+ return str_checker, v_type
+
+ if v_type in ('pathspec', 'pathlist'):
+ def path_list_checker(value):
+ if not isinstance(value, string_types) and not is_iterable(value):
+ raise ValueError('Value must be string or list of strings')
+
+ return path_list_checker, v_type
+
+ if v_type in ('dict', 'dictionary'):
+ def dict_checker(value):
+ if not isinstance(value, dict):
+ raise ValueError('Value must be dictionary')
+
+ return dict_checker, v_type
+
+ return None, 'unknown'
+
+
+def check_option_choices(v):
+ # Check whether choices have the correct type
+ v_choices = v.get('choices')
+ if not is_iterable(v_choices):
+ return v
+
+ if v.get('type') == 'list':
+ # choices for a list type means that every list element must be one of these choices
+ type_checker, type_name = get_type_checker({'type': v.get('elements')})
+ else:
+ type_checker, type_name = get_type_checker(v)
+ if type_checker is None:
+ return v
+
+ for value in v_choices:
+ try:
+ type_checker(value)
+ except Exception as exc:
+ raise _add_ansible_error_code(
+ Invalid(
+ 'Argument defines choices as (%r) but this is incompatible with argument type %s: %s' % (value, type_name, exc)),
+ error_code='doc-choices-incompatible-type')
+
+ return v
+
+
+def check_option_default(v):
+ # Check whether default is only present if required=False, and whether default has correct type
+ v_default = v.get('default')
+ if v.get('required') and v_default is not None:
+ raise _add_ansible_error_code(
+ Invalid(
+ 'Argument is marked as required but specifies a default.'
+ ' Arguments with a default should not be marked as required'),
+ error_code='no-default-for-required-parameter') # FIXME: adjust error code?
+
+ if v_default is None:
+ return v
+
+ type_checker, type_name = get_type_checker(v)
+ if type_checker is None:
+ return v
+
+ try:
+ type_checker(v_default)
+ except Exception as exc:
+ raise _add_ansible_error_code(
+ Invalid(
+ 'Argument defines default as (%r) but this is incompatible with parameter type %s: %s' % (v_default, type_name, exc)),
+ error_code='incompatible-default-type')
+
+ return v
+
+
+def list_dict_option_schema(for_collection, plugin_type):
+ if plugin_type == 'module':
+ option_types = Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str')
+ element_types = option_types
+ else:
+ option_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float', 'list', 'dict', 'dictionary', 'none',
+ 'path', 'tmp', 'temppath', 'tmppath', 'pathspec', 'pathlist', 'str', 'string', 'raw')
+ element_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float', 'list', 'dict', 'dictionary', 'path', 'str', 'string', 'raw')
+
+ basic_option_schema = {
+ Required('description'): doc_string_or_strings,
+ 'required': bool,
+ 'choices': list,
+ 'aliases': Any(list_string_types),
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ 'default': json_value,
+ # Note: Types are strings, not literal bools, such as True or False
+ 'type': option_types,
+ # in case of type='list' elements define type of individual item in list
+ 'elements': element_types,
+ }
+ if plugin_type != 'module':
+ basic_option_schema['name'] = Any(*string_types)
+ deprecated_schema = All(
+ Schema(
+ All(
+ {
+ # This definition makes sure everything has the correct types/values
+ 'why': doc_string,
+ 'alternatives': doc_string,
+ # vod stands for 'version or date'; this is the name of the exclusive group
+ Exclusive('removed_at_date', 'vod'): date(),
+ Exclusive('version', 'vod'): version(for_collection),
+ 'collection_name': collection_name,
+ },
+ {
+ # This definition makes sure that everything we require is there
+ Required('why'): Any(*string_types),
+ 'alternatives': Any(*string_types),
+ Required(Any('removed_at_date', 'version')): Any(*string_types),
+ Required('collection_name'): Any(*string_types),
+ },
+ ),
+ extra=PREVENT_EXTRA
+ ),
+ partial(check_removal_version,
+ version_field='version',
+ collection_name_field='collection_name',
+ error_code='invalid-removal-version'),
+ )
+ env_schema = All(
+ Schema({
+ Required('name'): Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ ini_schema = All(
+ Schema({
+ Required('key'): Any(*string_types),
+ Required('section'): Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ vars_schema = All(
+ Schema({
+ Required('name'): Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ cli_schema = All(
+ Schema({
+ Required('name'): Any(*string_types),
+ 'option': Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ keyword_schema = All(
+ Schema({
+ Required('name'): Any(*string_types),
+ 'deprecated': deprecated_schema,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ }, extra=PREVENT_EXTRA),
+ partial(version_added, error_code='option-invalid-version-added')
+ )
+ basic_option_schema.update({
+ 'env': [env_schema],
+ 'ini': [ini_schema],
+ 'vars': [vars_schema],
+ 'cli': [cli_schema],
+ 'keyword': [keyword_schema],
+ 'deprecated': deprecated_schema,
+ })
+
+ suboption_schema = dict(basic_option_schema)
+ suboption_schema.update({
+ # Recursive suboptions
+ 'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)),
+ })
+ suboption_schema = Schema(All(
+ suboption_schema,
+ check_option_elements,
+ check_option_choices,
+ check_option_default,
+ ), extra=PREVENT_EXTRA)
# This generates list of dicts with keys from string_types and suboption_schema value
# for example in Python 3: {str: suboption_schema}
list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types]
- option_schema = Schema(
- {
- Required('description'): Any(list_string_types, *string_types),
- 'required': bool,
- 'choices': list,
- 'aliases': Any(list_string_types),
- 'version_added': version(for_collection),
- 'version_added_collection': collection_name,
- 'default': json_value,
- 'suboptions': Any(None, *list_dict_suboption_schema),
- # Note: Types are strings, not literal bools, such as True or False
- 'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- # in case of type='list' elements define type of individual item in list
- 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- },
- extra=PREVENT_EXTRA
- )
+ option_schema = dict(basic_option_schema)
+ option_schema.update({
+ 'suboptions': Any(None, *list_dict_suboption_schema),
+ })
+ option_schema = Schema(All(
+ option_schema,
+ check_option_elements,
+ check_option_choices,
+ check_option_default,
+ ), extra=PREVENT_EXTRA)
option_version_added = Schema(
All({
@@ -385,23 +672,38 @@ def return_contains(v):
return v
-def return_schema(for_collection):
+def return_schema(for_collection, plugin_type='module'):
+ if plugin_type == 'module':
+ return_types = Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str')
+ element_types = Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str')
+ else:
+ return_types = Any(None, 'boolean', 'bool', 'integer', 'int', 'float', 'list', 'dict', 'dictionary', 'path', 'str', 'string', 'raw')
+ element_types = return_types
+
+ basic_return_option_schema = {
+ Required('description'): doc_string_or_strings,
+ 'returned': doc_string,
+ 'version_added': version(for_collection),
+ 'version_added_collection': collection_name,
+ 'sample': json_value,
+ 'example': json_value,
+ # in case of type='list' elements define type of individual item in list
+ 'elements': element_types,
+ 'choices': Any([object], (object,)),
+ }
+ if plugin_type == 'module':
+ # type is only required for modules right now
+ basic_return_option_schema[Required('type')] = return_types
+ else:
+ basic_return_option_schema['type'] = return_types
+
+ inner_return_option_schema = dict(basic_return_option_schema)
+ inner_return_option_schema.update({
+ 'contains': Any(None, *list({str_type: Self} for str_type in string_types)),
+ })
return_contains_schema = Any(
All(
- Schema(
- {
- Required('description'): Any(list_string_types, *string_types),
- 'returned': Any(*string_types), # only returned on top level
- Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'),
- 'version_added': version(for_collection),
- 'version_added_collection': collection_name,
- 'sample': json_value,
- 'example': json_value,
- 'contains': Any(None, *list({str_type: Self} for str_type in string_types)),
- # in case of type='list' elements define type of individual item in list
- 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- }
- ),
+ Schema(inner_return_option_schema),
Schema(return_contains),
Schema(partial(version_added, error_code='option-invalid-version-added')),
),
@@ -412,22 +714,19 @@ def return_schema(for_collection):
# for example in Python 3: {str: return_contains_schema}
list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types]
+ return_option_schema = dict(basic_return_option_schema)
+ return_option_schema.update({
+ 'contains': Any(None, *list_dict_return_contains_schema),
+ })
+ if plugin_type == 'module':
+ # 'returned' is required on top-level
+ del return_option_schema['returned']
+ return_option_schema[Required('returned')] = Any(*string_types)
return Any(
All(
Schema(
{
- any_string_types: {
- Required('description'): Any(list_string_types, *string_types),
- Required('returned'): Any(*string_types),
- Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'),
- 'version_added': version(for_collection),
- 'version_added_collection': collection_name,
- 'sample': json_value,
- 'example': json_value,
- 'contains': Any(None, *list_dict_return_contains_schema),
- # in case of type='list' elements define type of individual item in list
- 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
- }
+ any_string_types: return_option_schema
}
),
Schema({any_string_types: return_contains}),
@@ -439,8 +738,8 @@ def return_schema(for_collection):
def deprecation_schema(for_collection):
main_fields = {
- Required('why'): Any(*string_types),
- Required('alternative'): Any(*string_types),
+ Required('why'): doc_string,
+ Required('alternative'): doc_string,
Required('removed_from_collection'): collection_name,
'removed': Any(True),
}
@@ -493,24 +792,35 @@ def author(value):
return value
-def doc_schema(module_name, for_collection=False, deprecated_module=False):
+def doc_schema(module_name, for_collection=False, deprecated_module=False, plugin_type='module'):
if module_name.startswith('_'):
module_name = module_name[1:]
deprecated_module = True
+ if for_collection is False and plugin_type == 'connection' and module_name == 'paramiko_ssh':
+ # The plugin loader has a hard-coded exception: when the builtin connection 'paramiko' is
+ # referenced, it loads 'paramiko_ssh' instead. That's why in this plugin, the name must be
+ # 'paramiko' and not 'paramiko_ssh'.
+ module_name = 'paramiko'
doc_schema_dict = {
- Required('module'): module_name,
- Required('short_description'): Any(*string_types),
- Required('description'): Any(list_string_types, *string_types),
- Required('author'): All(Any(None, list_string_types, *string_types), author),
- 'notes': Any(None, list_string_types),
+ Required('module' if plugin_type == 'module' else 'name'): module_name,
+ Required('short_description'): doc_string,
+ Required('description'): doc_string_or_strings,
+ 'notes': Any(None, [doc_string]),
'seealso': Any(None, seealso_schema),
- 'requirements': list_string_types,
- 'todo': Any(None, list_string_types, *string_types),
- 'options': Any(None, *list_dict_option_schema(for_collection)),
+ 'requirements': [doc_string],
+ 'todo': Any(None, doc_string_or_strings),
+ 'options': Any(None, *list_dict_option_schema(for_collection, plugin_type)),
'extends_documentation_fragment': Any(list_string_types, *string_types),
'version_added_collection': collection_name,
}
+ if plugin_type == 'module':
+ doc_schema_dict[Required('author')] = All(Any(None, list_string_types, *string_types), author)
+ else:
+ # author is optional for plugins (for now)
+ doc_schema_dict['author'] = All(Any(None, list_string_types, *string_types), author)
+ if plugin_type == 'callback':
+ doc_schema_dict[Required('type')] = Any('aggregate', 'notification', 'stdout')
if for_collection:
# Optional
@@ -527,8 +837,8 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False):
def add_default_attributes(more=None):
schema = {
- 'description': Any(list_string_types, *string_types),
- 'details': Any(list_string_types, *string_types),
+ 'description': doc_string_or_strings,
+ 'details': doc_string_or_strings,
'support': any_string_types,
'version_added_collection': any_string_types,
'version_added': any_string_types,
@@ -541,9 +851,9 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False):
All(
Schema({
any_string_types: {
- Required('description'): Any(list_string_types, *string_types),
+ Required('description'): doc_string_or_strings,
Required('support'): Any('full', 'partial', 'none', 'N/A'),
- 'details': Any(list_string_types, *string_types),
+ 'details': doc_string_or_strings,
'version_added_collection': collection_name,
'version_added': version(for_collection=for_collection),
},
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
index ac46f666..5b20db8d 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
@@ -15,8 +15,7 @@
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import ast
import datetime
diff --git a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
index 7f9df40a..f4b36101 100644
--- a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
+++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
@@ -1,6 +1,5 @@
"""Wrapper around yamllint that supports YAML embedded in Ansible modules."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import ast
import json
diff --git a/test/lib/ansible_test/_util/controller/tools/collection_detail.py b/test/lib/ansible_test/_util/controller/tools/collection_detail.py
index e7c883ca..4ab6631a 100644
--- a/test/lib/ansible_test/_util/controller/tools/collection_detail.py
+++ b/test/lib/ansible_test/_util/controller/tools/collection_detail.py
@@ -1,6 +1,5 @@
"""Retrieve collection detail."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
diff --git a/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1 b/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1
index 83c27ff7..fcc45703 100644
--- a/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1
+++ b/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1
@@ -9,30 +9,32 @@ param (
$Path
)
-$stubInfo = @(foreach ($sourcePath in $Path) {
- # Default is to just no lines for missing files
- [Collections.Generic.HashSet[int]]$lines = @()
+$stubInfo = @(
+ foreach ($sourcePath in $Path) {
+ # Default is to just no lines for missing files
+ [Collections.Generic.HashSet[int]]$lines = @()
- if (Test-Path -LiteralPath $sourcePath) {
- $code = [ScriptBlock]::Create([IO.File]::ReadAllText($sourcePath))
+ if (Test-Path -LiteralPath $sourcePath) {
+ $code = [ScriptBlock]::Create([IO.File]::ReadAllText($sourcePath))
- # We set our breakpoints with this predicate so our stubs should match
- # that logic.
- $predicate = {
- $args[0] -is [System.Management.Automation.Language.CommandBaseAst]
- }
- $cmds = $code.Ast.FindAll($predicate, $true)
+ # We set our breakpoints with this predicate so our stubs should match
+ # that logic.
+ $predicate = {
+ $args[0] -is [System.Management.Automation.Language.CommandBaseAst]
+ }
+ $cmds = $code.Ast.FindAll($predicate, $true)
- # We only care about unique lines not multiple commands on 1 line.
- $lines = @(foreach ($cmd in $cmds) {
- $cmd.Extent.StartLineNumber
- })
- }
+ # We only care about unique lines not multiple commands on 1 line.
+ $lines = @(foreach ($cmd in $cmds) {
+ $cmd.Extent.StartLineNumber
+ })
+ }
- [PSCustomObject]@{
- Path = $sourcePath
- Lines = $lines
+ [PSCustomObject]@{
+ Path = $sourcePath
+ Lines = $lines
+ }
}
-})
+)
ConvertTo-Json -InputObject $stubInfo -Depth 2 -Compress
diff --git a/test/lib/ansible_test/_util/controller/tools/sslcheck.py b/test/lib/ansible_test/_util/controller/tools/sslcheck.py
index 115c5ed2..c25fed61 100644
--- a/test/lib/ansible_test/_util/controller/tools/sslcheck.py
+++ b/test/lib/ansible_test/_util/controller/tools/sslcheck.py
@@ -1,6 +1,5 @@
"""Show openssl version."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
diff --git a/test/lib/ansible_test/_util/controller/tools/yaml_to_json.py b/test/lib/ansible_test/_util/controller/tools/yaml_to_json.py
index 1164168e..e2a15bf0 100644
--- a/test/lib/ansible_test/_util/controller/tools/yaml_to_json.py
+++ b/test/lib/ansible_test/_util/controller/tools/yaml_to_json.py
@@ -1,6 +1,5 @@
"""Read YAML from stdin and write JSON to stdout."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import datetime
import json
diff --git a/test/lib/ansible_test/_util/target/__init__.py b/test/lib/ansible_test/_util/target/__init__.py
index d6fc0a86..527d413a 100644
--- a/test/lib/ansible_test/_util/target/__init__.py
+++ b/test/lib/ansible_test/_util/target/__init__.py
@@ -1,3 +1,2 @@
-"""Nearly empty __init__.py to allow importing under Python 2.x."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+# Empty __init__.py to allow importing of `ansible_test._util.target.common` under Python 2.x.
+# This allows the ansible-test entry point to report supported Python versions before exiting.
diff --git a/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py b/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py
index dc31095a..95209493 100755
--- a/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py
+++ b/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py
@@ -11,7 +11,7 @@ import os
import sys
-def main():
+def main(args=None):
"""Main program entry point."""
ansible_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
source_root = os.path.join(ansible_root, 'test', 'lib')
@@ -30,7 +30,7 @@ def main():
# noinspection PyProtectedMember
from ansible_test._internal import main as cli_main
- cli_main()
+ cli_main(args)
def version_to_str(version):
diff --git a/test/lib/ansible_test/_util/target/common/__init__.py b/test/lib/ansible_test/_util/target/common/__init__.py
index d6fc0a86..527d413a 100644
--- a/test/lib/ansible_test/_util/target/common/__init__.py
+++ b/test/lib/ansible_test/_util/target/common/__init__.py
@@ -1,3 +1,2 @@
-"""Nearly empty __init__.py to allow importing under Python 2.x."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+# Empty __init__.py to allow importing of `ansible_test._util.target.common` under Python 2.x.
+# This allows the ansible-test entry point to report supported Python versions before exiting.
diff --git a/test/lib/ansible_test/_util/target/common/constants.py b/test/lib/ansible_test/_util/target/common/constants.py
index fdaa9e5f..4d42978e 100644
--- a/test/lib/ansible_test/_util/target/common/constants.py
+++ b/test/lib/ansible_test/_util/target/common/constants.py
@@ -6,7 +6,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
REMOTE_ONLY_PYTHON_VERSIONS = (
- '2.6',
'2.7',
'3.5',
'3.6',
diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py
deleted file mode 100644
index 21c49c47..00000000
--- a/test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# (c) 2019 Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# CAUTION: There are two implementations of the collection loader.
-# They must be kept functionally identical, although their implementations may differ.
-#
-# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory.
-# It must function on all Python versions supported on the controller.
-# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory.
-# It must function on all Python versions supported on managed hosts which are not supported by the controller.
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-# FIXME: decide what of this we want to actually be public/toplevel, put other stuff on a utility class?
-from ._collection_config import AnsibleCollectionConfig
-from ._collection_finder import AnsibleCollectionRef
-from ansible.module_utils.common.text.converters import to_text
-
-
-def resource_from_fqcr(ref):
- """
- Return resource from a fully-qualified collection reference,
- or from a simple resource name.
- For fully-qualified collection references, this is equivalent to
- ``AnsibleCollectionRef.from_fqcr(ref).resource``.
- :param ref: collection reference to parse
- :return: the resource as a unicode string
- """
- ref = to_text(ref, errors='strict')
- return ref.split(u'.')[-1]
diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py
deleted file mode 100644
index a2031931..00000000
--- a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# (c) 2019 Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# CAUTION: There are two implementations of the collection loader.
-# They must be kept functionally identical, although their implementations may differ.
-#
-# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory.
-# It must function on all Python versions supported on the controller.
-# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory.
-# It must function on all Python versions supported on managed hosts which are not supported by the controller.
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible.module_utils.common.text.converters import to_text
-from ansible.module_utils.six import with_metaclass
-
-
-class _EventSource:
- def __init__(self):
- self._handlers = set()
-
- def __iadd__(self, handler):
- if not callable(handler):
- raise ValueError('handler must be callable')
- self._handlers.add(handler)
- return self
-
- def __isub__(self, handler):
- try:
- self._handlers.remove(handler)
- except KeyError:
- pass
-
- return self
-
- def _on_exception(self, handler, exc, *args, **kwargs):
- # if we return True, we want the caller to re-raise
- return True
-
- def fire(self, *args, **kwargs):
- for h in self._handlers:
- try:
- h(*args, **kwargs)
- except Exception as ex:
- if self._on_exception(h, ex, *args, **kwargs):
- raise
-
-
-class _AnsibleCollectionConfig(type):
- def __init__(cls, meta, name, bases):
- cls._collection_finder = None
- cls._default_collection = None
- cls._on_collection_load = _EventSource()
-
- @property
- def collection_finder(cls):
- return cls._collection_finder
-
- @collection_finder.setter
- def collection_finder(cls, value):
- if cls._collection_finder:
- raise ValueError('an AnsibleCollectionFinder has already been configured')
-
- cls._collection_finder = value
-
- @property
- def collection_paths(cls):
- cls._require_finder()
- return [to_text(p) for p in cls._collection_finder._n_collection_paths]
-
- @property
- def default_collection(cls):
- return cls._default_collection
-
- @default_collection.setter
- def default_collection(cls, value):
-
- cls._default_collection = value
-
- @property
- def on_collection_load(cls):
- return cls._on_collection_load
-
- @on_collection_load.setter
- def on_collection_load(cls, value):
- if value is not cls._on_collection_load:
- raise ValueError('on_collection_load is not directly settable (use +=)')
-
- @property
- def playbook_paths(cls):
- cls._require_finder()
- return [to_text(p) for p in cls._collection_finder._n_playbook_paths]
-
- @playbook_paths.setter
- def playbook_paths(cls, value):
- cls._require_finder()
- cls._collection_finder.set_playbook_paths(value)
-
- def _require_finder(cls):
- if not cls._collection_finder:
- raise NotImplementedError('an AnsibleCollectionFinder has not been installed in this process')
-
-
-# concrete class of our metaclass type that defines the class properties we want
-class AnsibleCollectionConfig(with_metaclass(_AnsibleCollectionConfig)):
- pass
diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py
deleted file mode 100644
index 8b4b1b98..00000000
--- a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py
+++ /dev/null
@@ -1,1067 +0,0 @@
-# (c) 2019 Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# CAUTION: There are two implementations of the collection loader.
-# They must be kept functionally identical, although their implementations may differ.
-#
-# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory.
-# It must function on all Python versions supported on the controller.
-# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory.
-# It must function on all Python versions supported on managed hosts which are not supported by the controller.
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import os.path
-import pkgutil
-import re
-import sys
-from keyword import iskeyword
-from tokenize import Name as _VALID_IDENTIFIER_REGEX
-
-
-# DO NOT add new non-stdlib import deps here, this loader is used by external tools (eg ansible-test import sanity)
-# that only allow stdlib and module_utils
-from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes
-from ansible.module_utils.six import string_types, PY3
-from ._collection_config import AnsibleCollectionConfig
-
-from contextlib import contextmanager
-from types import ModuleType
-
-try:
- from importlib import import_module
-except ImportError:
- def import_module(name):
- __import__(name)
- return sys.modules[name]
-
-try:
- from importlib import reload as reload_module
-except ImportError:
- # 2.7 has a global reload function instead...
- reload_module = reload # pylint:disable=undefined-variable
-
-# NB: this supports import sanity test providing a different impl
-try:
- from ._collection_meta import _meta_yml_to_dict
-except ImportError:
- _meta_yml_to_dict = None
-
-
-if not hasattr(__builtins__, 'ModuleNotFoundError'):
- # this was introduced in Python 3.6
- ModuleNotFoundError = ImportError
-
-
-_VALID_IDENTIFIER_STRING_REGEX = re.compile(
- ''.join((_VALID_IDENTIFIER_REGEX, r'\Z')),
-)
-
-
-try: # NOTE: py3/py2 compat
- # py2 mypy can't deal with try/excepts
- is_python_identifier = str.isidentifier # type: ignore[attr-defined]
-except AttributeError: # Python 2
- def is_python_identifier(tested_str): # type: (str) -> bool
- """Determine whether the given string is a Python identifier."""
- # Ref: https://stackoverflow.com/a/55802320/595220
- return bool(re.match(_VALID_IDENTIFIER_STRING_REGEX, tested_str))
-
-
-PB_EXTENSIONS = ('.yml', '.yaml')
-
-
-class _AnsibleCollectionFinder:
- def __init__(self, paths=None, scan_sys_paths=True):
- # TODO: accept metadata loader override
- self._ansible_pkg_path = to_native(os.path.dirname(to_bytes(sys.modules['ansible'].__file__)))
-
- if isinstance(paths, string_types):
- paths = [paths]
- elif paths is None:
- paths = []
-
- # expand any placeholders in configured paths
- paths = [os.path.expanduser(to_native(p, errors='surrogate_or_strict')) for p in paths]
-
- # add syspaths if needed
- if scan_sys_paths:
- paths.extend(sys.path)
-
- good_paths = []
- # expand any placeholders in configured paths
- for p in paths:
-
- # ensure we always have ansible_collections
- if os.path.basename(p) == 'ansible_collections':
- p = os.path.dirname(p)
-
- if p not in good_paths and os.path.isdir(to_bytes(os.path.join(p, 'ansible_collections'), errors='surrogate_or_strict')):
- good_paths.append(p)
-
- self._n_configured_paths = good_paths
- self._n_cached_collection_paths = None
- self._n_cached_collection_qualified_paths = None
-
- self._n_playbook_paths = []
-
- @classmethod
- def _remove(cls):
- for mps in sys.meta_path:
- if isinstance(mps, _AnsibleCollectionFinder):
- sys.meta_path.remove(mps)
-
- # remove any path hooks that look like ours
- for ph in sys.path_hooks:
- if hasattr(ph, '__self__') and isinstance(ph.__self__, _AnsibleCollectionFinder):
- sys.path_hooks.remove(ph)
-
- # zap any cached path importer cache entries that might refer to us
- sys.path_importer_cache.clear()
-
- AnsibleCollectionConfig._collection_finder = None
-
- # validate via the public property that we really killed it
- if AnsibleCollectionConfig.collection_finder is not None:
- raise AssertionError('_AnsibleCollectionFinder remove did not reset AnsibleCollectionConfig.collection_finder')
-
- def _install(self):
- self._remove()
- sys.meta_path.insert(0, self)
-
- sys.path_hooks.insert(0, self._ansible_collection_path_hook)
-
- AnsibleCollectionConfig.collection_finder = self
-
- def _ansible_collection_path_hook(self, path):
- path = to_native(path)
- interesting_paths = self._n_cached_collection_qualified_paths
- if not interesting_paths:
- interesting_paths = []
- for p in self._n_collection_paths:
- if os.path.basename(p) != 'ansible_collections':
- p = os.path.join(p, 'ansible_collections')
-
- if p not in interesting_paths:
- interesting_paths.append(p)
-
- interesting_paths.insert(0, self._ansible_pkg_path)
- self._n_cached_collection_qualified_paths = interesting_paths
-
- if any(path.startswith(p) for p in interesting_paths):
- return _AnsiblePathHookFinder(self, path)
-
- raise ImportError('not interested')
-
- @property
- def _n_collection_paths(self):
- paths = self._n_cached_collection_paths
- if not paths:
- self._n_cached_collection_paths = paths = self._n_playbook_paths + self._n_configured_paths
- return paths
-
- def set_playbook_paths(self, playbook_paths):
- if isinstance(playbook_paths, string_types):
- playbook_paths = [playbook_paths]
-
- # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
- added_paths = set()
-
- # de-dupe
- self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in playbook_paths if not (p in added_paths or added_paths.add(p))]
- self._n_cached_collection_paths = None
- # HACK: playbook CLI sets this relatively late, so we've already loaded some packages whose paths might depend on this. Fix those up.
- # NB: this should NOT be used for late additions; ideally we'd fix the playbook dir setup earlier in Ansible init
- # to prevent this from occurring
- for pkg in ['ansible_collections', 'ansible_collections.ansible']:
- self._reload_hack(pkg)
-
- def _reload_hack(self, fullname):
- m = sys.modules.get(fullname)
- if not m:
- return
- reload_module(m)
-
- def find_module(self, fullname, path=None):
- # Figure out what's being asked for, and delegate to a special-purpose loader
-
- split_name = fullname.split('.')
- toplevel_pkg = split_name[0]
- module_to_find = split_name[-1]
- part_count = len(split_name)
-
- if toplevel_pkg not in ['ansible', 'ansible_collections']:
- # not interested in anything other than ansible_collections (and limited cases under ansible)
- return None
-
- # sanity check what we're getting from import, canonicalize path values
- if part_count == 1:
- if path:
- raise ValueError('path should not be specified for top-level packages (trying to find {0})'.format(fullname))
- else:
- # seed the path to the configured collection roots
- path = self._n_collection_paths
-
- if part_count > 1 and path is None:
- raise ValueError('path must be specified for subpackages (trying to find {0})'.format(fullname))
-
- # NB: actual "find"ing is delegated to the constructors on the various loaders; they'll ImportError if not found
- try:
- if toplevel_pkg == 'ansible':
- # something under the ansible package, delegate to our internal loader in case of redirections
- return _AnsibleInternalRedirectLoader(fullname=fullname, path_list=path)
- if part_count == 1:
- return _AnsibleCollectionRootPkgLoader(fullname=fullname, path_list=path)
- if part_count == 2: # ns pkg eg, ansible_collections, ansible_collections.somens
- return _AnsibleCollectionNSPkgLoader(fullname=fullname, path_list=path)
- elif part_count == 3: # collection pkg eg, ansible_collections.somens.somecoll
- return _AnsibleCollectionPkgLoader(fullname=fullname, path_list=path)
- # anything below the collection
- return _AnsibleCollectionLoader(fullname=fullname, path_list=path)
- except ImportError:
- # TODO: log attempt to load context
- return None
-
-
-# Implements a path_hook finder for iter_modules (since it's only path based). This finder does not need to actually
-# function as a finder in most cases, since our meta_path finder is consulted first for *almost* everything, except
-# pkgutil.iter_modules, and under py2, pkgutil.get_data if the parent package passed has not been loaded yet.
-class _AnsiblePathHookFinder:
- def __init__(self, collection_finder, pathctx):
- # when called from a path_hook, find_module doesn't usually get the path arg, so this provides our context
- self._pathctx = to_native(pathctx)
- self._collection_finder = collection_finder
- if PY3:
- # cache the native FileFinder (take advantage of its filesystem cache for future find/load requests)
- self._file_finder = None
-
- # class init is fun- this method has a self arg that won't get used
- def _get_filefinder_path_hook(self=None):
- _file_finder_hook = None
- if PY3:
- # try to find the FileFinder hook to call for fallback path-based imports in Py3
- _file_finder_hook = [ph for ph in sys.path_hooks if 'FileFinder' in repr(ph)]
- if len(_file_finder_hook) != 1:
- raise Exception('need exactly one FileFinder import hook (found {0})'.format(len(_file_finder_hook)))
- _file_finder_hook = _file_finder_hook[0]
-
- return _file_finder_hook
-
- _filefinder_path_hook = _get_filefinder_path_hook()
-
- def find_module(self, fullname, path=None):
- # we ignore the passed in path here- use what we got from the path hook init
- split_name = fullname.split('.')
- toplevel_pkg = split_name[0]
-
- if toplevel_pkg == 'ansible_collections':
- # collections content? delegate to the collection finder
- return self._collection_finder.find_module(fullname, path=[self._pathctx])
- else:
- # Something else; we'd normally restrict this to `ansible` descendent modules so that any weird loader
- # behavior that arbitrary Python modules have can be serviced by those loaders. In some dev/test
- # scenarios (eg a venv under a collection) our path_hook signs us up to load non-Ansible things, and
- # it's too late by the time we've reached this point, but also too expensive for the path_hook to figure
- # out what we *shouldn't* be loading with the limited info it has. So we'll just delegate to the
- # normal path-based loader as best we can to service it. This also allows us to take advantage of Python's
- # built-in FS caching and byte-compilation for most things.
- if PY3:
- # create or consult our cached file finder for this path
- if not self._file_finder:
- try:
- self._file_finder = _AnsiblePathHookFinder._filefinder_path_hook(self._pathctx)
- except ImportError:
- # FUTURE: log at a high logging level? This is normal for things like python36.zip on the path, but
- # might not be in some other situation...
- return None
-
- spec = self._file_finder.find_spec(fullname)
- if not spec:
- return None
- return spec.loader
- else:
- # call py2's internal loader
- # noinspection PyDeprecation
- return pkgutil.ImpImporter(self._pathctx).find_module(fullname) # pylint: disable=deprecated-class
-
- def iter_modules(self, prefix):
- # NB: this currently represents only what's on disk, and does not handle package redirection
- return _iter_modules_impl([self._pathctx], prefix)
-
- def __repr__(self):
- return "{0}(path='{1}')".format(self.__class__.__name__, self._pathctx)
-
-
-class _AnsibleCollectionPkgLoaderBase:
- _allows_package_code = False
-
- def __init__(self, fullname, path_list=None):
- self._fullname = fullname
- self._redirect_module = None
- self._split_name = fullname.split('.')
- self._rpart_name = fullname.rpartition('.')
- self._parent_package_name = self._rpart_name[0] # eg ansible_collections for ansible_collections.somens, '' for toplevel
- self._package_to_load = self._rpart_name[2] # eg somens for ansible_collections.somens
-
- self._source_code_path = None
- self._decoded_source = None
- self._compiled_code = None
-
- self._validate_args()
-
- self._candidate_paths = self._get_candidate_paths([to_native(p) for p in path_list])
- self._subpackage_search_paths = self._get_subpackage_search_paths(self._candidate_paths)
-
- self._validate_final()
-
- # allow subclasses to validate args and sniff split values before we start digging around
- def _validate_args(self):
- if self._split_name[0] != 'ansible_collections':
- raise ImportError('this loader can only load packages from the ansible_collections package, not {0}'.format(self._fullname))
-
- # allow subclasses to customize candidate path filtering
- def _get_candidate_paths(self, path_list):
- return [os.path.join(p, self._package_to_load) for p in path_list]
-
- # allow subclasses to customize finding paths
- def _get_subpackage_search_paths(self, candidate_paths):
- # filter candidate paths for existence (NB: silently ignoring package init code and same-named modules)
- return [p for p in candidate_paths if os.path.isdir(to_bytes(p))]
-
- # allow subclasses to customize state validation/manipulation before we return the loader instance
- def _validate_final(self):
- return
-
- @staticmethod
- @contextmanager
- def _new_or_existing_module(name, **kwargs):
- # handle all-or-nothing sys.modules creation/use-existing/delete-on-exception-if-created behavior
- created_module = False
- module = sys.modules.get(name)
- try:
- if not module:
- module = ModuleType(name)
- created_module = True
- sys.modules[name] = module
- # always override the values passed, except name (allow reference aliasing)
- for attr, value in kwargs.items():
- setattr(module, attr, value)
- yield module
- except Exception:
- if created_module:
- if sys.modules.get(name):
- sys.modules.pop(name)
- raise
-
- # basic module/package location support
- # NB: this does not support distributed packages!
- @staticmethod
- def _module_file_from_path(leaf_name, path):
- has_code = True
- package_path = os.path.join(to_native(path), to_native(leaf_name))
- module_path = None
-
- # if the submodule is a package, assemble valid submodule paths, but stop looking for a module
- if os.path.isdir(to_bytes(package_path)):
- # is there a package init?
- module_path = os.path.join(package_path, '__init__.py')
- if not os.path.isfile(to_bytes(module_path)):
- module_path = os.path.join(package_path, '__synthetic__')
- has_code = False
- else:
- module_path = package_path + '.py'
- package_path = None
- if not os.path.isfile(to_bytes(module_path)):
- raise ImportError('{0} not found at {1}'.format(leaf_name, path))
-
- return module_path, has_code, package_path
-
- def load_module(self, fullname):
- # short-circuit redirect; we've already imported the redirected module, so just alias it and return it
- if self._redirect_module:
- sys.modules[self._fullname] = self._redirect_module
- return self._redirect_module
-
- # we're actually loading a module/package
- module_attrs = dict(
- __loader__=self,
- __file__=self.get_filename(fullname),
- __package__=self._parent_package_name # sane default for non-packages
- )
-
- # eg, I am a package
- if self._subpackage_search_paths is not None: # empty is legal
- module_attrs['__path__'] = self._subpackage_search_paths
- module_attrs['__package__'] = fullname # per PEP366
-
- with self._new_or_existing_module(fullname, **module_attrs) as module:
- # execute the module's code in its namespace
- code_obj = self.get_code(fullname)
- if code_obj is not None: # things like NS packages that can't have code on disk will return None
- exec(code_obj, module.__dict__)
-
- return module
-
- def is_package(self, fullname):
- if fullname != self._fullname:
- raise ValueError('this loader cannot answer is_package for {0}, only {1}'.format(fullname, self._fullname))
- return self._subpackage_search_paths is not None
-
- def get_source(self, fullname):
- if self._decoded_source:
- return self._decoded_source
- if fullname != self._fullname:
- raise ValueError('this loader cannot load source for {0}, only {1}'.format(fullname, self._fullname))
- if not self._source_code_path:
- return None
- # FIXME: what do we want encoding/newline requirements to be?
- self._decoded_source = self.get_data(self._source_code_path)
- return self._decoded_source
-
- def get_data(self, path):
- if not path:
- raise ValueError('a path must be specified')
-
- # TODO: ensure we're being asked for a path below something we own
- # TODO: try to handle redirects internally?
-
- if not path[0] == '/':
- # relative to current package, search package paths if possible (this may not be necessary)
- # candidate_paths = [os.path.join(ssp, path) for ssp in self._subpackage_search_paths]
- raise ValueError('relative resource paths not supported')
- else:
- candidate_paths = [path]
-
- for p in candidate_paths:
- b_path = to_bytes(p)
- if os.path.isfile(b_path):
- with open(b_path, 'rb') as fd:
- return fd.read()
- # HACK: if caller asks for __init__.py and the parent dir exists, return empty string (this keep consistency
- # with "collection subpackages don't require __init__.py" working everywhere with get_data
- elif b_path.endswith(b'__init__.py') and os.path.isdir(os.path.dirname(b_path)):
- return ''
-
- return None
-
- def _synthetic_filename(self, fullname):
- return '<ansible_synthetic_collection_package>'
-
- def get_filename(self, fullname):
- if fullname != self._fullname:
- raise ValueError('this loader cannot find files for {0}, only {1}'.format(fullname, self._fullname))
-
- filename = self._source_code_path
-
- if not filename and self.is_package(fullname):
- if len(self._subpackage_search_paths) == 1:
- filename = os.path.join(self._subpackage_search_paths[0], '__synthetic__')
- else:
- filename = self._synthetic_filename(fullname)
-
- return filename
-
- def get_code(self, fullname):
- if self._compiled_code:
- return self._compiled_code
-
- # this may or may not be an actual filename, but it's the value we'll use for __file__
- filename = self.get_filename(fullname)
- if not filename:
- filename = '<string>'
-
- source_code = self.get_source(fullname)
-
- # for things like synthetic modules that really have no source on disk, don't return a code object at all
- # vs things like an empty package init (which has an empty string source on disk)
- if source_code is None:
- return None
-
- self._compiled_code = compile(source=source_code, filename=filename, mode='exec', flags=0, dont_inherit=True)
-
- return self._compiled_code
-
- def iter_modules(self, prefix):
- return _iter_modules_impl(self._subpackage_search_paths, prefix)
-
- def __repr__(self):
- return '{0}(path={1})'.format(self.__class__.__name__, self._subpackage_search_paths or self._source_code_path)
-
-
-class _AnsibleCollectionRootPkgLoader(_AnsibleCollectionPkgLoaderBase):
- def _validate_args(self):
- super(_AnsibleCollectionRootPkgLoader, self)._validate_args()
- if len(self._split_name) != 1:
- raise ImportError('this loader can only load the ansible_collections toplevel package, not {0}'.format(self._fullname))
-
-
-# Implements Ansible's custom namespace package support.
-# The ansible_collections package and one level down (collections namespaces) are Python namespace packages
-# that search across all configured collection roots. The collection package (two levels down) is the first one found
-# on the configured collection root path, and Python namespace package aggregation is not allowed at or below
-# the collection. Implements implicit package (package dir) support for both Py2/3. Package init code is ignored
-# by this loader.
-class _AnsibleCollectionNSPkgLoader(_AnsibleCollectionPkgLoaderBase):
- def _validate_args(self):
- super(_AnsibleCollectionNSPkgLoader, self)._validate_args()
- if len(self._split_name) != 2:
- raise ImportError('this loader can only load collections namespace packages, not {0}'.format(self._fullname))
-
- def _validate_final(self):
- # special-case the `ansible` namespace, since `ansible.builtin` is magical
- if not self._subpackage_search_paths and self._package_to_load != 'ansible':
- raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))
-
-
-# handles locating the actual collection package and associated metadata
-class _AnsibleCollectionPkgLoader(_AnsibleCollectionPkgLoaderBase):
- def _validate_args(self):
- super(_AnsibleCollectionPkgLoader, self)._validate_args()
- if len(self._split_name) != 3:
- raise ImportError('this loader can only load collection packages, not {0}'.format(self._fullname))
-
- def _validate_final(self):
- if self._split_name[1:3] == ['ansible', 'builtin']:
- # we don't want to allow this one to have on-disk search capability
- self._subpackage_search_paths = []
- elif not self._subpackage_search_paths:
- raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))
- else:
- # only search within the first collection we found
- self._subpackage_search_paths = [self._subpackage_search_paths[0]]
-
- def load_module(self, fullname):
- if not _meta_yml_to_dict:
- raise ValueError('ansible.utils.collection_loader._meta_yml_to_dict is not set')
-
- module = super(_AnsibleCollectionPkgLoader, self).load_module(fullname)
-
- module._collection_meta = {}
- # TODO: load collection metadata, cache in __loader__ state
-
- collection_name = '.'.join(self._split_name[1:3])
-
- if collection_name == 'ansible.builtin':
- # ansible.builtin is a synthetic collection, get its routing config from the Ansible distro
- ansible_pkg_path = os.path.dirname(import_module('ansible').__file__)
- metadata_path = os.path.join(ansible_pkg_path, 'config/ansible_builtin_runtime.yml')
- with open(to_bytes(metadata_path), 'rb') as fd:
- raw_routing = fd.read()
- else:
- b_routing_meta_path = to_bytes(os.path.join(module.__path__[0], 'meta/runtime.yml'))
- if os.path.isfile(b_routing_meta_path):
- with open(b_routing_meta_path, 'rb') as fd:
- raw_routing = fd.read()
- else:
- raw_routing = ''
- try:
- if raw_routing:
- routing_dict = _meta_yml_to_dict(raw_routing, (collection_name, 'runtime.yml'))
- module._collection_meta = self._canonicalize_meta(routing_dict)
- except Exception as ex:
- raise ValueError('error parsing collection metadata: {0}'.format(to_native(ex)))
-
- AnsibleCollectionConfig.on_collection_load.fire(collection_name=collection_name, collection_path=os.path.dirname(module.__file__))
-
- return module
-
- def _canonicalize_meta(self, meta_dict):
- # TODO: rewrite import keys and all redirect targets that start with .. (current namespace) and . (current collection)
- # OR we could do it all on the fly?
- # if not meta_dict:
- # return {}
- #
- # ns_name = '.'.join(self._split_name[0:2])
- # collection_name = '.'.join(self._split_name[0:3])
- #
- # #
- # for routing_type, routing_type_dict in iteritems(meta_dict.get('plugin_routing', {})):
- # for plugin_key, plugin_dict in iteritems(routing_type_dict):
- # redirect = plugin_dict.get('redirect', '')
- # if redirect.startswith('..'):
- # redirect = redirect[2:]
-
- return meta_dict
-
-
-# loads everything under a collection, including handling redirections defined by the collection
-class _AnsibleCollectionLoader(_AnsibleCollectionPkgLoaderBase):
- # HACK: stash this in a better place
- _redirected_package_map = {}
- _allows_package_code = True
-
- def _validate_args(self):
- super(_AnsibleCollectionLoader, self)._validate_args()
- if len(self._split_name) < 4:
- raise ValueError('this loader is only for sub-collection modules/packages, not {0}'.format(self._fullname))
-
- def _get_candidate_paths(self, path_list):
- if len(path_list) != 1 and self._split_name[1:3] != ['ansible', 'builtin']:
- raise ValueError('this loader requires exactly one path to search')
-
- return path_list
-
- def _get_subpackage_search_paths(self, candidate_paths):
- collection_name = '.'.join(self._split_name[1:3])
- collection_meta = _get_collection_metadata(collection_name)
-
- # check for explicit redirection, as well as ancestor package-level redirection (only load the actual code once!)
- redirect = None
- explicit_redirect = False
-
- routing_entry = _nested_dict_get(collection_meta, ['import_redirection', self._fullname])
- if routing_entry:
- redirect = routing_entry.get('redirect')
-
- if redirect:
- explicit_redirect = True
- else:
- redirect = _get_ancestor_redirect(self._redirected_package_map, self._fullname)
-
- # NB: package level redirection requires hooking all future imports beneath the redirected source package
- # in order to ensure sanity on future relative imports. We always import everything under its "real" name,
- # then add a sys.modules entry with the redirected name using the same module instance. If we naively imported
- # the source for each redirection, most submodules would import OK, but we'd have N runtime copies of the module
- # (one for each name), and relative imports that ascend above the redirected package would break (since they'd
- # see the redirected ancestor package contents instead of the package where they actually live).
- if redirect:
- # FIXME: wrap this so we can be explicit about a failed redirection
- self._redirect_module = import_module(redirect)
- if explicit_redirect and hasattr(self._redirect_module, '__path__') and self._redirect_module.__path__:
- # if the import target looks like a package, store its name so we can rewrite future descendent loads
- self._redirected_package_map[self._fullname] = redirect
-
- # if we redirected, don't do any further custom package logic
- return None
-
- # we're not doing a redirect- try to find what we need to actually load a module/package
-
- # this will raise ImportError if we can't find the requested module/package at all
- if not candidate_paths:
- # noplace to look, just ImportError
- raise ImportError('package has no paths')
-
- found_path, has_code, package_path = self._module_file_from_path(self._package_to_load, candidate_paths[0])
-
- # still here? we found something to load...
- if has_code:
- self._source_code_path = found_path
-
- if package_path:
- return [package_path] # always needs to be a list
-
- return None
-
-
-# This loader only answers for intercepted Ansible Python modules. Normal imports will fail here and be picked up later
-# by our path_hook importer (which proxies the built-in import mechanisms, allowing normal caching etc to occur)
-class _AnsibleInternalRedirectLoader:
- def __init__(self, fullname, path_list):
- self._redirect = None
-
- split_name = fullname.split('.')
- toplevel_pkg = split_name[0]
- module_to_load = split_name[-1]
-
- if toplevel_pkg != 'ansible':
- raise ImportError('not interested')
-
- builtin_meta = _get_collection_metadata('ansible.builtin')
-
- routing_entry = _nested_dict_get(builtin_meta, ['import_redirection', fullname])
- if routing_entry:
- self._redirect = routing_entry.get('redirect')
-
- if not self._redirect:
- raise ImportError('not redirected, go ask path_hook')
-
- def load_module(self, fullname):
- # since we're delegating to other loaders, this should only be called for internal redirects where we answered
- # find_module with this loader, in which case we'll just directly import the redirection target, insert it into
- # sys.modules under the name it was requested by, and return the original module.
-
- # should never see this
- if not self._redirect:
- raise ValueError('no redirect found for {0}'.format(fullname))
-
- # FIXME: smuggle redirection context, provide warning/error that we tried and failed to redirect
- mod = import_module(self._redirect)
- sys.modules[fullname] = mod
- return mod
-
-
-class AnsibleCollectionRef:
- # FUTURE: introspect plugin loaders to get these dynamically?
- VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection',
- 'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup',
- 'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy',
- 'terminal', 'test', 'vars', 'playbook'])
-
- # FIXME: tighten this up to match Python identifier reqs, etc
- VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$'))
- VALID_FQCR_RE = re.compile(to_text(r'^\w+(\.\w+){2,}$')) # can have 0-N included subdirs as well
-
- def __init__(self, collection_name, subdirs, resource, ref_type):
- """
- Create an AnsibleCollectionRef from components
- :param collection_name: a collection name of the form 'namespace.collectionname'
- :param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2')
- :param resource: the name of the resource being references (eg, 'mymodule', 'someaction', 'a_role')
- :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
- """
- collection_name = to_text(collection_name, errors='strict')
- if subdirs is not None:
- subdirs = to_text(subdirs, errors='strict')
- resource = to_text(resource, errors='strict')
- ref_type = to_text(ref_type, errors='strict')
-
- if not self.is_valid_collection_name(collection_name):
- raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name)))
-
- if ref_type not in self.VALID_REF_TYPES:
- raise ValueError('invalid collection ref_type: {0}'.format(ref_type))
-
- self.collection = collection_name
- if subdirs:
- if not re.match(self.VALID_SUBDIRS_RE, subdirs):
- raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs)))
- self.subdirs = subdirs
- else:
- self.subdirs = u''
-
- self.resource = resource
- self.ref_type = ref_type
-
- package_components = [u'ansible_collections', self.collection]
- fqcr_components = [self.collection]
-
- self.n_python_collection_package_name = to_native('.'.join(package_components))
-
- if self.ref_type == u'role':
- package_components.append(u'roles')
- elif self.ref_type == u'playbook':
- package_components.append(u'playbooks')
- else:
- # we assume it's a plugin
- package_components += [u'plugins', self.ref_type]
-
- if self.subdirs:
- package_components.append(self.subdirs)
- fqcr_components.append(self.subdirs)
-
- if self.ref_type in (u'role', u'playbook'):
- # playbooks and roles are their own resource
- package_components.append(self.resource)
-
- fqcr_components.append(self.resource)
-
- self.n_python_package_name = to_native('.'.join(package_components))
- self._fqcr = u'.'.join(fqcr_components)
-
- def __repr__(self):
- return 'AnsibleCollectionRef(collection={0!r}, subdirs={1!r}, resource={2!r})'.format(self.collection, self.subdirs, self.resource)
-
- @property
- def fqcr(self):
- return self._fqcr
-
- @staticmethod
- def from_fqcr(ref, ref_type):
- """
- Parse a string as a fully-qualified collection reference, raises ValueError if invalid
- :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
- :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
- :return: a populated AnsibleCollectionRef object
- """
- # assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name),
- # we split the resource name off the right, split ns and coll off the left, and we're left with any optional
- # subdirs that need to be added back below the plugin-specific subdir we'll add. So:
- # ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource
- # ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.subdir1.(plugintype).resource
- # ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename
- if not AnsibleCollectionRef.is_valid_fqcr(ref):
- raise ValueError('{0} is not a valid collection reference'.format(to_native(ref)))
-
- ref = to_text(ref, errors='strict')
- ref_type = to_text(ref_type, errors='strict')
- ext = ''
-
- if ref_type == u'playbook' and ref.endswith(PB_EXTENSIONS):
- resource_splitname = ref.rsplit(u'.', 2)
- package_remnant = resource_splitname[0]
- resource = resource_splitname[1]
- ext = '.' + resource_splitname[2]
- else:
- resource_splitname = ref.rsplit(u'.', 1)
- package_remnant = resource_splitname[0]
- resource = resource_splitname[1]
-
- # split the left two components of the collection package name off, anything remaining is plugin-type
- # specific subdirs to be added back on below the plugin type
- package_splitname = package_remnant.split(u'.', 2)
- if len(package_splitname) == 3:
- subdirs = package_splitname[2]
- else:
- subdirs = u''
-
- collection_name = u'.'.join(package_splitname[0:2])
-
- return AnsibleCollectionRef(collection_name, subdirs, resource + ext, ref_type)
-
- @staticmethod
- def try_parse_fqcr(ref, ref_type):
- """
- Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error)
- :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
- :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
- :return: a populated AnsibleCollectionRef object on successful parsing, else None
- """
- try:
- return AnsibleCollectionRef.from_fqcr(ref, ref_type)
- except ValueError:
- pass
-
- @staticmethod
- def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name):
- """
- Utility method to convert from a PluginLoader dir name to a plugin ref_type
- :param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library')
- :return: the corresponding plugin ref_type (eg, 'action', 'role')
- """
- legacy_plugin_dir_name = to_text(legacy_plugin_dir_name)
-
- plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'')
-
- if plugin_type == u'library':
- plugin_type = u'modules'
-
- if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES:
- raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name)))
-
- return plugin_type
-
- @staticmethod
- def is_valid_fqcr(ref, ref_type=None):
- """
- Validates if is string is a well-formed fully-qualified collection reference (does not look up the collection itself)
- :param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
- :param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment'
- :return: True if the collection ref passed is well-formed, False otherwise
- """
-
- ref = to_text(ref)
-
- if not ref_type:
- return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref))
-
- return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type))
-
- @staticmethod
- def is_valid_collection_name(collection_name):
- """
- Validates if the given string is a well-formed collection name (does not look up the collection itself)
- :param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname')
- :return: True if the collection name passed is well-formed, False otherwise
- """
-
- collection_name = to_text(collection_name)
-
- if collection_name.count(u'.') != 1:
- return False
-
- return all(
- # NOTE: keywords and identifiers are different in differnt Pythons
- not iskeyword(ns_or_name) and is_python_identifier(ns_or_name)
- for ns_or_name in collection_name.split(u'.')
- )
-
-
-def _get_collection_playbook_path(playbook):
-
- acr = AnsibleCollectionRef.try_parse_fqcr(playbook, u'playbook')
- if acr:
- try:
- # get_collection_path
- pkg = import_module(acr.n_python_collection_package_name)
- except (IOError, ModuleNotFoundError) as e:
- # leaving e as debug target, even though not used in normal code
- pkg = None
-
- if pkg:
- cpath = os.path.join(sys.modules[acr.n_python_collection_package_name].__file__.replace('__synthetic__', 'playbooks'))
-
- if acr.subdirs:
- paths = [to_native(x) for x in acr.subdirs.split(u'.')]
- paths.insert(0, cpath)
- cpath = os.path.join(*paths)
-
- path = os.path.join(cpath, to_native(acr.resource))
- if os.path.exists(to_bytes(path)):
- return acr.resource, path, acr.collection
- elif not acr.resource.endswith(PB_EXTENSIONS):
- for ext in PB_EXTENSIONS:
- path = os.path.join(cpath, to_native(acr.resource + ext))
- if os.path.exists(to_bytes(path)):
- return acr.resource, path, acr.collection
- return None
-
-
-def _get_collection_role_path(role_name, collection_list=None):
- return _get_collection_resource_path(role_name, u'role', collection_list)
-
-
-def _get_collection_resource_path(name, ref_type, collection_list=None):
-
- if ref_type == u'playbook':
- # they are handled a bit diff due to 'extension variance' and no collection_list
- return _get_collection_playbook_path(name)
-
- acr = AnsibleCollectionRef.try_parse_fqcr(name, ref_type)
- if acr:
- # looks like a valid qualified collection ref; skip the collection_list
- collection_list = [acr.collection]
- subdirs = acr.subdirs
- resource = acr.resource
- elif not collection_list:
- return None # not a FQ and no collection search list spec'd, nothing to do
- else:
- resource = name # treat as unqualified, loop through the collection search list to try and resolve
- subdirs = ''
-
- for collection_name in collection_list:
- try:
- acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type=ref_type)
- # FIXME: error handling/logging; need to catch any import failures and move along
- pkg = import_module(acr.n_python_package_name)
-
- if pkg is not None:
- # the package is now loaded, get the collection's package and ask where it lives
- path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
- return resource, to_text(path, errors='surrogate_or_strict'), collection_name
-
- except (IOError, ModuleNotFoundError) as e:
- continue
- except Exception as ex:
- # FIXME: pick out typical import errors first, then error logging
- continue
-
- return None
-
-
-def _get_collection_name_from_path(path):
- """
- Return the containing collection name for a given path, or None if the path is not below a configured collection, or
- the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured
- collection roots).
- :param path: path to evaluate for collection containment
- :return: collection name or None
- """
-
- # ensure we compare full paths since pkg path will be abspath
- path = to_native(os.path.abspath(to_bytes(path)))
-
- path_parts = path.split('/')
- if path_parts.count('ansible_collections') != 1:
- return None
-
- ac_pos = path_parts.index('ansible_collections')
-
- # make sure it's followed by at least a namespace and collection name
- if len(path_parts) < ac_pos + 3:
- return None
-
- candidate_collection_name = '.'.join(path_parts[ac_pos + 1:ac_pos + 3])
-
- try:
- # we've got a name for it, now see if the path prefix matches what the loader sees
- imported_pkg_path = to_native(os.path.dirname(to_bytes(import_module('ansible_collections.' + candidate_collection_name).__file__)))
- except ImportError:
- return None
-
- # reassemble the original path prefix up the collection name, and it should match what we just imported. If not
- # this is probably a collection root that's not configured.
-
- original_path_prefix = os.path.join('/', *path_parts[0:ac_pos + 3])
-
- imported_pkg_path = to_native(os.path.abspath(to_bytes(imported_pkg_path)))
- if original_path_prefix != imported_pkg_path:
- return None
-
- return candidate_collection_name
-
-
-def _get_import_redirect(collection_meta_dict, fullname):
- if not collection_meta_dict:
- return None
-
- return _nested_dict_get(collection_meta_dict, ['import_redirection', fullname, 'redirect'])
-
-
-def _get_ancestor_redirect(redirected_package_map, fullname):
- # walk the requested module's ancestor packages to see if any have been previously redirected
- cur_pkg = fullname
- while cur_pkg:
- cur_pkg = cur_pkg.rpartition('.')[0]
- ancestor_redirect = redirected_package_map.get(cur_pkg)
- if ancestor_redirect:
- # rewrite the prefix on fullname so we import the target first, then alias it
- redirect = ancestor_redirect + fullname[len(cur_pkg):]
- return redirect
- return None
-
-
-def _nested_dict_get(root_dict, key_list):
- cur_value = root_dict
- for key in key_list:
- cur_value = cur_value.get(key)
- if not cur_value:
- return None
-
- return cur_value
-
-
-def _iter_modules_impl(paths, prefix=''):
- # NB: this currently only iterates what's on disk- redirected modules are not considered
- if not prefix:
- prefix = ''
- else:
- prefix = to_native(prefix)
- # yield (module_loader, name, ispkg) for each module/pkg under path
- # TODO: implement ignore/silent catch for unreadable?
- for b_path in map(to_bytes, paths):
- if not os.path.isdir(b_path):
- continue
- for b_basename in sorted(os.listdir(b_path)):
- b_candidate_module_path = os.path.join(b_path, b_basename)
- if os.path.isdir(b_candidate_module_path):
- # exclude things that obviously aren't Python package dirs
- # FIXME: this dir is adjustable in py3.8+, check for it
- if b'.' in b_basename or b_basename == b'__pycache__':
- continue
-
- # TODO: proper string handling?
- yield prefix + to_native(b_basename), True
- else:
- # FIXME: match builtin ordering for package/dir/file, support compiled?
- if b_basename.endswith(b'.py') and b_basename != b'__init__.py':
- yield prefix + to_native(os.path.splitext(b_basename)[0]), False
-
-
-def _get_collection_metadata(collection_name):
- collection_name = to_native(collection_name)
- if not collection_name or not isinstance(collection_name, string_types) or len(collection_name.split('.')) != 2:
- raise ValueError('collection_name must be a non-empty string of the form namespace.collection')
-
- try:
- collection_pkg = import_module('ansible_collections.' + collection_name)
- except ImportError:
- raise ValueError('unable to locate collection {0}'.format(collection_name))
-
- _collection_meta = getattr(collection_pkg, '_collection_meta', None)
-
- if _collection_meta is None:
- raise ValueError('collection metadata was not loaded for collection {0}'.format(collection_name))
-
- return _collection_meta
diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py
deleted file mode 100644
index 3a971978..00000000
--- a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# (c) 2019 Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# CAUTION: There are two implementations of the collection loader.
-# They must be kept functionally identical, although their implementations may differ.
-#
-# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory.
-# It must function on all Python versions supported on the controller.
-# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory.
-# It must function on all Python versions supported on managed hosts which are not supported by the controller.
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-try:
- from collections.abc import Mapping # pylint: disable=ansible-bad-import-from
-except ImportError:
- from collections import Mapping # pylint: disable=ansible-bad-import-from,deprecated-class
-
-from ansible.module_utils.common.yaml import yaml_load
-
-
-def _meta_yml_to_dict(yaml_string_data, content_id):
- """
- Converts string YAML dictionary to a Python dictionary. This function may be monkeypatched to another implementation
- by some tools (eg the import sanity test).
- :param yaml_string_data: a bytes-ish YAML dictionary
- :param content_id: a unique ID representing the content to allow other implementations to cache the output
- :return: a Python dictionary representing the YAML dictionary content
- """
- # NB: content_id is passed in, but not used by this implementation
- routing_dict = yaml_load(yaml_string_data)
- if not routing_dict:
- routing_dict = {}
- if not isinstance(routing_dict, Mapping):
- raise ValueError('collection metadata must be an instance of Python Mapping')
- return routing_dict
diff --git a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py
index c26971fc..fefd6b0f 100644
--- a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py
+++ b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py
@@ -3,7 +3,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
-import sys
# set by ansible-test to a single directory, rather than a list of directories as supported by Ansible itself
ANSIBLE_COLLECTIONS_PATH = os.path.join(os.environ['ANSIBLE_COLLECTIONS_PATH'], 'ansible_collections')
@@ -41,12 +40,8 @@ def pytest_configure():
except AttributeError:
pytest_configure.executed = True
- if sys.version_info >= ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION:
- # noinspection PyProtectedMember
- from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
- else:
- # noinspection PyProtectedMember
- from ansible_test._internal.legacy_collection_loader._collection_finder import _AnsibleCollectionFinder
+ # noinspection PyProtectedMember
+ from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
# allow unit tests to import code from collections
diff --git a/test/lib/ansible_test/_util/target/sanity/compile/compile.py b/test/lib/ansible_test/_util/target/sanity/compile/compile.py
index e2302fc0..7890a9b2 100644
--- a/test/lib/ansible_test/_util/target/sanity/compile/compile.py
+++ b/test/lib/ansible_test/_util/target/sanity/compile/compile.py
@@ -24,6 +24,10 @@ def main():
else:
continue
+ # In some situations offset can be None. This can happen for syntax errors on Python 2.6
+ # (__future__ import following after a regular import).
+ offset = offset or 0
+
result = "%s:%d:%d: %s: %s" % (path, lineno, offset, extype.__name__, safe_message(message))
if sys.version_info <= (3,):
diff --git a/test/lib/ansible_test/_util/target/sanity/import/importer.py b/test/lib/ansible_test/_util/target/sanity/import/importer.py
index 60255da6..3dcb8bf9 100644
--- a/test/lib/ansible_test/_util/target/sanity/import/importer.py
+++ b/test/lib/ansible_test/_util/target/sanity/import/importer.py
@@ -39,7 +39,6 @@ def main():
collection_full_name = os.environ.get('SANITY_COLLECTION_FULL_NAME')
collection_root = os.environ.get('ANSIBLE_COLLECTIONS_PATH')
import_type = os.environ.get('SANITY_IMPORTER_TYPE')
- ansible_controller_min_python_version = tuple(int(x) for x in os.environ.get('ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION', '0').split('.'))
try:
# noinspection PyCompatibility
@@ -55,19 +54,21 @@ def main():
except ImportError:
from io import StringIO
+ try:
+ from importlib.util import spec_from_loader, module_from_spec
+ from importlib.machinery import SourceFileLoader, ModuleSpec # pylint: disable=unused-import
+ except ImportError:
+ has_py3_loader = False
+ else:
+ has_py3_loader = True
+
if collection_full_name:
# allow importing code from collections when testing a collection
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native, text_type
- if sys.version_info >= ansible_controller_min_python_version:
- # noinspection PyProtectedMember
- from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
- from ansible.utils.collection_loader import _collection_finder
- else:
- # noinspection PyProtectedMember
- from ansible_test._internal.legacy_collection_loader._collection_finder import _AnsibleCollectionFinder
- # noinspection PyProtectedMember
- from ansible_test._internal.legacy_collection_loader import _collection_finder
+ # noinspection PyProtectedMember
+ from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
+ from ansible.utils.collection_loader import _collection_finder
yaml_to_dict_cache = {}
@@ -156,12 +157,25 @@ def main():
self.loaded_modules = set()
self.restrict_to_module_paths = restrict_to_module_paths
+ def find_spec(self, fullname, path=None, target=None): # pylint: disable=unused-argument
+        # type: (RestrictedModuleLoader, str, list[str], types.ModuleType | None) -> ModuleSpec | None | ImportError
+ """Return the spec from the loader or None"""
+ loader = self._get_loader(fullname, path=path)
+ if loader is not None:
+ if has_py3_loader:
+                # loader is expected to be Optional[importlib.abc.Loader], but RestrictedModuleLoader does not inherit from importlib.abc.Loader
+ return spec_from_loader(fullname, loader) # type: ignore[arg-type]
+ raise ImportError("Failed to import '%s' due to a bug in ansible-test. Check importlib imports for typos." % fullname)
+ return None
+
def find_module(self, fullname, path=None):
- """Return self if the given fullname is restricted, otherwise return None.
- :param fullname: str
- :param path: str
- :return: RestrictedModuleLoader | None
- """
+ # type: (RestrictedModuleLoader, str, list[str]) -> RestrictedModuleLoader | None
+ """Return self if the given fullname is restricted, otherwise return None."""
+ return self._get_loader(fullname, path=path)
+
+ def _get_loader(self, fullname, path=None):
+ # type: (RestrictedModuleLoader, str, list[str]) -> RestrictedModuleLoader | None
+ """Return self if the given fullname is restricted, otherwise return None."""
if fullname in self.loaded_modules:
return None # ignore modules that are already being loaded
@@ -198,27 +212,49 @@ def main():
# not a namespace we care about
return None
+ def create_module(self, spec): # pylint: disable=unused-argument
+ # type: (RestrictedModuleLoader, ModuleSpec) -> None
+ """Return None to use default module creation."""
+ return None
+
+ def exec_module(self, module):
+ # type: (RestrictedModuleLoader, types.ModuleType) -> None | ImportError
+ """Execute the module if the name is ansible.module_utils.basic and otherwise raise an ImportError"""
+ fullname = module.__spec__.name
+ if fullname == 'ansible.module_utils.basic':
+ self.loaded_modules.add(fullname)
+ for path in convert_ansible_name_to_absolute_paths(fullname):
+ if not os.path.exists(path):
+ continue
+ loader = SourceFileLoader(fullname, path)
+ spec = spec_from_loader(fullname, loader)
+ real_module = module_from_spec(spec)
+ loader.exec_module(real_module)
+ real_module.AnsibleModule = ImporterAnsibleModule # type: ignore[attr-defined]
+ real_module._load_params = lambda *args, **kwargs: {} # type: ignore[attr-defined] # pylint: disable=protected-access
+ sys.modules[fullname] = real_module
+ return None
+ raise ImportError('could not find "%s"' % fullname)
+ raise ImportError('import of "%s" is not allowed in this context' % fullname)
+
def load_module(self, fullname):
- """Raise an ImportError.
- :type fullname: str
- """
+ # type: (RestrictedModuleLoader, str) -> types.ModuleType | ImportError
+ """Return the module if the name is ansible.module_utils.basic and otherwise raise an ImportError."""
if fullname == 'ansible.module_utils.basic':
module = self.__load_module(fullname)
# stop Ansible module execution during AnsibleModule instantiation
- module.AnsibleModule = ImporterAnsibleModule
+ module.AnsibleModule = ImporterAnsibleModule # type: ignore[attr-defined]
# no-op for _load_params since it may be called before instantiating AnsibleModule
- module._load_params = lambda *args, **kwargs: {} # pylint: disable=protected-access
+ module._load_params = lambda *args, **kwargs: {} # type: ignore[attr-defined] # pylint: disable=protected-access
return module
raise ImportError('import of "%s" is not allowed in this context' % fullname)
def __load_module(self, fullname):
- """Load the requested module while avoiding infinite recursion.
- :type fullname: str
- :rtype: module
- """
+ # type: (RestrictedModuleLoader, str) -> types.ModuleType
+ """Load the requested module while avoiding infinite recursion."""
self.loaded_modules.add(fullname)
return import_module(fullname)
@@ -521,42 +557,6 @@ def main():
"ignore",
"Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.")
- if sys.version_info >= (3, 10):
- # Temporary solution for Python 3.10 until find_spec is implemented in RestrictedModuleLoader.
- # That implementation is dependent on find_spec being added to the controller's collection loader first.
- # The warning text is: main.<locals>.RestrictedModuleLoader.find_spec() not found; falling back to find_module()
- warnings.filterwarnings(
- "ignore",
- r"main\.<locals>\.RestrictedModuleLoader\.find_spec\(\) not found; falling back to find_module\(\)",
- )
- # Temporary solution for Python 3.10 until exec_module is implemented in RestrictedModuleLoader.
- # That implementation is dependent on exec_module being added to the controller's collection loader first.
- # The warning text is: main.<locals>.RestrictedModuleLoader.exec_module() not found; falling back to load_module()
- warnings.filterwarnings(
- "ignore",
- r"main\.<locals>\.RestrictedModuleLoader\.exec_module\(\) not found; falling back to load_module\(\)",
- )
-
- # Temporary solution for Python 3.10 until find_spec is implemented in the controller's collection loader.
- warnings.filterwarnings(
- "ignore",
- r"_Ansible.*Finder\.find_spec\(\) not found; falling back to find_module\(\)",
- )
- # Temporary solution for Python 3.10 until exec_module is implemented in the controller's collection loader.
- warnings.filterwarnings(
- "ignore",
- r"_Ansible.*Loader\.exec_module\(\) not found; falling back to load_module\(\)",
- )
-
- # Temporary solution until there is a vendored copy of distutils.version in module_utils.
- # Some of our dependencies such as packaging.tags also import distutils, which we have no control over
- # The warning text is: The distutils package is deprecated and slated for removal in Python 3.12.
- # Use setuptools or check PEP 632 for potential alternatives
- warnings.filterwarnings(
- "ignore",
- r"The distutils package is deprecated and slated for removal in Python 3\.12\. .*",
- )
-
try:
yield
finally:
diff --git a/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1
index 7e039bb4..1fcbaabc 100644
--- a/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1
+++ b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1
@@ -66,28 +66,24 @@ Param (
[switch]$EnableCredSSP
)
-Function Write-Log
-{
+Function Write-ProgressLog {
$Message = $args[0]
Write-EventLog -LogName Application -Source $EventSource -EntryType Information -EventId 1 -Message $Message
}
-Function Write-VerboseLog
-{
+Function Write-VerboseLog {
$Message = $args[0]
Write-Verbose $Message
- Write-Log $Message
+ Write-ProgressLog $Message
}
-Function Write-HostLog
-{
+Function Write-HostLog {
$Message = $args[0]
Write-Output $Message
- Write-Log $Message
+ Write-ProgressLog $Message
}
-Function New-LegacySelfSignedCert
-{
+Function New-LegacySelfSignedCert {
Param (
[string]$SubjectName,
[int]$ValidDays = 1095
@@ -125,14 +121,13 @@ Function New-LegacySelfSignedCert
$SigOID = New-Object -ComObject X509Enrollment.CObjectId
$SigOID.InitializeFromValue(([Security.Cryptography.Oid]$SignatureAlgorithm).Value)
- [string[]] $AlternativeName += $hostnonFQDN
+ [string[]] $AlternativeName += $hostnonFQDN
$AlternativeName += $hostFQDN
$IAlternativeNames = New-Object -ComObject X509Enrollment.CAlternativeNames
- foreach ($AN in $AlternativeName)
- {
+ foreach ($AN in $AlternativeName) {
$AltName = New-Object -ComObject X509Enrollment.CAlternativeName
- $AltName.InitializeFromString(0x3,$AN)
+ $AltName.InitializeFromString(0x3, $AN)
$IAlternativeNames.Add($AltName)
}
@@ -162,15 +157,14 @@ Function New-LegacySelfSignedCert
return $parsed_cert.Thumbprint
}
-Function Enable-GlobalHttpFirewallAccess
-{
+Function Enable-GlobalHttpFirewallAccess {
Write-Verbose "Forcing global HTTP firewall access"
# this is a fairly naive implementation; could be more sophisticated about rule matching/collapsing
$fw = New-Object -ComObject HNetCfg.FWPolicy2
# try to find/enable the default rule first
$add_rule = $false
- $matching_rules = $fw.Rules | Where-Object { $_.Name -eq "Windows Remote Management (HTTP-In)" }
+ $matching_rules = $fw.Rules | Where-Object { $_.Name -eq "Windows Remote Management (HTTP-In)" }
$rule = $null
If ($matching_rules) {
If ($matching_rules -isnot [Array]) {
@@ -217,80 +211,71 @@ Function Enable-GlobalHttpFirewallAccess
}
# Setup error handling.
-Trap
-{
+Trap {
$_
Exit 1
}
$ErrorActionPreference = "Stop"
# Get the ID and security principal of the current user account
-$myWindowsID=[System.Security.Principal.WindowsIdentity]::GetCurrent()
-$myWindowsPrincipal=new-object System.Security.Principal.WindowsPrincipal($myWindowsID)
+$myWindowsID = [System.Security.Principal.WindowsIdentity]::GetCurrent()
+$myWindowsPrincipal = new-object System.Security.Principal.WindowsPrincipal($myWindowsID)
# Get the security principal for the Administrator role
-$adminRole=[System.Security.Principal.WindowsBuiltInRole]::Administrator
+$adminRole = [System.Security.Principal.WindowsBuiltInRole]::Administrator
# Check to see if we are currently running "as Administrator"
-if (-Not $myWindowsPrincipal.IsInRole($adminRole))
-{
+if (-Not $myWindowsPrincipal.IsInRole($adminRole)) {
Write-Output "ERROR: You need elevated Administrator privileges in order to run this script."
Write-Output " Start Windows PowerShell by using the Run as Administrator option."
Exit 2
}
$EventSource = $MyInvocation.MyCommand.Name
-If (-Not $EventSource)
-{
+If (-Not $EventSource) {
$EventSource = "Powershell CLI"
}
-If ([System.Diagnostics.EventLog]::Exists('Application') -eq $False -or [System.Diagnostics.EventLog]::SourceExists($EventSource) -eq $False)
-{
+If ([System.Diagnostics.EventLog]::Exists('Application') -eq $False -or [System.Diagnostics.EventLog]::SourceExists($EventSource) -eq $False) {
New-EventLog -LogName Application -Source $EventSource
}
# Detect PowerShell version.
-If ($PSVersionTable.PSVersion.Major -lt 3)
-{
- Write-Log "PowerShell version 3 or higher is required."
+If ($PSVersionTable.PSVersion.Major -lt 3) {
+ Write-ProgressLog "PowerShell version 3 or higher is required."
Throw "PowerShell version 3 or higher is required."
}
# Find and start the WinRM service.
Write-Verbose "Verifying WinRM service."
-If (!(Get-Service "WinRM"))
-{
- Write-Log "Unable to find the WinRM service."
+If (!(Get-Service "WinRM")) {
+ Write-ProgressLog "Unable to find the WinRM service."
Throw "Unable to find the WinRM service."
}
-ElseIf ((Get-Service "WinRM").Status -ne "Running")
-{
+ElseIf ((Get-Service "WinRM").Status -ne "Running") {
Write-Verbose "Setting WinRM service to start automatically on boot."
Set-Service -Name "WinRM" -StartupType Automatic
- Write-Log "Set WinRM service to start automatically on boot."
+ Write-ProgressLog "Set WinRM service to start automatically on boot."
Write-Verbose "Starting WinRM service."
Start-Service -Name "WinRM" -ErrorAction Stop
- Write-Log "Started WinRM service."
+ Write-ProgressLog "Started WinRM service."
}
# WinRM should be running; check that we have a PS session config.
-If (!(Get-PSSessionConfiguration -Verbose:$false) -or (!(Get-ChildItem WSMan:\localhost\Listener)))
-{
- If ($SkipNetworkProfileCheck) {
- Write-Verbose "Enabling PS Remoting without checking Network profile."
- Enable-PSRemoting -SkipNetworkProfileCheck -Force -ErrorAction Stop
- Write-Log "Enabled PS Remoting without checking Network profile."
- }
- Else {
- Write-Verbose "Enabling PS Remoting."
- Enable-PSRemoting -Force -ErrorAction Stop
- Write-Log "Enabled PS Remoting."
- }
+If (!(Get-PSSessionConfiguration -Verbose:$false) -or (!(Get-ChildItem WSMan:\localhost\Listener))) {
+ If ($SkipNetworkProfileCheck) {
+ Write-Verbose "Enabling PS Remoting without checking Network profile."
+ Enable-PSRemoting -SkipNetworkProfileCheck -Force -ErrorAction Stop
+ Write-ProgressLog "Enabled PS Remoting without checking Network profile."
+ }
+ Else {
+ Write-Verbose "Enabling PS Remoting."
+ Enable-PSRemoting -Force -ErrorAction Stop
+ Write-ProgressLog "Enabled PS Remoting."
+ }
}
-Else
-{
+Else {
Write-Verbose "PS Remoting is already enabled."
}
@@ -310,8 +295,7 @@ if ($token_value -ne 1) {
# Make sure there is a SSL listener.
$listeners = Get-ChildItem WSMan:\localhost\Listener
-If (!($listeners | Where-Object {$_.Keys -like "TRANSPORT=HTTPS"}))
-{
+If (!($listeners | Where-Object { $_.Keys -like "TRANSPORT=HTTPS" })) {
# We cannot use New-SelfSignedCertificate on 2012R2 and earlier
$thumbprint = New-LegacySelfSignedCert -SubjectName $SubjectName -ValidDays $CertValidityDays
Write-HostLog "Self-signed SSL certificate generated; thumbprint: $thumbprint"
@@ -329,15 +313,13 @@ If (!($listeners | Where-Object {$_.Keys -like "TRANSPORT=HTTPS"}))
Write-Verbose "Enabling SSL listener."
New-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset -ValueSet $valueset
- Write-Log "Enabled SSL listener."
+ Write-ProgressLog "Enabled SSL listener."
}
-Else
-{
+Else {
Write-Verbose "SSL listener is already active."
# Force a new SSL cert on Listener if the $ForceNewSSLCert
- If ($ForceNewSSLCert)
- {
+ If ($ForceNewSSLCert) {
# We cannot use New-SelfSignedCertificate on 2012R2 and earlier
$thumbprint = New-LegacySelfSignedCert -SubjectName $SubjectName -ValidDays $CertValidityDays
@@ -361,45 +343,37 @@ Else
}
# Check for basic authentication.
-$basicAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object {$_.Name -eq "Basic"}
+$basicAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object { $_.Name -eq "Basic" }
-If ($DisableBasicAuth)
-{
- If (($basicAuthSetting.Value) -eq $true)
- {
+If ($DisableBasicAuth) {
+ If (($basicAuthSetting.Value) -eq $true) {
Write-Verbose "Disabling basic auth support."
Set-Item -Path "WSMan:\localhost\Service\Auth\Basic" -Value $false
- Write-Log "Disabled basic auth support."
+ Write-ProgressLog "Disabled basic auth support."
}
- Else
- {
+ Else {
Write-Verbose "Basic auth is already disabled."
}
}
-Else
-{
- If (($basicAuthSetting.Value) -eq $false)
- {
+Else {
+ If (($basicAuthSetting.Value) -eq $false) {
Write-Verbose "Enabling basic auth support."
Set-Item -Path "WSMan:\localhost\Service\Auth\Basic" -Value $true
- Write-Log "Enabled basic auth support."
+ Write-ProgressLog "Enabled basic auth support."
}
- Else
- {
+ Else {
Write-Verbose "Basic auth is already enabled."
}
}
# If EnableCredSSP if set to true
-If ($EnableCredSSP)
-{
+If ($EnableCredSSP) {
# Check for CredSSP authentication
- $credsspAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object {$_.Name -eq "CredSSP"}
- If (($credsspAuthSetting.Value) -eq $false)
- {
+ $credsspAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object { $_.Name -eq "CredSSP" }
+ If (($credsspAuthSetting.Value) -eq $false) {
Write-Verbose "Enabling CredSSP auth support."
Enable-WSManCredSSP -role server -Force
- Write-Log "Enabled CredSSP auth support."
+ Write-ProgressLog "Enabled CredSSP auth support."
}
}
@@ -410,44 +384,37 @@ If ($GlobalHttpFirewallAccess) {
# Configure firewall to allow WinRM HTTPS connections.
$fwtest1 = netsh advfirewall firewall show rule name="Allow WinRM HTTPS"
$fwtest2 = netsh advfirewall firewall show rule name="Allow WinRM HTTPS" profile=any
-If ($fwtest1.count -lt 5)
-{
+If ($fwtest1.count -lt 5) {
Write-Verbose "Adding firewall rule to allow WinRM HTTPS."
netsh advfirewall firewall add rule profile=any name="Allow WinRM HTTPS" dir=in localport=5986 protocol=TCP action=allow
- Write-Log "Added firewall rule to allow WinRM HTTPS."
+ Write-ProgressLog "Added firewall rule to allow WinRM HTTPS."
}
-ElseIf (($fwtest1.count -ge 5) -and ($fwtest2.count -lt 5))
-{
+ElseIf (($fwtest1.count -ge 5) -and ($fwtest2.count -lt 5)) {
Write-Verbose "Updating firewall rule to allow WinRM HTTPS for any profile."
netsh advfirewall firewall set rule name="Allow WinRM HTTPS" new profile=any
- Write-Log "Updated firewall rule to allow WinRM HTTPS for any profile."
+ Write-ProgressLog "Updated firewall rule to allow WinRM HTTPS for any profile."
}
-Else
-{
+Else {
Write-Verbose "Firewall rule already exists to allow WinRM HTTPS."
}
# Test a remoting connection to localhost, which should work.
-$httpResult = Invoke-Command -ComputerName "localhost" -ScriptBlock {$env:COMPUTERNAME} -ErrorVariable httpError -ErrorAction SilentlyContinue
+$httpResult = Invoke-Command -ComputerName "localhost" -ScriptBlock { $using:env:COMPUTERNAME } -ErrorVariable httpError -ErrorAction SilentlyContinue
$httpsOptions = New-PSSessionOption -SkipCACheck -SkipCNCheck -SkipRevocationCheck
$httpsResult = New-PSSession -UseSSL -ComputerName "localhost" -SessionOption $httpsOptions -ErrorVariable httpsError -ErrorAction SilentlyContinue
-If ($httpResult -and $httpsResult)
-{
+If ($httpResult -and $httpsResult) {
Write-Verbose "HTTP: Enabled | HTTPS: Enabled"
}
-ElseIf ($httpsResult -and !$httpResult)
-{
+ElseIf ($httpsResult -and !$httpResult) {
Write-Verbose "HTTP: Disabled | HTTPS: Enabled"
}
-ElseIf ($httpResult -and !$httpsResult)
-{
+ElseIf ($httpResult -and !$httpsResult) {
Write-Verbose "HTTP: Enabled | HTTPS: Disabled"
}
-Else
-{
- Write-Log "Unable to establish an HTTP or HTTPS remoting session."
+Else {
+ Write-ProgressLog "Unable to establish an HTTP or HTTPS remoting session."
Throw "Unable to establish an HTTP or HTTPS remoting session."
}
Write-VerboseLog "PS Remoting has been successfully configured for Ansible."
diff --git a/test/lib/ansible_test/_util/target/setup/bootstrap.sh b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
index 53e2ca71..3eeac1dd 100644
--- a/test/lib/ansible_test/_util/target/setup/bootstrap.sh
+++ b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
@@ -80,36 +80,6 @@ pip_install() {
done
}
-bootstrap_remote_aix()
-{
- chfs -a size=1G /
- chfs -a size=4G /usr
- chfs -a size=1G /var
- chfs -a size=1G /tmp
- chfs -a size=2G /opt
-
- if [ "${python_version}" = "2.7" ]; then
- python_package_version=""
- else
- python_package_version="3"
- fi
-
- packages="
- gcc
- python${python_package_version}
- python${python_package_version}-devel
- python${python_package_version}-pip
- "
-
- while true; do
- # shellcheck disable=SC2086
- yum install -q -y ${packages} \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-}
-
bootstrap_remote_freebsd()
{
if [ "${python_version}" = "2.7" ]; then
@@ -130,39 +100,29 @@ bootstrap_remote_freebsd()
"
if [ "${controller}" ]; then
+ jinja2_pkg="py${python_package_version}-jinja2"
+ cryptography_pkg="py${python_package_version}-cryptography"
+ pyyaml_pkg="py${python_package_version}-yaml"
+
# Declare platform/python version combinations which do not have supporting OS packages available.
# For these combinations ansible-test will use pip to install the requirements instead.
case "${platform_version}/${python_version}" in
- "11.4/3.8")
- have_os_packages=""
- ;;
- "12.2/3.8")
- have_os_packages=""
- ;;
"13.0/3.8")
- have_os_packages=""
+ jinja2_pkg="" # not available
+ cryptography_pkg="" # not available
+ pyyaml_pkg="" # not available
;;
"13.0/3.9")
- have_os_packages=""
- ;;
- *)
- have_os_packages="yes"
+ jinja2_pkg="" # not available
+ cryptography_pkg="" # not available
+ pyyaml_pkg="" # not available
;;
esac
- # PyYAML is never installed with an OS package since it does not include libyaml support.
- # Instead, ansible-test will install it using pip.
- if [ "${have_os_packages}" ]; then
- jinja2_pkg="py${python_package_version}-Jinja2"
- cryptography_pkg="py${python_package_version}-cryptography"
- else
- jinja2_pkg=""
- cryptography_pkg=""
- fi
-
packages="
${packages}
libyaml
+ ${pyyaml_pkg}
${jinja2_pkg}
${cryptography_pkg}
"
@@ -233,6 +193,8 @@ bootstrap_remote_rhel_7()
done
install_pip
+
+ bootstrap_remote_rhel_pinned_pip_packages
}
bootstrap_remote_rhel_8()
@@ -248,10 +210,11 @@ bootstrap_remote_rhel_8()
${py_pkg_prefix}-devel
"
+ # Jinja2 is not installed with an OS package since the provided version is too old.
+ # Instead, ansible-test will install it using pip.
if [ "${controller}" ]; then
packages="
${packages}
- ${py_pkg_prefix}-jinja2
${py_pkg_prefix}-cryptography
"
fi
@@ -264,6 +227,38 @@ bootstrap_remote_rhel_8()
echo "Failed to install packages. Sleeping before trying again..."
sleep 10
done
+
+ bootstrap_remote_rhel_pinned_pip_packages
+}
+
+bootstrap_remote_rhel_9()
+{
+ py_pkg_prefix="python3"
+
+ packages="
+ gcc
+ ${py_pkg_prefix}-devel
+ "
+
+ # Jinja2 is not installed with an OS package since the provided version is too old.
+ # Instead, ansible-test will install it using pip.
+ if [ "${controller}" ]; then
+ packages="
+ ${packages}
+ ${py_pkg_prefix}-cryptography
+ ${py_pkg_prefix}-packaging
+ ${py_pkg_prefix}-pyyaml
+ ${py_pkg_prefix}-resolvelib
+ "
+ fi
+
+ while true; do
+ # shellcheck disable=SC2086
+ dnf install -q -y ${packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
}
bootstrap_remote_rhel()
@@ -271,8 +266,12 @@ bootstrap_remote_rhel()
case "${platform_version}" in
7.*) bootstrap_remote_rhel_7 ;;
8.*) bootstrap_remote_rhel_8 ;;
+ 9.*) bootstrap_remote_rhel_9 ;;
esac
+}
+bootstrap_remote_rhel_pinned_pip_packages()
+{
# pin packaging and pyparsing to match the downstream vendored versions
pip_packages="
packaging==20.4
@@ -297,7 +296,6 @@ bootstrap_remote()
python_package_version="$(echo "${python_version}" | tr -d '.')"
case "${platform}" in
- "aix") bootstrap_remote_aix ;;
"freebsd") bootstrap_remote_freebsd ;;
"macos") bootstrap_remote_macos ;;
"rhel") bootstrap_remote_rhel ;;
diff --git a/test/lib/ansible_test/_util/target/setup/quiet_pip.py b/test/lib/ansible_test/_util/target/setup/quiet_pip.py
index fc65c88b..54f0f860 100644
--- a/test/lib/ansible_test/_util/target/setup/quiet_pip.py
+++ b/test/lib/ansible_test/_util/target/setup/quiet_pip.py
@@ -20,26 +20,9 @@ LOGGING_MESSAGE_FILTER = re.compile("^("
"Requirement already satisfied.*"
")$")
-# [1] https://src.fedoraproject.org/rpms/python-pip/blob/master/f/emit-a-warning-when-running-with-root-privileges.patch
+# [1] https://src.fedoraproject.org/rpms/python-pip/blob/f34/f/emit-a-warning-when-running-with-root-privileges.patch
WARNING_MESSAGE_FILTERS = (
- # DEPRECATION: Python 2.6 is no longer supported by the Python core team, please upgrade your Python.
- # A future version of pip will drop support for Python 2.6
- 'Python 2.6 is no longer supported by the Python core team, ',
-
- # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:137: InsecurePlatformWarning:
- # A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail.
- # You can upgrade to a newer version of Python to solve this.
- # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings
- 'A true SSLContext object is not available. ',
-
- # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:339: SNIMissingWarning:
- # An HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform.
- # This may cause the server to present an incorrect TLS certificate, which can cause validation failures.
- # You can upgrade to a newer version of Python to solve this.
- # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings
- 'An HTTPS request has been made, but the SNI ',
-
# DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained.
# pip 21.0 will drop support for Python 2.7 in January 2021.
# More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support
@@ -67,7 +50,6 @@ def main():
for message_filter in WARNING_MESSAGE_FILTERS:
# Setting filterwarnings in code is necessary because of the following:
- # Python 2.6 does not support the PYTHONWARNINGS environment variable. It does support the -W option.
# Python 2.7 cannot use the -W option to match warning text after a colon. This makes it impossible to match specific warning messages.
warnings.filterwarnings('ignore', message_filter)
diff --git a/test/lib/ansible_test/_util/target/setup/requirements.py b/test/lib/ansible_test/_util/target/setup/requirements.py
index f460c5c5..0a29429b 100644
--- a/test/lib/ansible_test/_util/target/setup/requirements.py
+++ b/test/lib/ansible_test/_util/target/setup/requirements.py
@@ -81,7 +81,7 @@ def bootstrap(pip, options): # type: (str, t.Dict[str, t.Any]) -> None
pip_version = options['pip_version']
packages = options['packages']
- url = 'https://ansible-ci-files.s3.amazonaws.com/ansible-test/get-pip-%s.py' % pip_version
+ url = 'https://ci-files.testing.ansible.com/ansible-test/get-pip-%s.py' % pip_version
cache_path = os.path.expanduser('~/.ansible/test/cache/get_pip_%s.py' % pip_version.replace(".", "_"))
temp_path = cache_path + '.download'
@@ -277,12 +277,11 @@ def make_dirs(path): # type: (str) -> None
raise
-def open_binary_file(path, mode='rb'): # type: (str, str) -> t.BinaryIO
+def open_binary_file(path, mode='rb'): # type: (str, str) -> t.IO[bytes]
"""Open the given path for binary access."""
if 'b' not in mode:
raise Exception('mode must include "b" for binary files: %s' % mode)
- # noinspection PyTypeChecker
return io.open(to_bytes(path), mode) # pylint: disable=consider-using-with
diff --git a/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py b/test/lib/ansible_test/_util/target/tools/virtualenvcheck.py
index a38ad074..a38ad074 100644
--- a/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py
+++ b/test/lib/ansible_test/_util/target/tools/virtualenvcheck.py
diff --git a/test/lib/ansible_test/_util/controller/tools/yamlcheck.py b/test/lib/ansible_test/_util/target/tools/yamlcheck.py
index dfd08e58..dfd08e58 100644
--- a/test/lib/ansible_test/_util/controller/tools/yamlcheck.py
+++ b/test/lib/ansible_test/_util/target/tools/yamlcheck.py
diff --git a/test/lib/ansible_test/config/cloud-config-azure.ini.template b/test/lib/ansible_test/config/cloud-config-azure.ini.template
index ac5266ba..766553d1 100644
--- a/test/lib/ansible_test/config/cloud-config-azure.ini.template
+++ b/test/lib/ansible_test/config/cloud-config-azure.ini.template
@@ -9,9 +9,7 @@
# fill in the values below and save this file without the .template extension.
# This will cause ansible-test to use the given configuration instead of temporary credentials.
#
-# NOTE: Automatic provisioning of Azure credentials requires one of:
-# 1) ansible-core-ci API key in ~/.ansible-core-ci.key
-# 2) Sherlock URL (including API key) in ~/.ansible-sherlock-ci.cfg
+# NOTE: Automatic provisioning of Azure credentials requires an ansible-core-ci API key in ~/.ansible-core-ci.key
[default]
# Provide either Service Principal or Active Directory credentials below.
diff --git a/test/sanity/code-smell/ansible-requirements.py b/test/sanity/code-smell/ansible-requirements.py
index 48ecbaaf..4d1a652f 100644
--- a/test/sanity/code-smell/ansible-requirements.py
+++ b/test/sanity/code-smell/ansible-requirements.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.json b/test/sanity/code-smell/ansible-test-future-boilerplate.json
index e689ba5d..ca4c067a 100644
--- a/test/sanity/code-smell/ansible-test-future-boilerplate.json
+++ b/test/sanity/code-smell/ansible-test-future-boilerplate.json
@@ -3,7 +3,8 @@
".py"
],
"prefixes": [
- "test/lib/ansible_test/_internal/"
+ "test/sanity/",
+ "test/lib/ansible_test/"
],
"output": "path-message"
}
diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.py b/test/sanity/code-smell/ansible-test-future-boilerplate.py
index 55092a73..9a622251 100644
--- a/test/sanity/code-smell/ansible-test-future-boilerplate.py
+++ b/test/sanity/code-smell/ansible-test-future-boilerplate.py
@@ -1,12 +1,19 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import ast
import sys
def main():
+ # The following directories contain code which must work under Python 2.x.
+ py2_compat = (
+ 'test/lib/ansible_test/_util/target/',
+ )
+
for path in sys.argv[1:] or sys.stdin.read().splitlines():
+ if any(path.startswith(prefix) for prefix in py2_compat):
+ continue
+
with open(path, 'rb') as path_fd:
lines = path_fd.read().splitlines()
@@ -15,11 +22,16 @@ def main():
# Files are allowed to be empty of everything including boilerplate
missing = False
+ invalid_future = []
+
for text in lines:
if text == b'from __future__ import annotations':
missing = False
break
+ if text.startswith(b'from __future__ ') or text == b'__metaclass__ = type':
+ invalid_future.append(text.decode())
+
if missing:
with open(path) as file:
contents = file.read()
@@ -39,6 +51,9 @@ def main():
if missing:
print('%s: missing: from __future__ import annotations' % path)
+ for text in invalid_future:
+ print('%s: invalid: %s' % (path, text))
+
if __name__ == '__main__':
main()
diff --git a/test/sanity/code-smell/configure-remoting-ps1.py b/test/sanity/code-smell/configure-remoting-ps1.py
index bd216106..fe678008 100644
--- a/test/sanity/code-smell/configure-remoting-ps1.py
+++ b/test/sanity/code-smell/configure-remoting-ps1.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
diff --git a/test/sanity/code-smell/deprecated-config.py b/test/sanity/code-smell/deprecated-config.py
index 53cb2b93..3c5c6459 100644
--- a/test/sanity/code-smell/deprecated-config.py
+++ b/test/sanity/code-smell/deprecated-config.py
@@ -16,8 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import mmap
import os
diff --git a/test/sanity/code-smell/deprecated-config.requirements.in b/test/sanity/code-smell/deprecated-config.requirements.in
new file mode 100644
index 00000000..859c4ee7
--- /dev/null
+++ b/test/sanity/code-smell/deprecated-config.requirements.in
@@ -0,0 +1,2 @@
+jinja2 # ansible-core requirement
+pyyaml
diff --git a/test/sanity/code-smell/deprecated-config.requirements.txt b/test/sanity/code-smell/deprecated-config.requirements.txt
index a3a33e6c..ca5e9115 100644
--- a/test/sanity/code-smell/deprecated-config.requirements.txt
+++ b/test/sanity/code-smell/deprecated-config.requirements.txt
@@ -1,5 +1,4 @@
-jinja2 == 3.0.1 # ansible-core requirement
-pyyaml == 5.4.1
-
-# dependencies
-MarkupSafe == 2.0.1
+# edit "deprecated-config.requirements.in" and generate with: hacking/update-sanity-requirements.py --test deprecated-config
+Jinja2==3.0.3
+MarkupSafe==2.0.1
+PyYAML==6.0
diff --git a/test/sanity/code-smell/docs-build.py b/test/sanity/code-smell/docs-build.py
index ff7d427a..9461620a 100644
--- a/test/sanity/code-smell/docs-build.py
+++ b/test/sanity/code-smell/docs-build.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import re
diff --git a/test/sanity/code-smell/docs-build.requirements.in b/test/sanity/code-smell/docs-build.requirements.in
new file mode 100644
index 00000000..f4f8c9b0
--- /dev/null
+++ b/test/sanity/code-smell/docs-build.requirements.in
@@ -0,0 +1,8 @@
+jinja2
+pyyaml
+resolvelib < 0.6.0
+sphinx == 4.2.0
+sphinx-notfound-page
+sphinx-ansible-theme
+straight.plugin
+antsibull-docs
diff --git a/test/sanity/code-smell/docs-build.requirements.txt b/test/sanity/code-smell/docs-build.requirements.txt
index 36fc363a..6f3dc291 100644
--- a/test/sanity/code-smell/docs-build.requirements.txt
+++ b/test/sanity/code-smell/docs-build.requirements.txt
@@ -1,50 +1,50 @@
-jinja2 == 3.0.1
-pyyaml == 5.4.1
-resolvelib == 0.5.4
-sphinx == 2.1.2
-sphinx-notfound-page == 0.7.1
-sphinx-ansible-theme == 0.8.0
-straight.plugin == 1.5.0
-antsibull == 0.26.0
-
-# dependencies
-MarkupSafe == 2.0.1
-aiofiles == 0.7.0
-aiohttp == 3.7.4.post0
-alabaster == 0.7.12
-ansible-pygments == 0.1.0
-antsibull-changelog == 0.9.0
-async-timeout == 3.0.1
-asyncio-pool == 0.5.2
-attrs == 21.2.0
-babel == 2.9.1
-certifi == 2021.5.30
-chardet == 4.0.0
-charset-normalizer == 2.0.5
-docutils == 0.17.1
-idna == 2.5
-imagesize == 1.2.0
-multidict == 5.1.0
-packaging == 21.0
-perky == 0.5.5
-pydantic == 1.8.2
-pygments == 2.10.0
-pyparsing == 2.4.7
-pytz == 2021.1
-requests == 2.26.0
-rstcheck == 3.3.1
-semantic-version == 2.8.5
-sh == 1.14.2
-six == 1.16.0
-snowballstemmer == 2.1.0
-sphinx-rtd-theme == 1.0.0
-sphinxcontrib-applehelp == 1.0.2
-sphinxcontrib-devhelp == 1.0.2
-sphinxcontrib-htmlhelp == 2.0.0
-sphinxcontrib-jsmath == 1.0.1
-sphinxcontrib-qthelp == 1.0.3
-sphinxcontrib-serializinghtml == 1.1.5
-twiggy == 0.5.1
-typing-extensions == 3.10.0.2
-urllib3 == 1.26.6
-yarl == 1.6.3
+# edit "docs-build.requirements.in" and generate with: hacking/update-sanity-requirements.py --test docs-build
+aiofiles==0.7.0
+aiohttp==3.8.0
+aiosignal==1.2.0
+alabaster==0.7.12
+ansible-pygments==0.1.0
+antsibull-core==1.0.0
+antsibull-docs==1.0.0
+async-timeout==4.0.1
+asyncio-pool==0.5.2
+attrs==21.2.0
+Babel==2.9.1
+certifi==2021.10.8
+charset-normalizer==2.0.7
+docutils==0.17.1
+frozenlist==1.2.0
+idna==3.3
+imagesize==1.3.0
+Jinja2==3.0.3
+MarkupSafe==2.0.1
+multidict==5.2.0
+packaging==21.2
+perky==0.5.5
+pydantic==1.8.2
+Pygments==2.10.0
+pyparsing==2.4.7
+pytz==2021.3
+PyYAML==6.0
+requests==2.26.0
+resolvelib==0.5.4
+rstcheck==3.3.1
+semantic-version==2.8.5
+sh==1.14.2
+six==1.16.0
+snowballstemmer==2.1.0
+Sphinx==4.2.0
+sphinx-ansible-theme==0.9.1
+sphinx-notfound-page==0.8
+sphinx-rtd-theme==1.0.0
+sphinxcontrib-applehelp==1.0.2
+sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-jsmath==1.0.1
+sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-serializinghtml==1.1.5
+straight.plugin==1.5.0
+Twiggy==0.5.1
+typing-extensions==3.10.0.2
+urllib3==1.26.7
+yarl==1.7.2
diff --git a/test/sanity/code-smell/no-unwanted-files.py b/test/sanity/code-smell/no-unwanted-files.py
index 1b55c23e..82f7aff0 100644
--- a/test/sanity/code-smell/no-unwanted-files.py
+++ b/test/sanity/code-smell/no-unwanted-files.py
@@ -1,6 +1,5 @@
"""Prevent unwanted files from being added to the source tree."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import sys
diff --git a/test/sanity/code-smell/obsolete-files.py b/test/sanity/code-smell/obsolete-files.py
index 1fd98027..3c1a4a4c 100644
--- a/test/sanity/code-smell/obsolete-files.py
+++ b/test/sanity/code-smell/obsolete-files.py
@@ -1,6 +1,5 @@
"""Prevent files from being added to directories that are now obsolete."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import sys
diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py
index 06f3f916..8e777b48 100644
--- a/test/sanity/code-smell/package-data.py
+++ b/test/sanity/code-smell/package-data.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import contextlib
import fnmatch
@@ -29,6 +28,7 @@ def assemble_files_to_ship(complete_file_list):
'hacking/tests/*',
'hacking/ticket_stubs/*',
'test/sanity/code-smell/botmeta.*',
+ 'test/sanity/code-smell/release-names.*',
'test/utils/*',
'test/utils/*/*',
'test/utils/*/*/*',
@@ -53,8 +53,9 @@ def assemble_files_to_ship(complete_file_list):
'hacking/report.py',
'hacking/return_skeleton_generator.py',
'hacking/test-module',
- 'hacking/test-module.py',
'test/support/README.md',
+ 'test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py',
+ 'test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py',
'.cherry_picker.toml',
'.mailmap',
# Generated as part of a build step
@@ -74,22 +75,27 @@ def assemble_files_to_ship(complete_file_list):
'hacking/env-setup',
'hacking/env-setup.fish',
'MANIFEST',
+ 'setup.cfg',
+ # docs for test files not included in sdist
+ 'docs/docsite/rst/dev_guide/testing/sanity/bin-symlinks.rst',
+ 'docs/docsite/rst/dev_guide/testing/sanity/botmeta.rst',
+ 'docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst',
+ 'docs/docsite/rst/dev_guide/testing/sanity/release-names.rst',
))
# These files are generated and then intentionally added to the sdist
# Manpages
+ ignore_script = ('ansible-connection', 'ansible-test')
manpages = ['docs/man/man1/ansible.1']
for dirname, dummy, files in os.walk('bin'):
for filename in files:
- path = os.path.join(dirname, filename)
- if os.path.islink(path):
- if os.readlink(path) == 'ansible':
- manpages.append('docs/man/man1/%s.1' % filename)
+ if filename in ignore_script:
+ continue
+ manpages.append('docs/man/man1/%s.1' % filename)
# Misc
misc_generated_files = [
- 'SYMLINK_CACHE.json',
'PKG-INFO',
]
@@ -110,7 +116,11 @@ def assemble_files_to_install(complete_file_list):
"""
This looks for all of the files which should show up in an installation of ansible
"""
- ignore_patterns = tuple()
+ ignore_patterns = (
+ # Tests excluded from sdist
+ 'test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py',
+ 'test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py',
+ )
pkg_data_files = []
for path in complete_file_list:
@@ -257,12 +267,19 @@ def check_sdist_files_are_wanted(sdist_dir, to_ship_files):
dirname = ''
for filename in files:
+ if filename == 'setup.cfg':
+ continue
+
path = os.path.join(dirname, filename)
if path not in to_ship_files:
+
if fnmatch.fnmatch(path, 'changelogs/CHANGELOG-v2.[0-9]*.rst'):
# changelog files are expected
continue
+ if fnmatch.fnmatch(path, 'lib/ansible_core.egg-info/*'):
+ continue
+
# FIXME: ansible-test doesn't pass the paths of symlinks to us so we aren't
# checking those
if not os.path.islink(os.path.join(sdist_dir, path)):
@@ -283,7 +300,7 @@ def check_installed_contains_expected(install_dir, to_install_files):
EGG_RE = re.compile('ansible[^/]+\\.egg-info/(PKG-INFO|SOURCES.txt|'
- 'dependency_links.txt|not-zip-safe|requires.txt|top_level.txt)$')
+ 'dependency_links.txt|not-zip-safe|requires.txt|top_level.txt|entry_points.txt)$')
def check_installed_files_are_wanted(install_dir, to_install_files):
diff --git a/test/sanity/code-smell/package-data.requirements.in b/test/sanity/code-smell/package-data.requirements.in
new file mode 100644
index 00000000..68c2248e
--- /dev/null
+++ b/test/sanity/code-smell/package-data.requirements.in
@@ -0,0 +1,7 @@
+docutils < 0.18 # match version required by sphinx in the docs-build sanity test
+jinja2
+pyyaml # ansible-core requirement
+resolvelib < 0.6.0
+rstcheck
+straight.plugin
+antsibull-changelog
diff --git a/test/sanity/code-smell/package-data.requirements.txt b/test/sanity/code-smell/package-data.requirements.txt
index 8055b3c5..c3caa7fc 100644
--- a/test/sanity/code-smell/package-data.requirements.txt
+++ b/test/sanity/code-smell/package-data.requirements.txt
@@ -1,13 +1,12 @@
-docutils == 0.17.1
-jinja2 == 3.0.1
-packaging == 21.0
-pyyaml == 5.4.1 # ansible-core requirement
-resolvelib == 0.5.4 # ansible-core requirement
-rstcheck == 3.3.1
-straight.plugin == 1.5.0
-antsibull-changelog == 0.9.0
-
-# dependencies
-MarkupSafe == 2.0.1
-pyparsing == 2.4.7
-semantic-version == 2.8.5
+# edit "package-data.requirements.in" and generate with: hacking/update-sanity-requirements.py --test package-data
+antsibull-changelog==0.14.0
+docutils==0.17.1
+Jinja2==3.0.3
+MarkupSafe==2.0.1
+packaging==21.2
+pyparsing==2.4.7
+PyYAML==6.0
+resolvelib==0.5.4
+rstcheck==3.3.1
+semantic-version==2.8.5
+straight.plugin==1.5.0
diff --git a/test/sanity/code-smell/release-names.json b/test/sanity/code-smell/release-names.json
deleted file mode 100644
index 593b765d..00000000
--- a/test/sanity/code-smell/release-names.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "no_targets": true,
- "output": "path-message"
-}
diff --git a/test/sanity/code-smell/release-names.py b/test/sanity/code-smell/release-names.py
deleted file mode 100644
index 4e145062..00000000
--- a/test/sanity/code-smell/release-names.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- coding: utf-8 -*-
-# (c) 2019, Ansible Project
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-"""
-Test that the release name is present in the list of used up release names
-"""
-
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from yaml import safe_load
-
-from ansible.release import __codename__
-
-
-def main():
- """Entrypoint to the script"""
-
- with open('.github/RELEASE_NAMES.yml') as f:
- releases = safe_load(f.read())
-
- # Why this format? The file's sole purpose is to be read by a human when they need to know
- # which release names have already been used. So:
- # 1) It's easier for a human to find the release names when there's one on each line
- # 2) It helps keep other people from using the file and then asking for new features in it
- for name in (r.split(maxsplit=1)[1] for r in releases):
- if __codename__ == name:
- break
- else:
- print('.github/RELEASE_NAMES.yml: Current codename was not present in the file')
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/sanity/code-smell/release-names.requirements.txt b/test/sanity/code-smell/release-names.requirements.txt
deleted file mode 100644
index cc530e42..00000000
--- a/test/sanity/code-smell/release-names.requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-pyyaml == 5.4.1
diff --git a/test/sanity/code-smell/required-and-default-attributes.py b/test/sanity/code-smell/required-and-default-attributes.py
index d71ddeeb..900829dc 100644
--- a/test/sanity/code-smell/required-and-default-attributes.py
+++ b/test/sanity/code-smell/required-and-default-attributes.py
@@ -1,5 +1,4 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import sys
diff --git a/test/sanity/code-smell/rstcheck.py b/test/sanity/code-smell/rstcheck.py
index 7f702846..99917ca8 100644
--- a/test/sanity/code-smell/rstcheck.py
+++ b/test/sanity/code-smell/rstcheck.py
@@ -1,6 +1,5 @@
"""Sanity test using rstcheck and sphinx."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import subprocess
diff --git a/test/sanity/code-smell/rstcheck.requirements.in b/test/sanity/code-smell/rstcheck.requirements.in
new file mode 100644
index 00000000..f9973912
--- /dev/null
+++ b/test/sanity/code-smell/rstcheck.requirements.in
@@ -0,0 +1,3 @@
+sphinx == 4.2.0 # required for full rstcheck functionality, installed first to get the correct docutils version
+rstcheck
+jinja2 # ansible-core requirement
diff --git a/test/sanity/code-smell/rstcheck.requirements.txt b/test/sanity/code-smell/rstcheck.requirements.txt
index 071bc5a1..f8916539 100644
--- a/test/sanity/code-smell/rstcheck.requirements.txt
+++ b/test/sanity/code-smell/rstcheck.requirements.txt
@@ -1,27 +1,25 @@
-rstcheck == 3.3.1
-sphinx == 2.1.2 # required for full functionality
-
-# dependencies
-Jinja2 == 3.0.1
-MarkupSafe == 2.0.1
-Pygments == 2.10.0
-alabaster == 0.7.12
-babel == 2.9.1
-certifi == 2021.5.30
-charset-normalizer == 2.0.5
-docutils == 0.17.1
-idna == 2.5
-imagesize == 1.2.0
-packaging == 21.0
-pyparsing == 2.4.7
-pytz == 2021.1
-requests == 2.26.0
-rstcheck == 3.3.1
-snowballstemmer == 2.1.0
-sphinxcontrib-applehelp == 1.0.2
-sphinxcontrib-devhelp == 1.0.2
-sphinxcontrib-htmlhelp == 2.0.0
-sphinxcontrib-jsmath == 1.0.1
-sphinxcontrib-qthelp == 1.0.3
-sphinxcontrib-serializinghtml == 1.1.5
-urllib3 == 1.26.6
+# edit "rstcheck.requirements.in" and generate with: hacking/update-sanity-requirements.py --test rstcheck
+alabaster==0.7.12
+Babel==2.9.1
+certifi==2021.10.8
+charset-normalizer==2.0.7
+docutils==0.17.1
+idna==3.3
+imagesize==1.3.0
+Jinja2==3.0.3
+MarkupSafe==2.0.1
+packaging==21.2
+Pygments==2.10.0
+pyparsing==2.4.7
+pytz==2021.3
+requests==2.26.0
+rstcheck==3.3.1
+snowballstemmer==2.1.0
+Sphinx==4.2.0
+sphinxcontrib-applehelp==1.0.2
+sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-jsmath==1.0.1
+sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-serializinghtml==1.1.5
+urllib3==1.26.7
diff --git a/test/sanity/code-smell/test-constraints.py b/test/sanity/code-smell/test-constraints.py
index 8383235e..18a7a29b 100644
--- a/test/sanity/code-smell/test-constraints.py
+++ b/test/sanity/code-smell/test-constraints.py
@@ -1,6 +1,6 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import os
import re
import sys
@@ -21,6 +21,11 @@ def main():
non_sanity_requirements = set()
for path, requirements in requirements.items():
+ filename = os.path.basename(path)
+
+ is_sanity = filename.startswith('sanity.') or filename.endswith('.requirements.txt')
+ is_constraints = path == constraints_path
+
for lineno, line, requirement in requirements:
if not requirement:
print('%s:%d:%d: cannot parse requirement: %s' % (path, lineno, 1, line))
@@ -28,14 +33,10 @@ def main():
name = requirement.group('name').lower()
raw_constraints = requirement.group('constraints')
- raw_markers = requirement.group('markers')
constraints = raw_constraints.strip()
- markers = raw_markers.strip()
comment = requirement.group('comment')
- is_sanity = path.startswith('test/lib/ansible_test/_data/requirements/sanity.') or path.startswith('test/sanity/code-smell/')
is_pinned = re.search('^ *== *[0-9.]+(\\.post[0-9]+)?$', constraints)
- is_constraints = path == constraints_path
if is_sanity:
sanity = frozen_sanity.setdefault(name, [])
@@ -43,20 +44,19 @@ def main():
elif not is_constraints:
non_sanity_requirements.add(name)
+ if is_sanity:
+ if not is_pinned:
+ # sanity test requirements must be pinned
+ print('%s:%d:%d: sanity test requirement (%s%s) must be frozen (use `==`)' % (path, lineno, 1, name, raw_constraints))
+
+ continue
+
if constraints and not is_constraints:
allow_constraints = 'sanity_ok' in comment
- if is_sanity and is_pinned and not markers:
- allow_constraints = True # sanity tests can use frozen requirements without markers
-
if not allow_constraints:
- if is_sanity:
- # sanity test requirements which need constraints should be frozen to maintain consistent test results
- # use of anything other than frozen constraints will make evaluation of conflicts extremely difficult
- print('%s:%d:%d: sanity test constraint (%s%s) must be frozen (use `==`)' % (path, lineno, 1, name, raw_constraints))
- else:
- # keeping constraints for tests other than sanity tests in one file helps avoid conflicts
- print('%s:%d:%d: put the constraint (%s%s) in `%s`' % (path, lineno, 1, name, raw_constraints, constraints_path))
+ # keeping constraints for tests other than sanity tests in one file helps avoid conflicts
+ print('%s:%d:%d: put the constraint (%s%s) in `%s`' % (path, lineno, 1, name, raw_constraints, constraints_path))
for name, requirements in frozen_sanity.items():
if len(set(req[3].group('constraints').strip() for req in requirements)) != 1:
diff --git a/test/sanity/code-smell/update-bundled.py b/test/sanity/code-smell/update-bundled.py
index 009f801b..4bad77a6 100644
--- a/test/sanity/code-smell/update-bundled.py
+++ b/test/sanity/code-smell/update-bundled.py
@@ -21,8 +21,7 @@ a newer upstream release.
"""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import fnmatch
import json
diff --git a/test/sanity/code-smell/update-bundled.requirements.in b/test/sanity/code-smell/update-bundled.requirements.in
new file mode 100644
index 00000000..748809f7
--- /dev/null
+++ b/test/sanity/code-smell/update-bundled.requirements.in
@@ -0,0 +1 @@
+packaging
diff --git a/test/sanity/code-smell/update-bundled.requirements.txt b/test/sanity/code-smell/update-bundled.requirements.txt
index 101e3fdb..93330e34 100644
--- a/test/sanity/code-smell/update-bundled.requirements.txt
+++ b/test/sanity/code-smell/update-bundled.requirements.txt
@@ -1,4 +1,3 @@
-packaging == 21.0
-
-# dependencies
-pyparsing == 2.4.7
+# edit "update-bundled.requirements.in" and generate with: hacking/update-sanity-requirements.py --test update-bundled
+packaging==21.2
+pyparsing==2.4.7
diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt
index 1dba93b0..00547573 100644
--- a/test/sanity/ignore.txt
+++ b/test/sanity/ignore.txt
@@ -1,15 +1,45 @@
.azure-pipelines/scripts/publish-codecov.py replace-urlopen
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
docs/docsite/rst/locales/ja/LC_MESSAGES/dev_guide.po no-smart-quotes # Translation of the no-smart-quotes rule
-examples/play.yml shebang
examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
-examples/scripts/my_test.py shebang # example module but not in a normal module location
-examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
-examples/scripts/my_test_info.py shebang # example module but not in a normal module location
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
-lib/ansible/cli/console.py pylint:disallowed-name
-lib/ansible/cli/scripts/ansible_cli_stub.py shebang
+lib/ansible/cli/galaxy.py import-3.8 # unguarded indirect resolvelib import
+lib/ansible/galaxy/collection/__init__.py import-3.8 # unguarded resolvelib import
+lib/ansible/galaxy/collection/concrete_artifact_manager.py import-3.8 # unguarded resolvelib import
+lib/ansible/galaxy/collection/galaxy_api_proxy.py import-3.8 # unguarded resolvelib imports
+lib/ansible/galaxy/collection/gpg.py import-3.8 # unguarded resolvelib imports
+lib/ansible/galaxy/dependency_resolution/__init__.py import-3.8 # circular imports
+lib/ansible/galaxy/dependency_resolution/dataclasses.py import-3.8 # circular imports
+lib/ansible/galaxy/dependency_resolution/errors.py import-3.8 # circular imports
+lib/ansible/galaxy/dependency_resolution/providers.py import-3.8 # circular imports
+lib/ansible/galaxy/dependency_resolution/reporters.py import-3.8 # circular imports
+lib/ansible/galaxy/dependency_resolution/resolvers.py import-3.8 # circular imports
+lib/ansible/galaxy/dependency_resolution/versioning.py import-3.8 # circular imports
+lib/ansible/cli/galaxy.py import-3.9 # unguarded indirect resolvelib import
+lib/ansible/galaxy/collection/__init__.py import-3.9 # unguarded resolvelib import
+lib/ansible/galaxy/collection/concrete_artifact_manager.py import-3.9 # unguarded resolvelib import
+lib/ansible/galaxy/collection/galaxy_api_proxy.py import-3.9 # unguarded resolvelib imports
+lib/ansible/galaxy/collection/gpg.py import-3.9 # unguarded resolvelib imports
+lib/ansible/galaxy/dependency_resolution/__init__.py import-3.9 # circular imports
+lib/ansible/galaxy/dependency_resolution/dataclasses.py import-3.9 # circular imports
+lib/ansible/galaxy/dependency_resolution/errors.py import-3.9 # circular imports
+lib/ansible/galaxy/dependency_resolution/providers.py import-3.9 # circular imports
+lib/ansible/galaxy/dependency_resolution/reporters.py import-3.9 # circular imports
+lib/ansible/galaxy/dependency_resolution/resolvers.py import-3.9 # circular imports
+lib/ansible/galaxy/dependency_resolution/versioning.py import-3.9 # circular imports
+lib/ansible/cli/galaxy.py import-3.10 # unguarded indirect resolvelib import
+lib/ansible/galaxy/collection/__init__.py import-3.10 # unguarded resolvelib import
+lib/ansible/galaxy/collection/concrete_artifact_manager.py import-3.10 # unguarded resolvelib import
+lib/ansible/galaxy/collection/galaxy_api_proxy.py import-3.10 # unguarded resolvelib imports
+lib/ansible/galaxy/collection/gpg.py import-3.10 # unguarded resolvelib imports
+lib/ansible/galaxy/dependency_resolution/__init__.py import-3.10 # circular imports
+lib/ansible/galaxy/dependency_resolution/dataclasses.py import-3.10 # circular imports
+lib/ansible/galaxy/dependency_resolution/errors.py import-3.10 # circular imports
+lib/ansible/galaxy/dependency_resolution/providers.py import-3.10 # circular imports
+lib/ansible/galaxy/dependency_resolution/reporters.py import-3.10 # circular imports
+lib/ansible/galaxy/dependency_resolution/resolvers.py import-3.10 # circular imports
+lib/ansible/galaxy/dependency_resolution/versioning.py import-3.10 # circular imports
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:disallowed-name
@@ -18,45 +48,8 @@ lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/task_queue_manager.py pylint:disallowed-name
lib/ansible/keyword_desc.yml no-unwanted-files
-lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
-lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
-lib/ansible/module_utils/compat/_selectors2.py pylint:disallowed-name
-lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
-lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
-lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
-lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
-lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
-lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
-lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
-lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
-lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
-lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
-lib/ansible/module_utils/distro/_distro.py no-assert
-lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
-lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
-lib/ansible/module_utils/facts/network/linux.py pylint:disallowed-name
-lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
-lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
-lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
-lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
-lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
-lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
-lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
-lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
-lib/ansible/module_utils/pycompat24.py no-get-exception
-lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
-lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
-lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
-lib/ansible/module_utils/six/__init__.py no-basestring
-lib/ansible/module_utils/six/__init__.py no-dict-iteritems
-lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
-lib/ansible/module_utils/six/__init__.py no-dict-itervalues
-lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
-lib/ansible/module_utils/six/__init__.py replace-urlopen
-lib/ansible/module_utils/urls.py pylint:disallowed-name
-lib/ansible/module_utils/urls.py replace-urlopen
-lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
+lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/async_status.py use-argspec-type-path
@@ -101,55 +94,85 @@ lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
-lib/ansible/modules/uri.py pylint:disallowed-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
-lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
lib/ansible/modules/yum.py pylint:disallowed-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
+lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
+lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
+lib/ansible/module_utils/compat/_selectors2.py pylint:disallowed-name
+lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
+lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
+lib/ansible/module_utils/distro/_distro.py no-assert
+lib/ansible/module_utils/distro/_distro.py pylint:using-constant-test # bundled code we don't want to modify
+lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
+lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
+lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
+lib/ansible/module_utils/facts/network/linux.py pylint:disallowed-name
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
+lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
+lib/ansible/module_utils/pycompat24.py no-get-exception
+lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
+lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
+lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
+lib/ansible/module_utils/six/__init__.py no-basestring
+lib/ansible/module_utils/six/__init__.py no-dict-iteritems
+lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
+lib/ansible/module_utils/six/__init__.py no-dict-itervalues
+lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
+lib/ansible/module_utils/six/__init__.py replace-urlopen
+lib/ansible/module_utils/urls.py pylint:arguments-renamed
+lib/ansible/module_utils/urls.py pylint:disallowed-name
+lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/parsing/vault/__init__.py pylint:disallowed-name
lib/ansible/parsing/yaml/objects.py pylint:arguments-renamed
-lib/ansible/plugins/callback/__init__.py pylint:arguments-renamed
-lib/ansible/plugins/inventory/advanced_host_list.py pylint:arguments-renamed
-lib/ansible/plugins/inventory/host_list.py pylint:arguments-renamed
-lib/ansible/plugins/lookup/random_choice.py pylint:arguments-renamed
-lib/ansible/plugins/shell/cmd.py pylint:arguments-renamed
lib/ansible/playbook/base.py pylint:disallowed-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/helpers.py pylint:disallowed-name
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
+lib/ansible/plugins/callback/__init__.py pylint:arguments-renamed
+lib/ansible/plugins/inventory/advanced_host_list.py pylint:arguments-renamed
+lib/ansible/plugins/inventory/host_list.py pylint:arguments-renamed
+lib/ansible/plugins/lookup/random_choice.py pylint:arguments-renamed
lib/ansible/plugins/lookup/sequence.py pylint:disallowed-name
+lib/ansible/plugins/shell/cmd.py pylint:arguments-renamed
lib/ansible/plugins/strategy/__init__.py pylint:disallowed-name
lib/ansible/plugins/strategy/linear.py pylint:disallowed-name
-lib/ansible/vars/hostvars.py pylint:disallowed-name
lib/ansible/utils/collection_loader/_collection_finder.py pylint:deprecated-class
lib/ansible/utils/collection_loader/_collection_meta.py pylint:deprecated-class
-test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
-test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
-test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
+lib/ansible/vars/hostvars.py pylint:disallowed-name
test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
-test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
-test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
+test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
+test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
-test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
-test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
-test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
-test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
-test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
-test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
-test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
@@ -161,8 +184,8 @@ test/integration/targets/module_precedence/roles_with_extension/foo/library/ping
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:disallowed-name
test/integration/targets/module_utils/module_utils/foo.py pylint:disallowed-name
-test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:disallowed-name
test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:disallowed-name
test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:disallowed-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
@@ -170,23 +193,22 @@ test/integration/targets/template/files/encoding_1252_windows-1252.expected no-s
test/integration/targets/template/files/foo.dos.txt line-endings
test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
test/integration/targets/unicode/unicode.yml no-smart-quotes
+test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explictly testing smart quotes in the file name to fetch
test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
-test/integration/targets/win_script/files/test_script_creates_file.ps1 pslint:PSAvoidUsingCmdletAliases
test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
-test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/lib/ansible_test/_util/target/setup/requirements.py replace-urlopen
test/support/integration/plugins/inventory/aws_ec2.py pylint:use-a-generator
-test/support/integration/plugins/module_utils/network/common/utils.py pylint:use-a-generator
test/support/integration/plugins/modules/ec2_group.py pylint:use-a-generator
+test/support/integration/plugins/modules/timezone.py pylint:disallowed-name
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
@@ -194,52 +216,119 @@ test/support/integration/plugins/module_utils/cloud.py pylint:isinstance-second-
test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
-test/support/integration/plugins/module_utils/database.py future-import-boilerplate
-test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
-test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
-test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
-test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
-test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
-test/support/integration/plugins/modules/lvg.py pylint:disallowed-name
-test/support/integration/plugins/modules/timezone.py pylint:disallowed-name
+test/support/integration/plugins/module_utils/network/common/utils.py pylint:use-a-generator
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py pylint:consider-using-dict-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py pylint:use-a-generator
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py pylint:arguments-renamed
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py pylint:arguments-renamed
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
+test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py pylint:arguments-renamed
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:disallowed-name
+test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/module_utils/WebRequest.psm1 pslint!skip
+test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.ps1 pslint!skip
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/slurp.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_acl.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_certificate_store.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_command.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
-test/support/windows-integration/plugins/modules/win_dsc.ps1 pslint!skip
-test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
-test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_file.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_get_url.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_stat.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_tempfile.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_user_right.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_user.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
+test/support/windows-integration/plugins/modules/win_whoami.ps1 pslint!skip
test/units/executor/test_play_iterator.py pylint:disallowed-name
+test/units/modules/test_apt.py pylint:disallowed-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
test/units/module_utils/basic/test_run_command.py pylint:disallowed-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
-test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
-test/units/modules/test_apt.py pylint:disallowed-name
+test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/parsing/vault/test_vault.py pylint:disallowed-name
test/units/playbook/role/test_role.py pylint:disallowed-name
test/units/plugins/test_plugins.py pylint:disallowed-name
test/units/template/test_templar.py pylint:disallowed-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
-test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
+lib/ansible/module_utils/six/__init__.py mypy-2.7:has-type # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.5:has-type # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.6:has-type # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.7:has-type # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.8:has-type # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.9:has-type # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.10:has-type # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-2.7:name-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.5:name-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.6:name-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.7:name-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.8:name-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.9:name-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.10:name-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-2.7:assignment # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.5:assignment # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.6:assignment # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.7:assignment # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-2.7:misc # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.5:misc # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.6:misc # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.7:misc # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.8:misc # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.9:misc # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.10:misc # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-2.7:var-annotated # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-2.7:attr-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.5:var-annotated # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.5:attr-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.6:var-annotated # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.6:attr-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.7:var-annotated # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.7:attr-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.8:var-annotated # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.8:attr-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.9:var-annotated # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.9:attr-defined # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.10:var-annotated # vendored code
+lib/ansible/module_utils/six/__init__.py mypy-3.10:attr-defined # vendored code
+lib/ansible/module_utils/distro/_distro.py mypy-2.7:arg-type # vendored code
+lib/ansible/module_utils/distro/_distro.py mypy-3.5:valid-type # vendored code
+lib/ansible/module_utils/distro/_distro.py mypy-3.6:valid-type # vendored code
+lib/ansible/module_utils/distro/_distro.py mypy-3.7:valid-type # vendored code
+lib/ansible/module_utils/distro/_distro.py mypy-2.7:assignment # vendored code
+lib/ansible/module_utils/distro/_distro.py mypy-2.7:attr-defined # vendored code
+lib/ansible/module_utils/distro/_distro.py mypy-3.5:attr-defined # vendored code
+lib/ansible/module_utils/distro/_distro.py mypy-3.6:attr-defined # vendored code
+lib/ansible/module_utils/distro/_distro.py mypy-3.7:attr-defined # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-2.7:misc # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.5:misc # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.6:misc # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.7:misc # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.8:misc # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.9:misc # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.10:misc # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-2.7:assignment # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.5:assignment # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.6:assignment # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.7:assignment # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.8:assignment # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.9:assignment # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-3.10:assignment # vendored code
+lib/ansible/module_utils/compat/_selectors2.py mypy-2.7:attr-defined # vendored code
diff --git a/test/support/integration/plugins/inventory/foreman.py b/test/support/integration/plugins/inventory/foreman.py
index 39e0de33..d026ebdb 100644
--- a/test/support/integration/plugins/inventory/foreman.py
+++ b/test/support/integration/plugins/inventory/foreman.py
@@ -81,11 +81,12 @@ password: secure
validate_certs: False
'''
+from collections.abc import MutableMapping
+
from ansible.module_utils.compat.version import LooseVersion
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_native, to_text
-from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable, to_safe_group_name, Constructable
# 3rd party imports
diff --git a/test/support/integration/plugins/lookup/rabbitmq.py b/test/support/integration/plugins/lookup/rabbitmq.py
deleted file mode 100644
index 7c2745f4..00000000
--- a/test/support/integration/plugins/lookup/rabbitmq.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# (c) 2018, John Imison <john+github@imison.net>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-DOCUMENTATION = """
- lookup: rabbitmq
- author: John Imison <@Im0>
- version_added: "2.8"
- short_description: Retrieve messages from an AMQP/AMQPS RabbitMQ queue.
- description:
- - This lookup uses a basic get to retrieve all, or a limited number C(count), messages from a RabbitMQ queue.
- options:
- url:
- description:
- - An URI connection string to connect to the AMQP/AMQPS RabbitMQ server.
- - For more information refer to the URI spec U(https://www.rabbitmq.com/uri-spec.html).
- required: True
- queue:
- description:
- - The queue to get messages from.
- required: True
- count:
- description:
- - How many messages to collect from the queue.
- - If not set, defaults to retrieving all the messages from the queue.
- requirements:
- - The python pika package U(https://pypi.org/project/pika/).
- notes:
- - This lookup implements BlockingChannel.basic_get to get messages from a RabbitMQ server.
- - After retrieving a message from the server, receipt of the message is acknowledged and the message on the server is deleted.
- - Pika is a pure-Python implementation of the AMQP 0-9-1 protocol that tries to stay fairly independent of the underlying network support library.
- - More information about pika can be found at U(https://pika.readthedocs.io/en/stable/).
- - This plugin is tested against RabbitMQ. Other AMQP 0.9.1 protocol based servers may work but not tested/guaranteed.
- - Assigning the return messages to a variable under C(vars) may result in unexpected results as the lookup is evaluated every time the
- variable is referenced.
- - Currently this plugin only handles text based messages from a queue. Unexpected results may occur when retrieving binary data.
-"""
-
-
-EXAMPLES = """
-- name: Get all messages off a queue
- debug:
- msg: "{{ lookup('rabbitmq', url='amqp://guest:guest@192.168.0.10:5672/%2F', queue='hello') }}"
-
-
-# If you are intending on using the returned messages as a variable in more than
-# one task (eg. debug, template), it is recommended to set_fact.
-
-- name: Get 2 messages off a queue and set a fact for re-use
- set_fact:
- messages: "{{ lookup('rabbitmq', url='amqp://guest:guest@192.168.0.10:5672/%2F', queue='hello', count=2) }}"
-
-- name: Dump out contents of the messages
- debug:
- var: messages
-
-"""
-
-RETURN = """
- _list:
- description:
- - A list of dictionaries with keys and value from the queue.
- type: list
- contains:
- content_type:
- description: The content_type on the message in the queue.
- type: str
- delivery_mode:
- description: The delivery_mode on the message in the queue.
- type: str
- delivery_tag:
- description: The delivery_tag on the message in the queue.
- type: str
- exchange:
- description: The exchange the message came from.
- type: str
- message_count:
- description: The message_count for the message on the queue.
- type: str
- msg:
- description: The content of the message.
- type: str
- redelivered:
- description: The redelivered flag. True if the message has been delivered before.
- type: bool
- routing_key:
- description: The routing_key on the message in the queue.
- type: str
- headers:
- description: The headers for the message returned from the queue.
- type: dict
- json:
- description: If application/json is specified in content_type, json will be loaded into variables.
- type: dict
-
-"""
-
-import json
-
-from ansible.errors import AnsibleError, AnsibleParserError
-from ansible.plugins.lookup import LookupBase
-from ansible.module_utils._text import to_native, to_text
-from ansible.utils.display import Display
-
-try:
- import pika
- from pika import spec
- HAS_PIKA = True
-except ImportError:
- HAS_PIKA = False
-
-display = Display()
-
-
-class LookupModule(LookupBase):
-
- def run(self, terms, variables=None, url=None, queue=None, count=None):
- if not HAS_PIKA:
- raise AnsibleError('pika python package is required for rabbitmq lookup.')
- if not url:
- raise AnsibleError('URL is required for rabbitmq lookup.')
- if not queue:
- raise AnsibleError('Queue is required for rabbitmq lookup.')
-
- display.vvv(u"terms:%s : variables:%s url:%s queue:%s count:%s" % (terms, variables, url, queue, count))
-
- try:
- parameters = pika.URLParameters(url)
- except Exception as e:
- raise AnsibleError("URL malformed: %s" % to_native(e))
-
- try:
- connection = pika.BlockingConnection(parameters)
- except Exception as e:
- raise AnsibleError("Connection issue: %s" % to_native(e))
-
- try:
- conn_channel = connection.channel()
- except pika.exceptions.AMQPChannelError as e:
- try:
- connection.close()
- except pika.exceptions.AMQPConnectionError as ie:
- raise AnsibleError("Channel and connection closing issues: %s / %s" % to_native(e), to_native(ie))
- raise AnsibleError("Channel issue: %s" % to_native(e))
-
- ret = []
- idx = 0
-
- while True:
- method_frame, properties, body = conn_channel.basic_get(queue=queue)
- if method_frame:
- display.vvv(u"%s, %s, %s " % (method_frame, properties, to_text(body)))
-
- # TODO: In the future consider checking content_type and handle text/binary data differently.
- msg_details = dict({
- 'msg': to_text(body),
- 'message_count': method_frame.message_count,
- 'routing_key': method_frame.routing_key,
- 'delivery_tag': method_frame.delivery_tag,
- 'redelivered': method_frame.redelivered,
- 'exchange': method_frame.exchange,
- 'delivery_mode': properties.delivery_mode,
- 'content_type': properties.content_type,
- 'headers': properties.headers
- })
- if properties.content_type == 'application/json':
- try:
- msg_details['json'] = json.loads(msg_details['msg'])
- except ValueError as e:
- raise AnsibleError("Unable to decode JSON for message %s: %s" % (method_frame.delivery_tag, to_native(e)))
-
- ret.append(msg_details)
- conn_channel.basic_ack(method_frame.delivery_tag)
- idx += 1
- if method_frame.message_count == 0 or idx == count:
- break
- # If we didn't get a method_frame, exit.
- else:
- break
-
- if connection.is_closed:
- return [ret]
- else:
- try:
- connection.close()
- except pika.exceptions.AMQPConnectionError:
- pass
- return [ret]
diff --git a/test/support/integration/plugins/module_utils/crypto.py b/test/support/integration/plugins/module_utils/crypto.py
deleted file mode 100644
index f3f43f07..00000000
--- a/test/support/integration/plugins/module_utils/crypto.py
+++ /dev/null
@@ -1,2125 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-#
-# ----------------------------------------------------------------------
-# A clearly marked portion of this file is licensed under the BSD license
-# Copyright (c) 2015, 2016 Paul Kehrer (@reaperhulk)
-# Copyright (c) 2017 Fraser Tweedale (@frasertweedale)
-# For more details, search for the function _obj2txt().
-# ---------------------------------------------------------------------
-# A clearly marked portion of this file is extracted from a project that
-# is licensed under the Apache License 2.0
-# Copyright (c) the OpenSSL contributors
-# For more details, search for the function _OID_MAP.
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-import sys
-from ansible.module_utils.compat.version import LooseVersion
-
-try:
- import OpenSSL
- from OpenSSL import crypto
-except ImportError:
- # An error will be raised in the calling class to let the end
- # user know that OpenSSL couldn't be found.
- pass
-
-try:
- import cryptography
- from cryptography import x509
- from cryptography.hazmat.backends import default_backend as cryptography_backend
- from cryptography.hazmat.primitives.serialization import load_pem_private_key
- from cryptography.hazmat.primitives import hashes
- from cryptography.hazmat.primitives import serialization
- import ipaddress
-
- # Older versions of cryptography (< 2.1) do not have __hash__ functions for
- # general name objects (DNSName, IPAddress, ...), while providing overloaded
- # equality and string representation operations. This makes it impossible to
- # use them in hash-based data structures such as set or dict. Since we are
- # actually doing that in openssl_certificate, and potentially in other code,
- # we need to monkey-patch __hash__ for these classes to make sure our code
- # works fine.
- if LooseVersion(cryptography.__version__) < LooseVersion('2.1'):
- # A very simply hash function which relies on the representation
- # of an object to be implemented. This is the case since at least
- # cryptography 1.0, see
- # https://github.com/pyca/cryptography/commit/7a9abce4bff36c05d26d8d2680303a6f64a0e84f
- def simple_hash(self):
- return hash(repr(self))
-
- # The hash functions for the following types were added for cryptography 2.1:
- # https://github.com/pyca/cryptography/commit/fbfc36da2a4769045f2373b004ddf0aff906cf38
- x509.DNSName.__hash__ = simple_hash
- x509.DirectoryName.__hash__ = simple_hash
- x509.GeneralName.__hash__ = simple_hash
- x509.IPAddress.__hash__ = simple_hash
- x509.OtherName.__hash__ = simple_hash
- x509.RegisteredID.__hash__ = simple_hash
-
- if LooseVersion(cryptography.__version__) < LooseVersion('1.2'):
- # The hash functions for the following types were added for cryptography 1.2:
- # https://github.com/pyca/cryptography/commit/b642deed88a8696e5f01ce6855ccf89985fc35d0
- # https://github.com/pyca/cryptography/commit/d1b5681f6db2bde7a14625538bd7907b08dfb486
- x509.RFC822Name.__hash__ = simple_hash
- x509.UniformResourceIdentifier.__hash__ = simple_hash
-
- # Test whether we have support for X25519, X448, Ed25519 and/or Ed448
- try:
- import cryptography.hazmat.primitives.asymmetric.x25519
- CRYPTOGRAPHY_HAS_X25519 = True
- try:
- cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey.private_bytes
- CRYPTOGRAPHY_HAS_X25519_FULL = True
- except AttributeError:
- CRYPTOGRAPHY_HAS_X25519_FULL = False
- except ImportError:
- CRYPTOGRAPHY_HAS_X25519 = False
- CRYPTOGRAPHY_HAS_X25519_FULL = False
- try:
- import cryptography.hazmat.primitives.asymmetric.x448
- CRYPTOGRAPHY_HAS_X448 = True
- except ImportError:
- CRYPTOGRAPHY_HAS_X448 = False
- try:
- import cryptography.hazmat.primitives.asymmetric.ed25519
- CRYPTOGRAPHY_HAS_ED25519 = True
- except ImportError:
- CRYPTOGRAPHY_HAS_ED25519 = False
- try:
- import cryptography.hazmat.primitives.asymmetric.ed448
- CRYPTOGRAPHY_HAS_ED448 = True
- except ImportError:
- CRYPTOGRAPHY_HAS_ED448 = False
-
- HAS_CRYPTOGRAPHY = True
-except ImportError:
- # Error handled in the calling module.
- CRYPTOGRAPHY_HAS_X25519 = False
- CRYPTOGRAPHY_HAS_X25519_FULL = False
- CRYPTOGRAPHY_HAS_X448 = False
- CRYPTOGRAPHY_HAS_ED25519 = False
- CRYPTOGRAPHY_HAS_ED448 = False
- HAS_CRYPTOGRAPHY = False
-
-
-import abc
-import base64
-import binascii
-import datetime
-import errno
-import hashlib
-import os
-import re
-import tempfile
-
-from ansible.module_utils import six
-from ansible.module_utils._text import to_native, to_bytes, to_text
-
-
-class OpenSSLObjectError(Exception):
- pass
-
-
-class OpenSSLBadPassphraseError(OpenSSLObjectError):
- pass
-
-
-def get_fingerprint_of_bytes(source):
- """Generate the fingerprint of the given bytes."""
-
- fingerprint = {}
-
- try:
- algorithms = hashlib.algorithms
- except AttributeError:
- try:
- algorithms = hashlib.algorithms_guaranteed
- except AttributeError:
- return None
-
- for algo in algorithms:
- f = getattr(hashlib, algo)
- try:
- h = f(source)
- except ValueError:
- # This can happen for hash algorithms not supported in FIPS mode
- # (https://github.com/ansible/ansible/issues/67213)
- continue
- try:
- # Certain hash functions have a hexdigest() which expects a length parameter
- pubkey_digest = h.hexdigest()
- except TypeError:
- pubkey_digest = h.hexdigest(32)
- fingerprint[algo] = ':'.join(pubkey_digest[i:i + 2] for i in range(0, len(pubkey_digest), 2))
-
- return fingerprint
-
-
-def get_fingerprint(path, passphrase=None, content=None, backend='pyopenssl'):
- """Generate the fingerprint of the public key. """
-
- privatekey = load_privatekey(path, passphrase=passphrase, content=content, check_passphrase=False, backend=backend)
-
- if backend == 'pyopenssl':
- try:
- publickey = crypto.dump_publickey(crypto.FILETYPE_ASN1, privatekey)
- except AttributeError:
- # If PyOpenSSL < 16.0 crypto.dump_publickey() will fail.
- try:
- bio = crypto._new_mem_buf()
- rc = crypto._lib.i2d_PUBKEY_bio(bio, privatekey._pkey)
- if rc != 1:
- crypto._raise_current_error()
- publickey = crypto._bio_to_string(bio)
- except AttributeError:
- # By doing this we prevent the code from raising an error
- # yet we return no value in the fingerprint hash.
- return None
- elif backend == 'cryptography':
- publickey = privatekey.public_key().public_bytes(
- serialization.Encoding.DER,
- serialization.PublicFormat.SubjectPublicKeyInfo
- )
-
- return get_fingerprint_of_bytes(publickey)
-
-
-def load_file_if_exists(path, module=None, ignore_errors=False):
- try:
- with open(path, 'rb') as f:
- return f.read()
- except EnvironmentError as exc:
- if exc.errno == errno.ENOENT:
- return None
- if ignore_errors:
- return None
- if module is None:
- raise
- module.fail_json('Error while loading {0} - {1}'.format(path, str(exc)))
- except Exception as exc:
- if ignore_errors:
- return None
- if module is None:
- raise
- module.fail_json('Error while loading {0} - {1}'.format(path, str(exc)))
-
-
-def load_privatekey(path, passphrase=None, check_passphrase=True, content=None, backend='pyopenssl'):
- """Load the specified OpenSSL private key.
-
- The content can also be specified via content; in that case,
- this function will not load the key from disk.
- """
-
- try:
- if content is None:
- with open(path, 'rb') as b_priv_key_fh:
- priv_key_detail = b_priv_key_fh.read()
- else:
- priv_key_detail = content
-
- if backend == 'pyopenssl':
-
- # First try: try to load with real passphrase (resp. empty string)
- # Will work if this is the correct passphrase, or the key is not
- # password-protected.
- try:
- result = crypto.load_privatekey(crypto.FILETYPE_PEM,
- priv_key_detail,
- to_bytes(passphrase or ''))
- except crypto.Error as e:
- if len(e.args) > 0 and len(e.args[0]) > 0:
- if e.args[0][0][2] in ('bad decrypt', 'bad password read'):
- # This happens in case we have the wrong passphrase.
- if passphrase is not None:
- raise OpenSSLBadPassphraseError('Wrong passphrase provided for private key!')
- else:
- raise OpenSSLBadPassphraseError('No passphrase provided, but private key is password-protected!')
- raise OpenSSLObjectError('Error while deserializing key: {0}'.format(e))
- if check_passphrase:
- # Next we want to make sure that the key is actually protected by
- # a passphrase (in case we did try the empty string before, make
- # sure that the key is not protected by the empty string)
- try:
- crypto.load_privatekey(crypto.FILETYPE_PEM,
- priv_key_detail,
- to_bytes('y' if passphrase == 'x' else 'x'))
- if passphrase is not None:
- # Since we can load the key without an exception, the
- # key isn't password-protected
- raise OpenSSLBadPassphraseError('Passphrase provided, but private key is not password-protected!')
- except crypto.Error as e:
- if passphrase is None and len(e.args) > 0 and len(e.args[0]) > 0:
- if e.args[0][0][2] in ('bad decrypt', 'bad password read'):
- # The key is obviously protected by the empty string.
- # Don't do this at home (if it's possible at all)...
- raise OpenSSLBadPassphraseError('No passphrase provided, but private key is password-protected!')
- elif backend == 'cryptography':
- try:
- result = load_pem_private_key(priv_key_detail,
- None if passphrase is None else to_bytes(passphrase),
- cryptography_backend())
- except TypeError as dummy:
- raise OpenSSLBadPassphraseError('Wrong or empty passphrase provided for private key')
- except ValueError as dummy:
- raise OpenSSLBadPassphraseError('Wrong passphrase provided for private key')
-
- return result
- except (IOError, OSError) as exc:
- raise OpenSSLObjectError(exc)
-
-
-def load_certificate(path, content=None, backend='pyopenssl'):
- """Load the specified certificate."""
-
- try:
- if content is None:
- with open(path, 'rb') as cert_fh:
- cert_content = cert_fh.read()
- else:
- cert_content = content
- if backend == 'pyopenssl':
- return crypto.load_certificate(crypto.FILETYPE_PEM, cert_content)
- elif backend == 'cryptography':
- return x509.load_pem_x509_certificate(cert_content, cryptography_backend())
- except (IOError, OSError) as exc:
- raise OpenSSLObjectError(exc)
-
-
-def load_certificate_request(path, content=None, backend='pyopenssl'):
- """Load the specified certificate signing request."""
- try:
- if content is None:
- with open(path, 'rb') as csr_fh:
- csr_content = csr_fh.read()
- else:
- csr_content = content
- except (IOError, OSError) as exc:
- raise OpenSSLObjectError(exc)
- if backend == 'pyopenssl':
- return crypto.load_certificate_request(crypto.FILETYPE_PEM, csr_content)
- elif backend == 'cryptography':
- return x509.load_pem_x509_csr(csr_content, cryptography_backend())
-
-
-def parse_name_field(input_dict):
- """Take a dict with key: value or key: list_of_values mappings and return a list of tuples"""
-
- result = []
- for key in input_dict:
- if isinstance(input_dict[key], list):
- for entry in input_dict[key]:
- result.append((key, entry))
- else:
- result.append((key, input_dict[key]))
- return result
-
-
-def convert_relative_to_datetime(relative_time_string):
- """Get a datetime.datetime or None from a string in the time format described in sshd_config(5)"""
-
- parsed_result = re.match(
- r"^(?P<prefix>[+-])((?P<weeks>\d+)[wW])?((?P<days>\d+)[dD])?((?P<hours>\d+)[hH])?((?P<minutes>\d+)[mM])?((?P<seconds>\d+)[sS]?)?$",
- relative_time_string)
-
- if parsed_result is None or len(relative_time_string) == 1:
- # not matched or only a single "+" or "-"
- return None
-
- offset = datetime.timedelta(0)
- if parsed_result.group("weeks") is not None:
- offset += datetime.timedelta(weeks=int(parsed_result.group("weeks")))
- if parsed_result.group("days") is not None:
- offset += datetime.timedelta(days=int(parsed_result.group("days")))
- if parsed_result.group("hours") is not None:
- offset += datetime.timedelta(hours=int(parsed_result.group("hours")))
- if parsed_result.group("minutes") is not None:
- offset += datetime.timedelta(
- minutes=int(parsed_result.group("minutes")))
- if parsed_result.group("seconds") is not None:
- offset += datetime.timedelta(
- seconds=int(parsed_result.group("seconds")))
-
- if parsed_result.group("prefix") == "+":
- return datetime.datetime.utcnow() + offset
- else:
- return datetime.datetime.utcnow() - offset
-
-
-def get_relative_time_option(input_string, input_name, backend='cryptography'):
- """Return an absolute timespec if a relative timespec or an ASN1 formatted
- string is provided.
-
- The return value will be a datetime object for the cryptography backend,
- and a ASN1 formatted string for the pyopenssl backend."""
- result = to_native(input_string)
- if result is None:
- raise OpenSSLObjectError(
- 'The timespec "%s" for %s is not valid' %
- input_string, input_name)
- # Relative time
- if result.startswith("+") or result.startswith("-"):
- result_datetime = convert_relative_to_datetime(result)
- if backend == 'pyopenssl':
- return result_datetime.strftime("%Y%m%d%H%M%SZ")
- elif backend == 'cryptography':
- return result_datetime
- # Absolute time
- if backend == 'pyopenssl':
- return input_string
- elif backend == 'cryptography':
- for date_fmt in ['%Y%m%d%H%M%SZ', '%Y%m%d%H%MZ', '%Y%m%d%H%M%S%z', '%Y%m%d%H%M%z']:
- try:
- return datetime.datetime.strptime(result, date_fmt)
- except ValueError:
- pass
-
- raise OpenSSLObjectError(
- 'The time spec "%s" for %s is invalid' %
- (input_string, input_name)
- )
-
-
-def select_message_digest(digest_string):
- digest = None
- if digest_string == 'sha256':
- digest = hashes.SHA256()
- elif digest_string == 'sha384':
- digest = hashes.SHA384()
- elif digest_string == 'sha512':
- digest = hashes.SHA512()
- elif digest_string == 'sha1':
- digest = hashes.SHA1()
- elif digest_string == 'md5':
- digest = hashes.MD5()
- return digest
-
-
-def write_file(module, content, default_mode=None, path=None):
- '''
- Writes content into destination file as securely as possible.
- Uses file arguments from module.
- '''
- # Find out parameters for file
- file_args = module.load_file_common_arguments(module.params, path=path)
- if file_args['mode'] is None:
- file_args['mode'] = default_mode
- # Create tempfile name
- tmp_fd, tmp_name = tempfile.mkstemp(prefix=b'.ansible_tmp')
- try:
- os.close(tmp_fd)
- except Exception as dummy:
- pass
- module.add_cleanup_file(tmp_name) # if we fail, let Ansible try to remove the file
- try:
- try:
- # Create tempfile
- file = os.open(tmp_name, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
- os.write(file, content)
- os.close(file)
- except Exception as e:
- try:
- os.remove(tmp_name)
- except Exception as dummy:
- pass
- module.fail_json(msg='Error while writing result into temporary file: {0}'.format(e))
- # Update destination to wanted permissions
- if os.path.exists(file_args['path']):
- module.set_fs_attributes_if_different(file_args, False)
- # Move tempfile to final destination
- module.atomic_move(tmp_name, file_args['path'])
- # Try to update permissions again
- module.set_fs_attributes_if_different(file_args, False)
- except Exception as e:
- try:
- os.remove(tmp_name)
- except Exception as dummy:
- pass
- module.fail_json(msg='Error while writing result: {0}'.format(e))
-
-
-@six.add_metaclass(abc.ABCMeta)
-class OpenSSLObject(object):
-
- def __init__(self, path, state, force, check_mode):
- self.path = path
- self.state = state
- self.force = force
- self.name = os.path.basename(path)
- self.changed = False
- self.check_mode = check_mode
-
- def check(self, module, perms_required=True):
- """Ensure the resource is in its desired state."""
-
- def _check_state():
- return os.path.exists(self.path)
-
- def _check_perms(module):
- file_args = module.load_file_common_arguments(module.params)
- return not module.set_fs_attributes_if_different(file_args, False)
-
- if not perms_required:
- return _check_state()
-
- return _check_state() and _check_perms(module)
-
- @abc.abstractmethod
- def dump(self):
- """Serialize the object into a dictionary."""
-
- pass
-
- @abc.abstractmethod
- def generate(self):
- """Generate the resource."""
-
- pass
-
- def remove(self, module):
- """Remove the resource from the filesystem."""
-
- try:
- os.remove(self.path)
- self.changed = True
- except OSError as exc:
- if exc.errno != errno.ENOENT:
- raise OpenSSLObjectError(exc)
- else:
- pass
-
-
-# #####################################################################################
-# #####################################################################################
-# This has been extracted from the OpenSSL project's objects.txt:
-# https://github.com/openssl/openssl/blob/9537fe5757bb07761fa275d779bbd40bcf5530e4/crypto/objects/objects.txt
-# Extracted with https://gist.github.com/felixfontein/376748017ad65ead093d56a45a5bf376
-#
-# In case the following data structure has any copyrightable content, note that it is licensed as follows:
-# Copyright (c) the OpenSSL contributors
-# Licensed under the Apache License 2.0
-# https://github.com/openssl/openssl/blob/master/LICENSE
-_OID_MAP = {
- '0': ('itu-t', 'ITU-T', 'ccitt'),
- '0.3.4401.5': ('ntt-ds', ),
- '0.3.4401.5.3.1.9': ('camellia', ),
- '0.3.4401.5.3.1.9.1': ('camellia-128-ecb', 'CAMELLIA-128-ECB'),
- '0.3.4401.5.3.1.9.3': ('camellia-128-ofb', 'CAMELLIA-128-OFB'),
- '0.3.4401.5.3.1.9.4': ('camellia-128-cfb', 'CAMELLIA-128-CFB'),
- '0.3.4401.5.3.1.9.6': ('camellia-128-gcm', 'CAMELLIA-128-GCM'),
- '0.3.4401.5.3.1.9.7': ('camellia-128-ccm', 'CAMELLIA-128-CCM'),
- '0.3.4401.5.3.1.9.9': ('camellia-128-ctr', 'CAMELLIA-128-CTR'),
- '0.3.4401.5.3.1.9.10': ('camellia-128-cmac', 'CAMELLIA-128-CMAC'),
- '0.3.4401.5.3.1.9.21': ('camellia-192-ecb', 'CAMELLIA-192-ECB'),
- '0.3.4401.5.3.1.9.23': ('camellia-192-ofb', 'CAMELLIA-192-OFB'),
- '0.3.4401.5.3.1.9.24': ('camellia-192-cfb', 'CAMELLIA-192-CFB'),
- '0.3.4401.5.3.1.9.26': ('camellia-192-gcm', 'CAMELLIA-192-GCM'),
- '0.3.4401.5.3.1.9.27': ('camellia-192-ccm', 'CAMELLIA-192-CCM'),
- '0.3.4401.5.3.1.9.29': ('camellia-192-ctr', 'CAMELLIA-192-CTR'),
- '0.3.4401.5.3.1.9.30': ('camellia-192-cmac', 'CAMELLIA-192-CMAC'),
- '0.3.4401.5.3.1.9.41': ('camellia-256-ecb', 'CAMELLIA-256-ECB'),
- '0.3.4401.5.3.1.9.43': ('camellia-256-ofb', 'CAMELLIA-256-OFB'),
- '0.3.4401.5.3.1.9.44': ('camellia-256-cfb', 'CAMELLIA-256-CFB'),
- '0.3.4401.5.3.1.9.46': ('camellia-256-gcm', 'CAMELLIA-256-GCM'),
- '0.3.4401.5.3.1.9.47': ('camellia-256-ccm', 'CAMELLIA-256-CCM'),
- '0.3.4401.5.3.1.9.49': ('camellia-256-ctr', 'CAMELLIA-256-CTR'),
- '0.3.4401.5.3.1.9.50': ('camellia-256-cmac', 'CAMELLIA-256-CMAC'),
- '0.9': ('data', ),
- '0.9.2342': ('pss', ),
- '0.9.2342.19200300': ('ucl', ),
- '0.9.2342.19200300.100': ('pilot', ),
- '0.9.2342.19200300.100.1': ('pilotAttributeType', ),
- '0.9.2342.19200300.100.1.1': ('userId', 'UID'),
- '0.9.2342.19200300.100.1.2': ('textEncodedORAddress', ),
- '0.9.2342.19200300.100.1.3': ('rfc822Mailbox', 'mail'),
- '0.9.2342.19200300.100.1.4': ('info', ),
- '0.9.2342.19200300.100.1.5': ('favouriteDrink', ),
- '0.9.2342.19200300.100.1.6': ('roomNumber', ),
- '0.9.2342.19200300.100.1.7': ('photo', ),
- '0.9.2342.19200300.100.1.8': ('userClass', ),
- '0.9.2342.19200300.100.1.9': ('host', ),
- '0.9.2342.19200300.100.1.10': ('manager', ),
- '0.9.2342.19200300.100.1.11': ('documentIdentifier', ),
- '0.9.2342.19200300.100.1.12': ('documentTitle', ),
- '0.9.2342.19200300.100.1.13': ('documentVersion', ),
- '0.9.2342.19200300.100.1.14': ('documentAuthor', ),
- '0.9.2342.19200300.100.1.15': ('documentLocation', ),
- '0.9.2342.19200300.100.1.20': ('homeTelephoneNumber', ),
- '0.9.2342.19200300.100.1.21': ('secretary', ),
- '0.9.2342.19200300.100.1.22': ('otherMailbox', ),
- '0.9.2342.19200300.100.1.23': ('lastModifiedTime', ),
- '0.9.2342.19200300.100.1.24': ('lastModifiedBy', ),
- '0.9.2342.19200300.100.1.25': ('domainComponent', 'DC'),
- '0.9.2342.19200300.100.1.26': ('aRecord', ),
- '0.9.2342.19200300.100.1.27': ('pilotAttributeType27', ),
- '0.9.2342.19200300.100.1.28': ('mXRecord', ),
- '0.9.2342.19200300.100.1.29': ('nSRecord', ),
- '0.9.2342.19200300.100.1.30': ('sOARecord', ),
- '0.9.2342.19200300.100.1.31': ('cNAMERecord', ),
- '0.9.2342.19200300.100.1.37': ('associatedDomain', ),
- '0.9.2342.19200300.100.1.38': ('associatedName', ),
- '0.9.2342.19200300.100.1.39': ('homePostalAddress', ),
- '0.9.2342.19200300.100.1.40': ('personalTitle', ),
- '0.9.2342.19200300.100.1.41': ('mobileTelephoneNumber', ),
- '0.9.2342.19200300.100.1.42': ('pagerTelephoneNumber', ),
- '0.9.2342.19200300.100.1.43': ('friendlyCountryName', ),
- '0.9.2342.19200300.100.1.44': ('uniqueIdentifier', 'uid'),
- '0.9.2342.19200300.100.1.45': ('organizationalStatus', ),
- '0.9.2342.19200300.100.1.46': ('janetMailbox', ),
- '0.9.2342.19200300.100.1.47': ('mailPreferenceOption', ),
- '0.9.2342.19200300.100.1.48': ('buildingName', ),
- '0.9.2342.19200300.100.1.49': ('dSAQuality', ),
- '0.9.2342.19200300.100.1.50': ('singleLevelQuality', ),
- '0.9.2342.19200300.100.1.51': ('subtreeMinimumQuality', ),
- '0.9.2342.19200300.100.1.52': ('subtreeMaximumQuality', ),
- '0.9.2342.19200300.100.1.53': ('personalSignature', ),
- '0.9.2342.19200300.100.1.54': ('dITRedirect', ),
- '0.9.2342.19200300.100.1.55': ('audio', ),
- '0.9.2342.19200300.100.1.56': ('documentPublisher', ),
- '0.9.2342.19200300.100.3': ('pilotAttributeSyntax', ),
- '0.9.2342.19200300.100.3.4': ('iA5StringSyntax', ),
- '0.9.2342.19200300.100.3.5': ('caseIgnoreIA5StringSyntax', ),
- '0.9.2342.19200300.100.4': ('pilotObjectClass', ),
- '0.9.2342.19200300.100.4.3': ('pilotObject', ),
- '0.9.2342.19200300.100.4.4': ('pilotPerson', ),
- '0.9.2342.19200300.100.4.5': ('account', ),
- '0.9.2342.19200300.100.4.6': ('document', ),
- '0.9.2342.19200300.100.4.7': ('room', ),
- '0.9.2342.19200300.100.4.9': ('documentSeries', ),
- '0.9.2342.19200300.100.4.13': ('Domain', 'domain'),
- '0.9.2342.19200300.100.4.14': ('rFC822localPart', ),
- '0.9.2342.19200300.100.4.15': ('dNSDomain', ),
- '0.9.2342.19200300.100.4.17': ('domainRelatedObject', ),
- '0.9.2342.19200300.100.4.18': ('friendlyCountry', ),
- '0.9.2342.19200300.100.4.19': ('simpleSecurityObject', ),
- '0.9.2342.19200300.100.4.20': ('pilotOrganization', ),
- '0.9.2342.19200300.100.4.21': ('pilotDSA', ),
- '0.9.2342.19200300.100.4.22': ('qualityLabelledData', ),
- '0.9.2342.19200300.100.10': ('pilotGroups', ),
- '1': ('iso', 'ISO'),
- '1.0.9797.3.4': ('gmac', 'GMAC'),
- '1.0.10118.3.0.55': ('whirlpool', ),
- '1.2': ('ISO Member Body', 'member-body'),
- '1.2.156': ('ISO CN Member Body', 'ISO-CN'),
- '1.2.156.10197': ('oscca', ),
- '1.2.156.10197.1': ('sm-scheme', ),
- '1.2.156.10197.1.104.1': ('sm4-ecb', 'SM4-ECB'),
- '1.2.156.10197.1.104.2': ('sm4-cbc', 'SM4-CBC'),
- '1.2.156.10197.1.104.3': ('sm4-ofb', 'SM4-OFB'),
- '1.2.156.10197.1.104.4': ('sm4-cfb', 'SM4-CFB'),
- '1.2.156.10197.1.104.5': ('sm4-cfb1', 'SM4-CFB1'),
- '1.2.156.10197.1.104.6': ('sm4-cfb8', 'SM4-CFB8'),
- '1.2.156.10197.1.104.7': ('sm4-ctr', 'SM4-CTR'),
- '1.2.156.10197.1.301': ('sm2', 'SM2'),
- '1.2.156.10197.1.401': ('sm3', 'SM3'),
- '1.2.156.10197.1.501': ('SM2-with-SM3', 'SM2-SM3'),
- '1.2.156.10197.1.504': ('sm3WithRSAEncryption', 'RSA-SM3'),
- '1.2.392.200011.61.1.1.1.2': ('camellia-128-cbc', 'CAMELLIA-128-CBC'),
- '1.2.392.200011.61.1.1.1.3': ('camellia-192-cbc', 'CAMELLIA-192-CBC'),
- '1.2.392.200011.61.1.1.1.4': ('camellia-256-cbc', 'CAMELLIA-256-CBC'),
- '1.2.392.200011.61.1.1.3.2': ('id-camellia128-wrap', ),
- '1.2.392.200011.61.1.1.3.3': ('id-camellia192-wrap', ),
- '1.2.392.200011.61.1.1.3.4': ('id-camellia256-wrap', ),
- '1.2.410.200004': ('kisa', 'KISA'),
- '1.2.410.200004.1.3': ('seed-ecb', 'SEED-ECB'),
- '1.2.410.200004.1.4': ('seed-cbc', 'SEED-CBC'),
- '1.2.410.200004.1.5': ('seed-cfb', 'SEED-CFB'),
- '1.2.410.200004.1.6': ('seed-ofb', 'SEED-OFB'),
- '1.2.410.200046.1.1': ('aria', ),
- '1.2.410.200046.1.1.1': ('aria-128-ecb', 'ARIA-128-ECB'),
- '1.2.410.200046.1.1.2': ('aria-128-cbc', 'ARIA-128-CBC'),
- '1.2.410.200046.1.1.3': ('aria-128-cfb', 'ARIA-128-CFB'),
- '1.2.410.200046.1.1.4': ('aria-128-ofb', 'ARIA-128-OFB'),
- '1.2.410.200046.1.1.5': ('aria-128-ctr', 'ARIA-128-CTR'),
- '1.2.410.200046.1.1.6': ('aria-192-ecb', 'ARIA-192-ECB'),
- '1.2.410.200046.1.1.7': ('aria-192-cbc', 'ARIA-192-CBC'),
- '1.2.410.200046.1.1.8': ('aria-192-cfb', 'ARIA-192-CFB'),
- '1.2.410.200046.1.1.9': ('aria-192-ofb', 'ARIA-192-OFB'),
- '1.2.410.200046.1.1.10': ('aria-192-ctr', 'ARIA-192-CTR'),
- '1.2.410.200046.1.1.11': ('aria-256-ecb', 'ARIA-256-ECB'),
- '1.2.410.200046.1.1.12': ('aria-256-cbc', 'ARIA-256-CBC'),
- '1.2.410.200046.1.1.13': ('aria-256-cfb', 'ARIA-256-CFB'),
- '1.2.410.200046.1.1.14': ('aria-256-ofb', 'ARIA-256-OFB'),
- '1.2.410.200046.1.1.15': ('aria-256-ctr', 'ARIA-256-CTR'),
- '1.2.410.200046.1.1.34': ('aria-128-gcm', 'ARIA-128-GCM'),
- '1.2.410.200046.1.1.35': ('aria-192-gcm', 'ARIA-192-GCM'),
- '1.2.410.200046.1.1.36': ('aria-256-gcm', 'ARIA-256-GCM'),
- '1.2.410.200046.1.1.37': ('aria-128-ccm', 'ARIA-128-CCM'),
- '1.2.410.200046.1.1.38': ('aria-192-ccm', 'ARIA-192-CCM'),
- '1.2.410.200046.1.1.39': ('aria-256-ccm', 'ARIA-256-CCM'),
- '1.2.643.2.2': ('cryptopro', ),
- '1.2.643.2.2.3': ('GOST R 34.11-94 with GOST R 34.10-2001', 'id-GostR3411-94-with-GostR3410-2001'),
- '1.2.643.2.2.4': ('GOST R 34.11-94 with GOST R 34.10-94', 'id-GostR3411-94-with-GostR3410-94'),
- '1.2.643.2.2.9': ('GOST R 34.11-94', 'md_gost94'),
- '1.2.643.2.2.10': ('HMAC GOST 34.11-94', 'id-HMACGostR3411-94'),
- '1.2.643.2.2.14.0': ('id-Gost28147-89-None-KeyMeshing', ),
- '1.2.643.2.2.14.1': ('id-Gost28147-89-CryptoPro-KeyMeshing', ),
- '1.2.643.2.2.19': ('GOST R 34.10-2001', 'gost2001'),
- '1.2.643.2.2.20': ('GOST R 34.10-94', 'gost94'),
- '1.2.643.2.2.20.1': ('id-GostR3410-94-a', ),
- '1.2.643.2.2.20.2': ('id-GostR3410-94-aBis', ),
- '1.2.643.2.2.20.3': ('id-GostR3410-94-b', ),
- '1.2.643.2.2.20.4': ('id-GostR3410-94-bBis', ),
- '1.2.643.2.2.21': ('GOST 28147-89', 'gost89'),
- '1.2.643.2.2.22': ('GOST 28147-89 MAC', 'gost-mac'),
- '1.2.643.2.2.23': ('GOST R 34.11-94 PRF', 'prf-gostr3411-94'),
- '1.2.643.2.2.30.0': ('id-GostR3411-94-TestParamSet', ),
- '1.2.643.2.2.30.1': ('id-GostR3411-94-CryptoProParamSet', ),
- '1.2.643.2.2.31.0': ('id-Gost28147-89-TestParamSet', ),
- '1.2.643.2.2.31.1': ('id-Gost28147-89-CryptoPro-A-ParamSet', ),
- '1.2.643.2.2.31.2': ('id-Gost28147-89-CryptoPro-B-ParamSet', ),
- '1.2.643.2.2.31.3': ('id-Gost28147-89-CryptoPro-C-ParamSet', ),
- '1.2.643.2.2.31.4': ('id-Gost28147-89-CryptoPro-D-ParamSet', ),
- '1.2.643.2.2.31.5': ('id-Gost28147-89-CryptoPro-Oscar-1-1-ParamSet', ),
- '1.2.643.2.2.31.6': ('id-Gost28147-89-CryptoPro-Oscar-1-0-ParamSet', ),
- '1.2.643.2.2.31.7': ('id-Gost28147-89-CryptoPro-RIC-1-ParamSet', ),
- '1.2.643.2.2.32.0': ('id-GostR3410-94-TestParamSet', ),
- '1.2.643.2.2.32.2': ('id-GostR3410-94-CryptoPro-A-ParamSet', ),
- '1.2.643.2.2.32.3': ('id-GostR3410-94-CryptoPro-B-ParamSet', ),
- '1.2.643.2.2.32.4': ('id-GostR3410-94-CryptoPro-C-ParamSet', ),
- '1.2.643.2.2.32.5': ('id-GostR3410-94-CryptoPro-D-ParamSet', ),
- '1.2.643.2.2.33.1': ('id-GostR3410-94-CryptoPro-XchA-ParamSet', ),
- '1.2.643.2.2.33.2': ('id-GostR3410-94-CryptoPro-XchB-ParamSet', ),
- '1.2.643.2.2.33.3': ('id-GostR3410-94-CryptoPro-XchC-ParamSet', ),
- '1.2.643.2.2.35.0': ('id-GostR3410-2001-TestParamSet', ),
- '1.2.643.2.2.35.1': ('id-GostR3410-2001-CryptoPro-A-ParamSet', ),
- '1.2.643.2.2.35.2': ('id-GostR3410-2001-CryptoPro-B-ParamSet', ),
- '1.2.643.2.2.35.3': ('id-GostR3410-2001-CryptoPro-C-ParamSet', ),
- '1.2.643.2.2.36.0': ('id-GostR3410-2001-CryptoPro-XchA-ParamSet', ),
- '1.2.643.2.2.36.1': ('id-GostR3410-2001-CryptoPro-XchB-ParamSet', ),
- '1.2.643.2.2.98': ('GOST R 34.10-2001 DH', 'id-GostR3410-2001DH'),
- '1.2.643.2.2.99': ('GOST R 34.10-94 DH', 'id-GostR3410-94DH'),
- '1.2.643.2.9': ('cryptocom', ),
- '1.2.643.2.9.1.3.3': ('GOST R 34.11-94 with GOST R 34.10-94 Cryptocom', 'id-GostR3411-94-with-GostR3410-94-cc'),
- '1.2.643.2.9.1.3.4': ('GOST R 34.11-94 with GOST R 34.10-2001 Cryptocom', 'id-GostR3411-94-with-GostR3410-2001-cc'),
- '1.2.643.2.9.1.5.3': ('GOST 34.10-94 Cryptocom', 'gost94cc'),
- '1.2.643.2.9.1.5.4': ('GOST 34.10-2001 Cryptocom', 'gost2001cc'),
- '1.2.643.2.9.1.6.1': ('GOST 28147-89 Cryptocom ParamSet', 'id-Gost28147-89-cc'),
- '1.2.643.2.9.1.8.1': ('GOST R 3410-2001 Parameter Set Cryptocom', 'id-GostR3410-2001-ParamSet-cc'),
- '1.2.643.3.131.1.1': ('INN', 'INN'),
- '1.2.643.7.1': ('id-tc26', ),
- '1.2.643.7.1.1': ('id-tc26-algorithms', ),
- '1.2.643.7.1.1.1': ('id-tc26-sign', ),
- '1.2.643.7.1.1.1.1': ('GOST R 34.10-2012 with 256 bit modulus', 'gost2012_256'),
- '1.2.643.7.1.1.1.2': ('GOST R 34.10-2012 with 512 bit modulus', 'gost2012_512'),
- '1.2.643.7.1.1.2': ('id-tc26-digest', ),
- '1.2.643.7.1.1.2.2': ('GOST R 34.11-2012 with 256 bit hash', 'md_gost12_256'),
- '1.2.643.7.1.1.2.3': ('GOST R 34.11-2012 with 512 bit hash', 'md_gost12_512'),
- '1.2.643.7.1.1.3': ('id-tc26-signwithdigest', ),
- '1.2.643.7.1.1.3.2': ('GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)', 'id-tc26-signwithdigest-gost3410-2012-256'),
- '1.2.643.7.1.1.3.3': ('GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)', 'id-tc26-signwithdigest-gost3410-2012-512'),
- '1.2.643.7.1.1.4': ('id-tc26-mac', ),
- '1.2.643.7.1.1.4.1': ('HMAC GOST 34.11-2012 256 bit', 'id-tc26-hmac-gost-3411-2012-256'),
- '1.2.643.7.1.1.4.2': ('HMAC GOST 34.11-2012 512 bit', 'id-tc26-hmac-gost-3411-2012-512'),
- '1.2.643.7.1.1.5': ('id-tc26-cipher', ),
- '1.2.643.7.1.1.5.1': ('id-tc26-cipher-gostr3412-2015-magma', ),
- '1.2.643.7.1.1.5.1.1': ('id-tc26-cipher-gostr3412-2015-magma-ctracpkm', ),
- '1.2.643.7.1.1.5.1.2': ('id-tc26-cipher-gostr3412-2015-magma-ctracpkm-omac', ),
- '1.2.643.7.1.1.5.2': ('id-tc26-cipher-gostr3412-2015-kuznyechik', ),
- '1.2.643.7.1.1.5.2.1': ('id-tc26-cipher-gostr3412-2015-kuznyechik-ctracpkm', ),
- '1.2.643.7.1.1.5.2.2': ('id-tc26-cipher-gostr3412-2015-kuznyechik-ctracpkm-omac', ),
- '1.2.643.7.1.1.6': ('id-tc26-agreement', ),
- '1.2.643.7.1.1.6.1': ('id-tc26-agreement-gost-3410-2012-256', ),
- '1.2.643.7.1.1.6.2': ('id-tc26-agreement-gost-3410-2012-512', ),
- '1.2.643.7.1.1.7': ('id-tc26-wrap', ),
- '1.2.643.7.1.1.7.1': ('id-tc26-wrap-gostr3412-2015-magma', ),
- '1.2.643.7.1.1.7.1.1': ('id-tc26-wrap-gostr3412-2015-magma-kexp15', 'id-tc26-wrap-gostr3412-2015-kuznyechik-kexp15'),
- '1.2.643.7.1.1.7.2': ('id-tc26-wrap-gostr3412-2015-kuznyechik', ),
- '1.2.643.7.1.2': ('id-tc26-constants', ),
- '1.2.643.7.1.2.1': ('id-tc26-sign-constants', ),
- '1.2.643.7.1.2.1.1': ('id-tc26-gost-3410-2012-256-constants', ),
- '1.2.643.7.1.2.1.1.1': ('GOST R 34.10-2012 (256 bit) ParamSet A', 'id-tc26-gost-3410-2012-256-paramSetA'),
- '1.2.643.7.1.2.1.1.2': ('GOST R 34.10-2012 (256 bit) ParamSet B', 'id-tc26-gost-3410-2012-256-paramSetB'),
- '1.2.643.7.1.2.1.1.3': ('GOST R 34.10-2012 (256 bit) ParamSet C', 'id-tc26-gost-3410-2012-256-paramSetC'),
- '1.2.643.7.1.2.1.1.4': ('GOST R 34.10-2012 (256 bit) ParamSet D', 'id-tc26-gost-3410-2012-256-paramSetD'),
- '1.2.643.7.1.2.1.2': ('id-tc26-gost-3410-2012-512-constants', ),
- '1.2.643.7.1.2.1.2.0': ('GOST R 34.10-2012 (512 bit) testing parameter set', 'id-tc26-gost-3410-2012-512-paramSetTest'),
- '1.2.643.7.1.2.1.2.1': ('GOST R 34.10-2012 (512 bit) ParamSet A', 'id-tc26-gost-3410-2012-512-paramSetA'),
- '1.2.643.7.1.2.1.2.2': ('GOST R 34.10-2012 (512 bit) ParamSet B', 'id-tc26-gost-3410-2012-512-paramSetB'),
- '1.2.643.7.1.2.1.2.3': ('GOST R 34.10-2012 (512 bit) ParamSet C', 'id-tc26-gost-3410-2012-512-paramSetC'),
- '1.2.643.7.1.2.2': ('id-tc26-digest-constants', ),
- '1.2.643.7.1.2.5': ('id-tc26-cipher-constants', ),
- '1.2.643.7.1.2.5.1': ('id-tc26-gost-28147-constants', ),
- '1.2.643.7.1.2.5.1.1': ('GOST 28147-89 TC26 parameter set', 'id-tc26-gost-28147-param-Z'),
- '1.2.643.100.1': ('OGRN', 'OGRN'),
- '1.2.643.100.3': ('SNILS', 'SNILS'),
- '1.2.643.100.111': ('Signing Tool of Subject', 'subjectSignTool'),
- '1.2.643.100.112': ('Signing Tool of Issuer', 'issuerSignTool'),
- '1.2.804': ('ISO-UA', ),
- '1.2.804.2.1.1.1': ('ua-pki', ),
- '1.2.804.2.1.1.1.1.1.1': ('DSTU Gost 28147-2009', 'dstu28147'),
- '1.2.804.2.1.1.1.1.1.1.2': ('DSTU Gost 28147-2009 OFB mode', 'dstu28147-ofb'),
- '1.2.804.2.1.1.1.1.1.1.3': ('DSTU Gost 28147-2009 CFB mode', 'dstu28147-cfb'),
- '1.2.804.2.1.1.1.1.1.1.5': ('DSTU Gost 28147-2009 key wrap', 'dstu28147-wrap'),
- '1.2.804.2.1.1.1.1.1.2': ('HMAC DSTU Gost 34311-95', 'hmacWithDstu34311'),
- '1.2.804.2.1.1.1.1.2.1': ('DSTU Gost 34311-95', 'dstu34311'),
- '1.2.804.2.1.1.1.1.3.1.1': ('DSTU 4145-2002 little endian', 'dstu4145le'),
- '1.2.804.2.1.1.1.1.3.1.1.1.1': ('DSTU 4145-2002 big endian', 'dstu4145be'),
- '1.2.804.2.1.1.1.1.3.1.1.2.0': ('DSTU curve 0', 'uacurve0'),
- '1.2.804.2.1.1.1.1.3.1.1.2.1': ('DSTU curve 1', 'uacurve1'),
- '1.2.804.2.1.1.1.1.3.1.1.2.2': ('DSTU curve 2', 'uacurve2'),
- '1.2.804.2.1.1.1.1.3.1.1.2.3': ('DSTU curve 3', 'uacurve3'),
- '1.2.804.2.1.1.1.1.3.1.1.2.4': ('DSTU curve 4', 'uacurve4'),
- '1.2.804.2.1.1.1.1.3.1.1.2.5': ('DSTU curve 5', 'uacurve5'),
- '1.2.804.2.1.1.1.1.3.1.1.2.6': ('DSTU curve 6', 'uacurve6'),
- '1.2.804.2.1.1.1.1.3.1.1.2.7': ('DSTU curve 7', 'uacurve7'),
- '1.2.804.2.1.1.1.1.3.1.1.2.8': ('DSTU curve 8', 'uacurve8'),
- '1.2.804.2.1.1.1.1.3.1.1.2.9': ('DSTU curve 9', 'uacurve9'),
- '1.2.840': ('ISO US Member Body', 'ISO-US'),
- '1.2.840.10040': ('X9.57', 'X9-57'),
- '1.2.840.10040.2': ('holdInstruction', ),
- '1.2.840.10040.2.1': ('Hold Instruction None', 'holdInstructionNone'),
- '1.2.840.10040.2.2': ('Hold Instruction Call Issuer', 'holdInstructionCallIssuer'),
- '1.2.840.10040.2.3': ('Hold Instruction Reject', 'holdInstructionReject'),
- '1.2.840.10040.4': ('X9.57 CM ?', 'X9cm'),
- '1.2.840.10040.4.1': ('dsaEncryption', 'DSA'),
- '1.2.840.10040.4.3': ('dsaWithSHA1', 'DSA-SHA1'),
- '1.2.840.10045': ('ANSI X9.62', 'ansi-X9-62'),
- '1.2.840.10045.1': ('id-fieldType', ),
- '1.2.840.10045.1.1': ('prime-field', ),
- '1.2.840.10045.1.2': ('characteristic-two-field', ),
- '1.2.840.10045.1.2.3': ('id-characteristic-two-basis', ),
- '1.2.840.10045.1.2.3.1': ('onBasis', ),
- '1.2.840.10045.1.2.3.2': ('tpBasis', ),
- '1.2.840.10045.1.2.3.3': ('ppBasis', ),
- '1.2.840.10045.2': ('id-publicKeyType', ),
- '1.2.840.10045.2.1': ('id-ecPublicKey', ),
- '1.2.840.10045.3': ('ellipticCurve', ),
- '1.2.840.10045.3.0': ('c-TwoCurve', ),
- '1.2.840.10045.3.0.1': ('c2pnb163v1', ),
- '1.2.840.10045.3.0.2': ('c2pnb163v2', ),
- '1.2.840.10045.3.0.3': ('c2pnb163v3', ),
- '1.2.840.10045.3.0.4': ('c2pnb176v1', ),
- '1.2.840.10045.3.0.5': ('c2tnb191v1', ),
- '1.2.840.10045.3.0.6': ('c2tnb191v2', ),
- '1.2.840.10045.3.0.7': ('c2tnb191v3', ),
- '1.2.840.10045.3.0.8': ('c2onb191v4', ),
- '1.2.840.10045.3.0.9': ('c2onb191v5', ),
- '1.2.840.10045.3.0.10': ('c2pnb208w1', ),
- '1.2.840.10045.3.0.11': ('c2tnb239v1', ),
- '1.2.840.10045.3.0.12': ('c2tnb239v2', ),
- '1.2.840.10045.3.0.13': ('c2tnb239v3', ),
- '1.2.840.10045.3.0.14': ('c2onb239v4', ),
- '1.2.840.10045.3.0.15': ('c2onb239v5', ),
- '1.2.840.10045.3.0.16': ('c2pnb272w1', ),
- '1.2.840.10045.3.0.17': ('c2pnb304w1', ),
- '1.2.840.10045.3.0.18': ('c2tnb359v1', ),
- '1.2.840.10045.3.0.19': ('c2pnb368w1', ),
- '1.2.840.10045.3.0.20': ('c2tnb431r1', ),
- '1.2.840.10045.3.1': ('primeCurve', ),
- '1.2.840.10045.3.1.1': ('prime192v1', ),
- '1.2.840.10045.3.1.2': ('prime192v2', ),
- '1.2.840.10045.3.1.3': ('prime192v3', ),
- '1.2.840.10045.3.1.4': ('prime239v1', ),
- '1.2.840.10045.3.1.5': ('prime239v2', ),
- '1.2.840.10045.3.1.6': ('prime239v3', ),
- '1.2.840.10045.3.1.7': ('prime256v1', ),
- '1.2.840.10045.4': ('id-ecSigType', ),
- '1.2.840.10045.4.1': ('ecdsa-with-SHA1', ),
- '1.2.840.10045.4.2': ('ecdsa-with-Recommended', ),
- '1.2.840.10045.4.3': ('ecdsa-with-Specified', ),
- '1.2.840.10045.4.3.1': ('ecdsa-with-SHA224', ),
- '1.2.840.10045.4.3.2': ('ecdsa-with-SHA256', ),
- '1.2.840.10045.4.3.3': ('ecdsa-with-SHA384', ),
- '1.2.840.10045.4.3.4': ('ecdsa-with-SHA512', ),
- '1.2.840.10046.2.1': ('X9.42 DH', 'dhpublicnumber'),
- '1.2.840.113533.7.66.10': ('cast5-cbc', 'CAST5-CBC'),
- '1.2.840.113533.7.66.12': ('pbeWithMD5AndCast5CBC', ),
- '1.2.840.113533.7.66.13': ('password based MAC', 'id-PasswordBasedMAC'),
- '1.2.840.113533.7.66.30': ('Diffie-Hellman based MAC', 'id-DHBasedMac'),
- '1.2.840.113549': ('RSA Data Security, Inc.', 'rsadsi'),
- '1.2.840.113549.1': ('RSA Data Security, Inc. PKCS', 'pkcs'),
- '1.2.840.113549.1.1': ('pkcs1', ),
- '1.2.840.113549.1.1.1': ('rsaEncryption', ),
- '1.2.840.113549.1.1.2': ('md2WithRSAEncryption', 'RSA-MD2'),
- '1.2.840.113549.1.1.3': ('md4WithRSAEncryption', 'RSA-MD4'),
- '1.2.840.113549.1.1.4': ('md5WithRSAEncryption', 'RSA-MD5'),
- '1.2.840.113549.1.1.5': ('sha1WithRSAEncryption', 'RSA-SHA1'),
- '1.2.840.113549.1.1.6': ('rsaOAEPEncryptionSET', ),
- '1.2.840.113549.1.1.7': ('rsaesOaep', 'RSAES-OAEP'),
- '1.2.840.113549.1.1.8': ('mgf1', 'MGF1'),
- '1.2.840.113549.1.1.9': ('pSpecified', 'PSPECIFIED'),
- '1.2.840.113549.1.1.10': ('rsassaPss', 'RSASSA-PSS'),
- '1.2.840.113549.1.1.11': ('sha256WithRSAEncryption', 'RSA-SHA256'),
- '1.2.840.113549.1.1.12': ('sha384WithRSAEncryption', 'RSA-SHA384'),
- '1.2.840.113549.1.1.13': ('sha512WithRSAEncryption', 'RSA-SHA512'),
- '1.2.840.113549.1.1.14': ('sha224WithRSAEncryption', 'RSA-SHA224'),
- '1.2.840.113549.1.1.15': ('sha512-224WithRSAEncryption', 'RSA-SHA512/224'),
- '1.2.840.113549.1.1.16': ('sha512-256WithRSAEncryption', 'RSA-SHA512/256'),
- '1.2.840.113549.1.3': ('pkcs3', ),
- '1.2.840.113549.1.3.1': ('dhKeyAgreement', ),
- '1.2.840.113549.1.5': ('pkcs5', ),
- '1.2.840.113549.1.5.1': ('pbeWithMD2AndDES-CBC', 'PBE-MD2-DES'),
- '1.2.840.113549.1.5.3': ('pbeWithMD5AndDES-CBC', 'PBE-MD5-DES'),
- '1.2.840.113549.1.5.4': ('pbeWithMD2AndRC2-CBC', 'PBE-MD2-RC2-64'),
- '1.2.840.113549.1.5.6': ('pbeWithMD5AndRC2-CBC', 'PBE-MD5-RC2-64'),
- '1.2.840.113549.1.5.10': ('pbeWithSHA1AndDES-CBC', 'PBE-SHA1-DES'),
- '1.2.840.113549.1.5.11': ('pbeWithSHA1AndRC2-CBC', 'PBE-SHA1-RC2-64'),
- '1.2.840.113549.1.5.12': ('PBKDF2', ),
- '1.2.840.113549.1.5.13': ('PBES2', ),
- '1.2.840.113549.1.5.14': ('PBMAC1', ),
- '1.2.840.113549.1.7': ('pkcs7', ),
- '1.2.840.113549.1.7.1': ('pkcs7-data', ),
- '1.2.840.113549.1.7.2': ('pkcs7-signedData', ),
- '1.2.840.113549.1.7.3': ('pkcs7-envelopedData', ),
- '1.2.840.113549.1.7.4': ('pkcs7-signedAndEnvelopedData', ),
- '1.2.840.113549.1.7.5': ('pkcs7-digestData', ),
- '1.2.840.113549.1.7.6': ('pkcs7-encryptedData', ),
- '1.2.840.113549.1.9': ('pkcs9', ),
- '1.2.840.113549.1.9.1': ('emailAddress', ),
- '1.2.840.113549.1.9.2': ('unstructuredName', ),
- '1.2.840.113549.1.9.3': ('contentType', ),
- '1.2.840.113549.1.9.4': ('messageDigest', ),
- '1.2.840.113549.1.9.5': ('signingTime', ),
- '1.2.840.113549.1.9.6': ('countersignature', ),
- '1.2.840.113549.1.9.7': ('challengePassword', ),
- '1.2.840.113549.1.9.8': ('unstructuredAddress', ),
- '1.2.840.113549.1.9.9': ('extendedCertificateAttributes', ),
- '1.2.840.113549.1.9.14': ('Extension Request', 'extReq'),
- '1.2.840.113549.1.9.15': ('S/MIME Capabilities', 'SMIME-CAPS'),
- '1.2.840.113549.1.9.16': ('S/MIME', 'SMIME'),
- '1.2.840.113549.1.9.16.0': ('id-smime-mod', ),
- '1.2.840.113549.1.9.16.0.1': ('id-smime-mod-cms', ),
- '1.2.840.113549.1.9.16.0.2': ('id-smime-mod-ess', ),
- '1.2.840.113549.1.9.16.0.3': ('id-smime-mod-oid', ),
- '1.2.840.113549.1.9.16.0.4': ('id-smime-mod-msg-v3', ),
- '1.2.840.113549.1.9.16.0.5': ('id-smime-mod-ets-eSignature-88', ),
- '1.2.840.113549.1.9.16.0.6': ('id-smime-mod-ets-eSignature-97', ),
- '1.2.840.113549.1.9.16.0.7': ('id-smime-mod-ets-eSigPolicy-88', ),
- '1.2.840.113549.1.9.16.0.8': ('id-smime-mod-ets-eSigPolicy-97', ),
- '1.2.840.113549.1.9.16.1': ('id-smime-ct', ),
- '1.2.840.113549.1.9.16.1.1': ('id-smime-ct-receipt', ),
- '1.2.840.113549.1.9.16.1.2': ('id-smime-ct-authData', ),
- '1.2.840.113549.1.9.16.1.3': ('id-smime-ct-publishCert', ),
- '1.2.840.113549.1.9.16.1.4': ('id-smime-ct-TSTInfo', ),
- '1.2.840.113549.1.9.16.1.5': ('id-smime-ct-TDTInfo', ),
- '1.2.840.113549.1.9.16.1.6': ('id-smime-ct-contentInfo', ),
- '1.2.840.113549.1.9.16.1.7': ('id-smime-ct-DVCSRequestData', ),
- '1.2.840.113549.1.9.16.1.8': ('id-smime-ct-DVCSResponseData', ),
- '1.2.840.113549.1.9.16.1.9': ('id-smime-ct-compressedData', ),
- '1.2.840.113549.1.9.16.1.19': ('id-smime-ct-contentCollection', ),
- '1.2.840.113549.1.9.16.1.23': ('id-smime-ct-authEnvelopedData', ),
- '1.2.840.113549.1.9.16.1.27': ('id-ct-asciiTextWithCRLF', ),
- '1.2.840.113549.1.9.16.1.28': ('id-ct-xml', ),
- '1.2.840.113549.1.9.16.2': ('id-smime-aa', ),
- '1.2.840.113549.1.9.16.2.1': ('id-smime-aa-receiptRequest', ),
- '1.2.840.113549.1.9.16.2.2': ('id-smime-aa-securityLabel', ),
- '1.2.840.113549.1.9.16.2.3': ('id-smime-aa-mlExpandHistory', ),
- '1.2.840.113549.1.9.16.2.4': ('id-smime-aa-contentHint', ),
- '1.2.840.113549.1.9.16.2.5': ('id-smime-aa-msgSigDigest', ),
- '1.2.840.113549.1.9.16.2.6': ('id-smime-aa-encapContentType', ),
- '1.2.840.113549.1.9.16.2.7': ('id-smime-aa-contentIdentifier', ),
- '1.2.840.113549.1.9.16.2.8': ('id-smime-aa-macValue', ),
- '1.2.840.113549.1.9.16.2.9': ('id-smime-aa-equivalentLabels', ),
- '1.2.840.113549.1.9.16.2.10': ('id-smime-aa-contentReference', ),
- '1.2.840.113549.1.9.16.2.11': ('id-smime-aa-encrypKeyPref', ),
- '1.2.840.113549.1.9.16.2.12': ('id-smime-aa-signingCertificate', ),
- '1.2.840.113549.1.9.16.2.13': ('id-smime-aa-smimeEncryptCerts', ),
- '1.2.840.113549.1.9.16.2.14': ('id-smime-aa-timeStampToken', ),
- '1.2.840.113549.1.9.16.2.15': ('id-smime-aa-ets-sigPolicyId', ),
- '1.2.840.113549.1.9.16.2.16': ('id-smime-aa-ets-commitmentType', ),
- '1.2.840.113549.1.9.16.2.17': ('id-smime-aa-ets-signerLocation', ),
- '1.2.840.113549.1.9.16.2.18': ('id-smime-aa-ets-signerAttr', ),
- '1.2.840.113549.1.9.16.2.19': ('id-smime-aa-ets-otherSigCert', ),
- '1.2.840.113549.1.9.16.2.20': ('id-smime-aa-ets-contentTimestamp', ),
- '1.2.840.113549.1.9.16.2.21': ('id-smime-aa-ets-CertificateRefs', ),
- '1.2.840.113549.1.9.16.2.22': ('id-smime-aa-ets-RevocationRefs', ),
- '1.2.840.113549.1.9.16.2.23': ('id-smime-aa-ets-certValues', ),
- '1.2.840.113549.1.9.16.2.24': ('id-smime-aa-ets-revocationValues', ),
- '1.2.840.113549.1.9.16.2.25': ('id-smime-aa-ets-escTimeStamp', ),
- '1.2.840.113549.1.9.16.2.26': ('id-smime-aa-ets-certCRLTimestamp', ),
- '1.2.840.113549.1.9.16.2.27': ('id-smime-aa-ets-archiveTimeStamp', ),
- '1.2.840.113549.1.9.16.2.28': ('id-smime-aa-signatureType', ),
- '1.2.840.113549.1.9.16.2.29': ('id-smime-aa-dvcs-dvc', ),
- '1.2.840.113549.1.9.16.2.47': ('id-smime-aa-signingCertificateV2', ),
- '1.2.840.113549.1.9.16.3': ('id-smime-alg', ),
- '1.2.840.113549.1.9.16.3.1': ('id-smime-alg-ESDHwith3DES', ),
- '1.2.840.113549.1.9.16.3.2': ('id-smime-alg-ESDHwithRC2', ),
- '1.2.840.113549.1.9.16.3.3': ('id-smime-alg-3DESwrap', ),
- '1.2.840.113549.1.9.16.3.4': ('id-smime-alg-RC2wrap', ),
- '1.2.840.113549.1.9.16.3.5': ('id-smime-alg-ESDH', ),
- '1.2.840.113549.1.9.16.3.6': ('id-smime-alg-CMS3DESwrap', ),
- '1.2.840.113549.1.9.16.3.7': ('id-smime-alg-CMSRC2wrap', ),
- '1.2.840.113549.1.9.16.3.8': ('zlib compression', 'ZLIB'),
- '1.2.840.113549.1.9.16.3.9': ('id-alg-PWRI-KEK', ),
- '1.2.840.113549.1.9.16.4': ('id-smime-cd', ),
- '1.2.840.113549.1.9.16.4.1': ('id-smime-cd-ldap', ),
- '1.2.840.113549.1.9.16.5': ('id-smime-spq', ),
- '1.2.840.113549.1.9.16.5.1': ('id-smime-spq-ets-sqt-uri', ),
- '1.2.840.113549.1.9.16.5.2': ('id-smime-spq-ets-sqt-unotice', ),
- '1.2.840.113549.1.9.16.6': ('id-smime-cti', ),
- '1.2.840.113549.1.9.16.6.1': ('id-smime-cti-ets-proofOfOrigin', ),
- '1.2.840.113549.1.9.16.6.2': ('id-smime-cti-ets-proofOfReceipt', ),
- '1.2.840.113549.1.9.16.6.3': ('id-smime-cti-ets-proofOfDelivery', ),
- '1.2.840.113549.1.9.16.6.4': ('id-smime-cti-ets-proofOfSender', ),
- '1.2.840.113549.1.9.16.6.5': ('id-smime-cti-ets-proofOfApproval', ),
- '1.2.840.113549.1.9.16.6.6': ('id-smime-cti-ets-proofOfCreation', ),
- '1.2.840.113549.1.9.20': ('friendlyName', ),
- '1.2.840.113549.1.9.21': ('localKeyID', ),
- '1.2.840.113549.1.9.22': ('certTypes', ),
- '1.2.840.113549.1.9.22.1': ('x509Certificate', ),
- '1.2.840.113549.1.9.22.2': ('sdsiCertificate', ),
- '1.2.840.113549.1.9.23': ('crlTypes', ),
- '1.2.840.113549.1.9.23.1': ('x509Crl', ),
- '1.2.840.113549.1.12': ('pkcs12', ),
- '1.2.840.113549.1.12.1': ('pkcs12-pbeids', ),
- '1.2.840.113549.1.12.1.1': ('pbeWithSHA1And128BitRC4', 'PBE-SHA1-RC4-128'),
- '1.2.840.113549.1.12.1.2': ('pbeWithSHA1And40BitRC4', 'PBE-SHA1-RC4-40'),
- '1.2.840.113549.1.12.1.3': ('pbeWithSHA1And3-KeyTripleDES-CBC', 'PBE-SHA1-3DES'),
- '1.2.840.113549.1.12.1.4': ('pbeWithSHA1And2-KeyTripleDES-CBC', 'PBE-SHA1-2DES'),
- '1.2.840.113549.1.12.1.5': ('pbeWithSHA1And128BitRC2-CBC', 'PBE-SHA1-RC2-128'),
- '1.2.840.113549.1.12.1.6': ('pbeWithSHA1And40BitRC2-CBC', 'PBE-SHA1-RC2-40'),
- '1.2.840.113549.1.12.10': ('pkcs12-Version1', ),
- '1.2.840.113549.1.12.10.1': ('pkcs12-BagIds', ),
- '1.2.840.113549.1.12.10.1.1': ('keyBag', ),
- '1.2.840.113549.1.12.10.1.2': ('pkcs8ShroudedKeyBag', ),
- '1.2.840.113549.1.12.10.1.3': ('certBag', ),
- '1.2.840.113549.1.12.10.1.4': ('crlBag', ),
- '1.2.840.113549.1.12.10.1.5': ('secretBag', ),
- '1.2.840.113549.1.12.10.1.6': ('safeContentsBag', ),
- '1.2.840.113549.2.2': ('md2', 'MD2'),
- '1.2.840.113549.2.4': ('md4', 'MD4'),
- '1.2.840.113549.2.5': ('md5', 'MD5'),
- '1.2.840.113549.2.6': ('hmacWithMD5', ),
- '1.2.840.113549.2.7': ('hmacWithSHA1', ),
- '1.2.840.113549.2.8': ('hmacWithSHA224', ),
- '1.2.840.113549.2.9': ('hmacWithSHA256', ),
- '1.2.840.113549.2.10': ('hmacWithSHA384', ),
- '1.2.840.113549.2.11': ('hmacWithSHA512', ),
- '1.2.840.113549.2.12': ('hmacWithSHA512-224', ),
- '1.2.840.113549.2.13': ('hmacWithSHA512-256', ),
- '1.2.840.113549.3.2': ('rc2-cbc', 'RC2-CBC'),
- '1.2.840.113549.3.4': ('rc4', 'RC4'),
- '1.2.840.113549.3.7': ('des-ede3-cbc', 'DES-EDE3-CBC'),
- '1.2.840.113549.3.8': ('rc5-cbc', 'RC5-CBC'),
- '1.2.840.113549.3.10': ('des-cdmf', 'DES-CDMF'),
- '1.3': ('identified-organization', 'org', 'ORG'),
- '1.3.6': ('dod', 'DOD'),
- '1.3.6.1': ('iana', 'IANA', 'internet'),
- '1.3.6.1.1': ('Directory', 'directory'),
- '1.3.6.1.2': ('Management', 'mgmt'),
- '1.3.6.1.3': ('Experimental', 'experimental'),
- '1.3.6.1.4': ('Private', 'private'),
- '1.3.6.1.4.1': ('Enterprises', 'enterprises'),
- '1.3.6.1.4.1.188.7.1.1.2': ('idea-cbc', 'IDEA-CBC'),
- '1.3.6.1.4.1.311.2.1.14': ('Microsoft Extension Request', 'msExtReq'),
- '1.3.6.1.4.1.311.2.1.21': ('Microsoft Individual Code Signing', 'msCodeInd'),
- '1.3.6.1.4.1.311.2.1.22': ('Microsoft Commercial Code Signing', 'msCodeCom'),
- '1.3.6.1.4.1.311.10.3.1': ('Microsoft Trust List Signing', 'msCTLSign'),
- '1.3.6.1.4.1.311.10.3.3': ('Microsoft Server Gated Crypto', 'msSGC'),
- '1.3.6.1.4.1.311.10.3.4': ('Microsoft Encrypted File System', 'msEFS'),
- '1.3.6.1.4.1.311.17.1': ('Microsoft CSP Name', 'CSPName'),
- '1.3.6.1.4.1.311.17.2': ('Microsoft Local Key set', 'LocalKeySet'),
- '1.3.6.1.4.1.311.20.2.2': ('Microsoft Smartcardlogin', 'msSmartcardLogin'),
- '1.3.6.1.4.1.311.20.2.3': ('Microsoft Universal Principal Name', 'msUPN'),
- '1.3.6.1.4.1.311.60.2.1.1': ('jurisdictionLocalityName', 'jurisdictionL'),
- '1.3.6.1.4.1.311.60.2.1.2': ('jurisdictionStateOrProvinceName', 'jurisdictionST'),
- '1.3.6.1.4.1.311.60.2.1.3': ('jurisdictionCountryName', 'jurisdictionC'),
- '1.3.6.1.4.1.1466.344': ('dcObject', 'dcobject'),
- '1.3.6.1.4.1.1722.12.2.1.16': ('blake2b512', 'BLAKE2b512'),
- '1.3.6.1.4.1.1722.12.2.2.8': ('blake2s256', 'BLAKE2s256'),
- '1.3.6.1.4.1.3029.1.2': ('bf-cbc', 'BF-CBC'),
- '1.3.6.1.4.1.11129.2.4.2': ('CT Precertificate SCTs', 'ct_precert_scts'),
- '1.3.6.1.4.1.11129.2.4.3': ('CT Precertificate Poison', 'ct_precert_poison'),
- '1.3.6.1.4.1.11129.2.4.4': ('CT Precertificate Signer', 'ct_precert_signer'),
- '1.3.6.1.4.1.11129.2.4.5': ('CT Certificate SCTs', 'ct_cert_scts'),
- '1.3.6.1.4.1.11591.4.11': ('scrypt', 'id-scrypt'),
- '1.3.6.1.5': ('Security', 'security'),
- '1.3.6.1.5.2.3': ('id-pkinit', ),
- '1.3.6.1.5.2.3.4': ('PKINIT Client Auth', 'pkInitClientAuth'),
- '1.3.6.1.5.2.3.5': ('Signing KDC Response', 'pkInitKDC'),
- '1.3.6.1.5.5.7': ('PKIX', ),
- '1.3.6.1.5.5.7.0': ('id-pkix-mod', ),
- '1.3.6.1.5.5.7.0.1': ('id-pkix1-explicit-88', ),
- '1.3.6.1.5.5.7.0.2': ('id-pkix1-implicit-88', ),
- '1.3.6.1.5.5.7.0.3': ('id-pkix1-explicit-93', ),
- '1.3.6.1.5.5.7.0.4': ('id-pkix1-implicit-93', ),
- '1.3.6.1.5.5.7.0.5': ('id-mod-crmf', ),
- '1.3.6.1.5.5.7.0.6': ('id-mod-cmc', ),
- '1.3.6.1.5.5.7.0.7': ('id-mod-kea-profile-88', ),
- '1.3.6.1.5.5.7.0.8': ('id-mod-kea-profile-93', ),
- '1.3.6.1.5.5.7.0.9': ('id-mod-cmp', ),
- '1.3.6.1.5.5.7.0.10': ('id-mod-qualified-cert-88', ),
- '1.3.6.1.5.5.7.0.11': ('id-mod-qualified-cert-93', ),
- '1.3.6.1.5.5.7.0.12': ('id-mod-attribute-cert', ),
- '1.3.6.1.5.5.7.0.13': ('id-mod-timestamp-protocol', ),
- '1.3.6.1.5.5.7.0.14': ('id-mod-ocsp', ),
- '1.3.6.1.5.5.7.0.15': ('id-mod-dvcs', ),
- '1.3.6.1.5.5.7.0.16': ('id-mod-cmp2000', ),
- '1.3.6.1.5.5.7.1': ('id-pe', ),
- '1.3.6.1.5.5.7.1.1': ('Authority Information Access', 'authorityInfoAccess'),
- '1.3.6.1.5.5.7.1.2': ('Biometric Info', 'biometricInfo'),
- '1.3.6.1.5.5.7.1.3': ('qcStatements', ),
- '1.3.6.1.5.5.7.1.4': ('ac-auditEntity', ),
- '1.3.6.1.5.5.7.1.5': ('ac-targeting', ),
- '1.3.6.1.5.5.7.1.6': ('aaControls', ),
- '1.3.6.1.5.5.7.1.7': ('sbgp-ipAddrBlock', ),
- '1.3.6.1.5.5.7.1.8': ('sbgp-autonomousSysNum', ),
- '1.3.6.1.5.5.7.1.9': ('sbgp-routerIdentifier', ),
- '1.3.6.1.5.5.7.1.10': ('ac-proxying', ),
- '1.3.6.1.5.5.7.1.11': ('Subject Information Access', 'subjectInfoAccess'),
- '1.3.6.1.5.5.7.1.14': ('Proxy Certificate Information', 'proxyCertInfo'),
- '1.3.6.1.5.5.7.1.24': ('TLS Feature', 'tlsfeature'),
- '1.3.6.1.5.5.7.2': ('id-qt', ),
- '1.3.6.1.5.5.7.2.1': ('Policy Qualifier CPS', 'id-qt-cps'),
- '1.3.6.1.5.5.7.2.2': ('Policy Qualifier User Notice', 'id-qt-unotice'),
- '1.3.6.1.5.5.7.2.3': ('textNotice', ),
- '1.3.6.1.5.5.7.3': ('id-kp', ),
- '1.3.6.1.5.5.7.3.1': ('TLS Web Server Authentication', 'serverAuth'),
- '1.3.6.1.5.5.7.3.2': ('TLS Web Client Authentication', 'clientAuth'),
- '1.3.6.1.5.5.7.3.3': ('Code Signing', 'codeSigning'),
- '1.3.6.1.5.5.7.3.4': ('E-mail Protection', 'emailProtection'),
- '1.3.6.1.5.5.7.3.5': ('IPSec End System', 'ipsecEndSystem'),
- '1.3.6.1.5.5.7.3.6': ('IPSec Tunnel', 'ipsecTunnel'),
- '1.3.6.1.5.5.7.3.7': ('IPSec User', 'ipsecUser'),
- '1.3.6.1.5.5.7.3.8': ('Time Stamping', 'timeStamping'),
- '1.3.6.1.5.5.7.3.9': ('OCSP Signing', 'OCSPSigning'),
- '1.3.6.1.5.5.7.3.10': ('dvcs', 'DVCS'),
- '1.3.6.1.5.5.7.3.17': ('ipsec Internet Key Exchange', 'ipsecIKE'),
- '1.3.6.1.5.5.7.3.18': ('Ctrl/provision WAP Access', 'capwapAC'),
- '1.3.6.1.5.5.7.3.19': ('Ctrl/Provision WAP Termination', 'capwapWTP'),
- '1.3.6.1.5.5.7.3.21': ('SSH Client', 'secureShellClient'),
- '1.3.6.1.5.5.7.3.22': ('SSH Server', 'secureShellServer'),
- '1.3.6.1.5.5.7.3.23': ('Send Router', 'sendRouter'),
- '1.3.6.1.5.5.7.3.24': ('Send Proxied Router', 'sendProxiedRouter'),
- '1.3.6.1.5.5.7.3.25': ('Send Owner', 'sendOwner'),
- '1.3.6.1.5.5.7.3.26': ('Send Proxied Owner', 'sendProxiedOwner'),
- '1.3.6.1.5.5.7.3.27': ('CMC Certificate Authority', 'cmcCA'),
- '1.3.6.1.5.5.7.3.28': ('CMC Registration Authority', 'cmcRA'),
- '1.3.6.1.5.5.7.4': ('id-it', ),
- '1.3.6.1.5.5.7.4.1': ('id-it-caProtEncCert', ),
- '1.3.6.1.5.5.7.4.2': ('id-it-signKeyPairTypes', ),
- '1.3.6.1.5.5.7.4.3': ('id-it-encKeyPairTypes', ),
- '1.3.6.1.5.5.7.4.4': ('id-it-preferredSymmAlg', ),
- '1.3.6.1.5.5.7.4.5': ('id-it-caKeyUpdateInfo', ),
- '1.3.6.1.5.5.7.4.6': ('id-it-currentCRL', ),
- '1.3.6.1.5.5.7.4.7': ('id-it-unsupportedOIDs', ),
- '1.3.6.1.5.5.7.4.8': ('id-it-subscriptionRequest', ),
- '1.3.6.1.5.5.7.4.9': ('id-it-subscriptionResponse', ),
- '1.3.6.1.5.5.7.4.10': ('id-it-keyPairParamReq', ),
- '1.3.6.1.5.5.7.4.11': ('id-it-keyPairParamRep', ),
- '1.3.6.1.5.5.7.4.12': ('id-it-revPassphrase', ),
- '1.3.6.1.5.5.7.4.13': ('id-it-implicitConfirm', ),
- '1.3.6.1.5.5.7.4.14': ('id-it-confirmWaitTime', ),
- '1.3.6.1.5.5.7.4.15': ('id-it-origPKIMessage', ),
- '1.3.6.1.5.5.7.4.16': ('id-it-suppLangTags', ),
- '1.3.6.1.5.5.7.5': ('id-pkip', ),
- '1.3.6.1.5.5.7.5.1': ('id-regCtrl', ),
- '1.3.6.1.5.5.7.5.1.1': ('id-regCtrl-regToken', ),
- '1.3.6.1.5.5.7.5.1.2': ('id-regCtrl-authenticator', ),
- '1.3.6.1.5.5.7.5.1.3': ('id-regCtrl-pkiPublicationInfo', ),
- '1.3.6.1.5.5.7.5.1.4': ('id-regCtrl-pkiArchiveOptions', ),
- '1.3.6.1.5.5.7.5.1.5': ('id-regCtrl-oldCertID', ),
- '1.3.6.1.5.5.7.5.1.6': ('id-regCtrl-protocolEncrKey', ),
- '1.3.6.1.5.5.7.5.2': ('id-regInfo', ),
- '1.3.6.1.5.5.7.5.2.1': ('id-regInfo-utf8Pairs', ),
- '1.3.6.1.5.5.7.5.2.2': ('id-regInfo-certReq', ),
- '1.3.6.1.5.5.7.6': ('id-alg', ),
- '1.3.6.1.5.5.7.6.1': ('id-alg-des40', ),
- '1.3.6.1.5.5.7.6.2': ('id-alg-noSignature', ),
- '1.3.6.1.5.5.7.6.3': ('id-alg-dh-sig-hmac-sha1', ),
- '1.3.6.1.5.5.7.6.4': ('id-alg-dh-pop', ),
- '1.3.6.1.5.5.7.7': ('id-cmc', ),
- '1.3.6.1.5.5.7.7.1': ('id-cmc-statusInfo', ),
- '1.3.6.1.5.5.7.7.2': ('id-cmc-identification', ),
- '1.3.6.1.5.5.7.7.3': ('id-cmc-identityProof', ),
- '1.3.6.1.5.5.7.7.4': ('id-cmc-dataReturn', ),
- '1.3.6.1.5.5.7.7.5': ('id-cmc-transactionId', ),
- '1.3.6.1.5.5.7.7.6': ('id-cmc-senderNonce', ),
- '1.3.6.1.5.5.7.7.7': ('id-cmc-recipientNonce', ),
- '1.3.6.1.5.5.7.7.8': ('id-cmc-addExtensions', ),
- '1.3.6.1.5.5.7.7.9': ('id-cmc-encryptedPOP', ),
- '1.3.6.1.5.5.7.7.10': ('id-cmc-decryptedPOP', ),
- '1.3.6.1.5.5.7.7.11': ('id-cmc-lraPOPWitness', ),
- '1.3.6.1.5.5.7.7.15': ('id-cmc-getCert', ),
- '1.3.6.1.5.5.7.7.16': ('id-cmc-getCRL', ),
- '1.3.6.1.5.5.7.7.17': ('id-cmc-revokeRequest', ),
- '1.3.6.1.5.5.7.7.18': ('id-cmc-regInfo', ),
- '1.3.6.1.5.5.7.7.19': ('id-cmc-responseInfo', ),
- '1.3.6.1.5.5.7.7.21': ('id-cmc-queryPending', ),
- '1.3.6.1.5.5.7.7.22': ('id-cmc-popLinkRandom', ),
- '1.3.6.1.5.5.7.7.23': ('id-cmc-popLinkWitness', ),
- '1.3.6.1.5.5.7.7.24': ('id-cmc-confirmCertAcceptance', ),
- '1.3.6.1.5.5.7.8': ('id-on', ),
- '1.3.6.1.5.5.7.8.1': ('id-on-personalData', ),
- '1.3.6.1.5.5.7.8.3': ('Permanent Identifier', 'id-on-permanentIdentifier'),
- '1.3.6.1.5.5.7.9': ('id-pda', ),
- '1.3.6.1.5.5.7.9.1': ('id-pda-dateOfBirth', ),
- '1.3.6.1.5.5.7.9.2': ('id-pda-placeOfBirth', ),
- '1.3.6.1.5.5.7.9.3': ('id-pda-gender', ),
- '1.3.6.1.5.5.7.9.4': ('id-pda-countryOfCitizenship', ),
- '1.3.6.1.5.5.7.9.5': ('id-pda-countryOfResidence', ),
- '1.3.6.1.5.5.7.10': ('id-aca', ),
- '1.3.6.1.5.5.7.10.1': ('id-aca-authenticationInfo', ),
- '1.3.6.1.5.5.7.10.2': ('id-aca-accessIdentity', ),
- '1.3.6.1.5.5.7.10.3': ('id-aca-chargingIdentity', ),
- '1.3.6.1.5.5.7.10.4': ('id-aca-group', ),
- '1.3.6.1.5.5.7.10.5': ('id-aca-role', ),
- '1.3.6.1.5.5.7.10.6': ('id-aca-encAttrs', ),
- '1.3.6.1.5.5.7.11': ('id-qcs', ),
- '1.3.6.1.5.5.7.11.1': ('id-qcs-pkixQCSyntax-v1', ),
- '1.3.6.1.5.5.7.12': ('id-cct', ),
- '1.3.6.1.5.5.7.12.1': ('id-cct-crs', ),
- '1.3.6.1.5.5.7.12.2': ('id-cct-PKIData', ),
- '1.3.6.1.5.5.7.12.3': ('id-cct-PKIResponse', ),
- '1.3.6.1.5.5.7.21': ('id-ppl', ),
- '1.3.6.1.5.5.7.21.0': ('Any language', 'id-ppl-anyLanguage'),
- '1.3.6.1.5.5.7.21.1': ('Inherit all', 'id-ppl-inheritAll'),
- '1.3.6.1.5.5.7.21.2': ('Independent', 'id-ppl-independent'),
- '1.3.6.1.5.5.7.48': ('id-ad', ),
- '1.3.6.1.5.5.7.48.1': ('OCSP', 'OCSP', 'id-pkix-OCSP'),
- '1.3.6.1.5.5.7.48.1.1': ('Basic OCSP Response', 'basicOCSPResponse'),
- '1.3.6.1.5.5.7.48.1.2': ('OCSP Nonce', 'Nonce'),
- '1.3.6.1.5.5.7.48.1.3': ('OCSP CRL ID', 'CrlID'),
- '1.3.6.1.5.5.7.48.1.4': ('Acceptable OCSP Responses', 'acceptableResponses'),
- '1.3.6.1.5.5.7.48.1.5': ('OCSP No Check', 'noCheck'),
- '1.3.6.1.5.5.7.48.1.6': ('OCSP Archive Cutoff', 'archiveCutoff'),
- '1.3.6.1.5.5.7.48.1.7': ('OCSP Service Locator', 'serviceLocator'),
- '1.3.6.1.5.5.7.48.1.8': ('Extended OCSP Status', 'extendedStatus'),
- '1.3.6.1.5.5.7.48.1.9': ('valid', ),
- '1.3.6.1.5.5.7.48.1.10': ('path', ),
- '1.3.6.1.5.5.7.48.1.11': ('Trust Root', 'trustRoot'),
- '1.3.6.1.5.5.7.48.2': ('CA Issuers', 'caIssuers'),
- '1.3.6.1.5.5.7.48.3': ('AD Time Stamping', 'ad_timestamping'),
- '1.3.6.1.5.5.7.48.4': ('ad dvcs', 'AD_DVCS'),
- '1.3.6.1.5.5.7.48.5': ('CA Repository', 'caRepository'),
- '1.3.6.1.5.5.8.1.1': ('hmac-md5', 'HMAC-MD5'),
- '1.3.6.1.5.5.8.1.2': ('hmac-sha1', 'HMAC-SHA1'),
- '1.3.6.1.6': ('SNMPv2', 'snmpv2'),
- '1.3.6.1.7': ('Mail', ),
- '1.3.6.1.7.1': ('MIME MHS', 'mime-mhs'),
- '1.3.6.1.7.1.1': ('mime-mhs-headings', 'mime-mhs-headings'),
- '1.3.6.1.7.1.1.1': ('id-hex-partial-message', 'id-hex-partial-message'),
- '1.3.6.1.7.1.1.2': ('id-hex-multipart-message', 'id-hex-multipart-message'),
- '1.3.6.1.7.1.2': ('mime-mhs-bodies', 'mime-mhs-bodies'),
- '1.3.14.3.2': ('algorithm', 'algorithm'),
- '1.3.14.3.2.3': ('md5WithRSA', 'RSA-NP-MD5'),
- '1.3.14.3.2.6': ('des-ecb', 'DES-ECB'),
- '1.3.14.3.2.7': ('des-cbc', 'DES-CBC'),
- '1.3.14.3.2.8': ('des-ofb', 'DES-OFB'),
- '1.3.14.3.2.9': ('des-cfb', 'DES-CFB'),
- '1.3.14.3.2.11': ('rsaSignature', ),
- '1.3.14.3.2.12': ('dsaEncryption-old', 'DSA-old'),
- '1.3.14.3.2.13': ('dsaWithSHA', 'DSA-SHA'),
- '1.3.14.3.2.15': ('shaWithRSAEncryption', 'RSA-SHA'),
- '1.3.14.3.2.17': ('des-ede', 'DES-EDE'),
- '1.3.14.3.2.18': ('sha', 'SHA'),
- '1.3.14.3.2.26': ('sha1', 'SHA1'),
- '1.3.14.3.2.27': ('dsaWithSHA1-old', 'DSA-SHA1-old'),
- '1.3.14.3.2.29': ('sha1WithRSA', 'RSA-SHA1-2'),
- '1.3.36.3.2.1': ('ripemd160', 'RIPEMD160'),
- '1.3.36.3.3.1.2': ('ripemd160WithRSA', 'RSA-RIPEMD160'),
- '1.3.36.3.3.2.8.1.1.1': ('brainpoolP160r1', ),
- '1.3.36.3.3.2.8.1.1.2': ('brainpoolP160t1', ),
- '1.3.36.3.3.2.8.1.1.3': ('brainpoolP192r1', ),
- '1.3.36.3.3.2.8.1.1.4': ('brainpoolP192t1', ),
- '1.3.36.3.3.2.8.1.1.5': ('brainpoolP224r1', ),
- '1.3.36.3.3.2.8.1.1.6': ('brainpoolP224t1', ),
- '1.3.36.3.3.2.8.1.1.7': ('brainpoolP256r1', ),
- '1.3.36.3.3.2.8.1.1.8': ('brainpoolP256t1', ),
- '1.3.36.3.3.2.8.1.1.9': ('brainpoolP320r1', ),
- '1.3.36.3.3.2.8.1.1.10': ('brainpoolP320t1', ),
- '1.3.36.3.3.2.8.1.1.11': ('brainpoolP384r1', ),
- '1.3.36.3.3.2.8.1.1.12': ('brainpoolP384t1', ),
- '1.3.36.3.3.2.8.1.1.13': ('brainpoolP512r1', ),
- '1.3.36.3.3.2.8.1.1.14': ('brainpoolP512t1', ),
- '1.3.36.8.3.3': ('Professional Information or basis for Admission', 'x509ExtAdmission'),
- '1.3.101.1.4.1': ('Strong Extranet ID', 'SXNetID'),
- '1.3.101.110': ('X25519', ),
- '1.3.101.111': ('X448', ),
- '1.3.101.112': ('ED25519', ),
- '1.3.101.113': ('ED448', ),
- '1.3.111': ('ieee', ),
- '1.3.111.2.1619': ('IEEE Security in Storage Working Group', 'ieee-siswg'),
- '1.3.111.2.1619.0.1.1': ('aes-128-xts', 'AES-128-XTS'),
- '1.3.111.2.1619.0.1.2': ('aes-256-xts', 'AES-256-XTS'),
- '1.3.132': ('certicom-arc', ),
- '1.3.132.0': ('secg_ellipticCurve', ),
- '1.3.132.0.1': ('sect163k1', ),
- '1.3.132.0.2': ('sect163r1', ),
- '1.3.132.0.3': ('sect239k1', ),
- '1.3.132.0.4': ('sect113r1', ),
- '1.3.132.0.5': ('sect113r2', ),
- '1.3.132.0.6': ('secp112r1', ),
- '1.3.132.0.7': ('secp112r2', ),
- '1.3.132.0.8': ('secp160r1', ),
- '1.3.132.0.9': ('secp160k1', ),
- '1.3.132.0.10': ('secp256k1', ),
- '1.3.132.0.15': ('sect163r2', ),
- '1.3.132.0.16': ('sect283k1', ),
- '1.3.132.0.17': ('sect283r1', ),
- '1.3.132.0.22': ('sect131r1', ),
- '1.3.132.0.23': ('sect131r2', ),
- '1.3.132.0.24': ('sect193r1', ),
- '1.3.132.0.25': ('sect193r2', ),
- '1.3.132.0.26': ('sect233k1', ),
- '1.3.132.0.27': ('sect233r1', ),
- '1.3.132.0.28': ('secp128r1', ),
- '1.3.132.0.29': ('secp128r2', ),
- '1.3.132.0.30': ('secp160r2', ),
- '1.3.132.0.31': ('secp192k1', ),
- '1.3.132.0.32': ('secp224k1', ),
- '1.3.132.0.33': ('secp224r1', ),
- '1.3.132.0.34': ('secp384r1', ),
- '1.3.132.0.35': ('secp521r1', ),
- '1.3.132.0.36': ('sect409k1', ),
- '1.3.132.0.37': ('sect409r1', ),
- '1.3.132.0.38': ('sect571k1', ),
- '1.3.132.0.39': ('sect571r1', ),
- '1.3.132.1': ('secg-scheme', ),
- '1.3.132.1.11.0': ('dhSinglePass-stdDH-sha224kdf-scheme', ),
- '1.3.132.1.11.1': ('dhSinglePass-stdDH-sha256kdf-scheme', ),
- '1.3.132.1.11.2': ('dhSinglePass-stdDH-sha384kdf-scheme', ),
- '1.3.132.1.11.3': ('dhSinglePass-stdDH-sha512kdf-scheme', ),
- '1.3.132.1.14.0': ('dhSinglePass-cofactorDH-sha224kdf-scheme', ),
- '1.3.132.1.14.1': ('dhSinglePass-cofactorDH-sha256kdf-scheme', ),
- '1.3.132.1.14.2': ('dhSinglePass-cofactorDH-sha384kdf-scheme', ),
- '1.3.132.1.14.3': ('dhSinglePass-cofactorDH-sha512kdf-scheme', ),
- '1.3.133.16.840.63.0': ('x9-63-scheme', ),
- '1.3.133.16.840.63.0.2': ('dhSinglePass-stdDH-sha1kdf-scheme', ),
- '1.3.133.16.840.63.0.3': ('dhSinglePass-cofactorDH-sha1kdf-scheme', ),
- '2': ('joint-iso-itu-t', 'JOINT-ISO-ITU-T', 'joint-iso-ccitt'),
- '2.5': ('directory services (X.500)', 'X500'),
- '2.5.1.5': ('Selected Attribute Types', 'selected-attribute-types'),
- '2.5.1.5.55': ('clearance', ),
- '2.5.4': ('X509', ),
- '2.5.4.3': ('commonName', 'CN'),
- '2.5.4.4': ('surname', 'SN'),
- '2.5.4.5': ('serialNumber', ),
- '2.5.4.6': ('countryName', 'C'),
- '2.5.4.7': ('localityName', 'L'),
- '2.5.4.8': ('stateOrProvinceName', 'ST'),
- '2.5.4.9': ('streetAddress', 'street'),
- '2.5.4.10': ('organizationName', 'O'),
- '2.5.4.11': ('organizationalUnitName', 'OU'),
- '2.5.4.12': ('title', 'title'),
- '2.5.4.13': ('description', ),
- '2.5.4.14': ('searchGuide', ),
- '2.5.4.15': ('businessCategory', ),
- '2.5.4.16': ('postalAddress', ),
- '2.5.4.17': ('postalCode', ),
- '2.5.4.18': ('postOfficeBox', ),
- '2.5.4.19': ('physicalDeliveryOfficeName', ),
- '2.5.4.20': ('telephoneNumber', ),
- '2.5.4.21': ('telexNumber', ),
- '2.5.4.22': ('teletexTerminalIdentifier', ),
- '2.5.4.23': ('facsimileTelephoneNumber', ),
- '2.5.4.24': ('x121Address', ),
- '2.5.4.25': ('internationaliSDNNumber', ),
- '2.5.4.26': ('registeredAddress', ),
- '2.5.4.27': ('destinationIndicator', ),
- '2.5.4.28': ('preferredDeliveryMethod', ),
- '2.5.4.29': ('presentationAddress', ),
- '2.5.4.30': ('supportedApplicationContext', ),
- '2.5.4.31': ('member', ),
- '2.5.4.32': ('owner', ),
- '2.5.4.33': ('roleOccupant', ),
- '2.5.4.34': ('seeAlso', ),
- '2.5.4.35': ('userPassword', ),
- '2.5.4.36': ('userCertificate', ),
- '2.5.4.37': ('cACertificate', ),
- '2.5.4.38': ('authorityRevocationList', ),
- '2.5.4.39': ('certificateRevocationList', ),
- '2.5.4.40': ('crossCertificatePair', ),
- '2.5.4.41': ('name', 'name'),
- '2.5.4.42': ('givenName', 'GN'),
- '2.5.4.43': ('initials', 'initials'),
- '2.5.4.44': ('generationQualifier', ),
- '2.5.4.45': ('x500UniqueIdentifier', ),
- '2.5.4.46': ('dnQualifier', 'dnQualifier'),
- '2.5.4.47': ('enhancedSearchGuide', ),
- '2.5.4.48': ('protocolInformation', ),
- '2.5.4.49': ('distinguishedName', ),
- '2.5.4.50': ('uniqueMember', ),
- '2.5.4.51': ('houseIdentifier', ),
- '2.5.4.52': ('supportedAlgorithms', ),
- '2.5.4.53': ('deltaRevocationList', ),
- '2.5.4.54': ('dmdName', ),
- '2.5.4.65': ('pseudonym', ),
- '2.5.4.72': ('role', 'role'),
- '2.5.4.97': ('organizationIdentifier', ),
- '2.5.4.98': ('countryCode3c', 'c3'),
- '2.5.4.99': ('countryCode3n', 'n3'),
- '2.5.4.100': ('dnsName', ),
- '2.5.8': ('directory services - algorithms', 'X500algorithms'),
- '2.5.8.1.1': ('rsa', 'RSA'),
- '2.5.8.3.100': ('mdc2WithRSA', 'RSA-MDC2'),
- '2.5.8.3.101': ('mdc2', 'MDC2'),
- '2.5.29': ('id-ce', ),
- '2.5.29.9': ('X509v3 Subject Directory Attributes', 'subjectDirectoryAttributes'),
- '2.5.29.14': ('X509v3 Subject Key Identifier', 'subjectKeyIdentifier'),
- '2.5.29.15': ('X509v3 Key Usage', 'keyUsage'),
- '2.5.29.16': ('X509v3 Private Key Usage Period', 'privateKeyUsagePeriod'),
- '2.5.29.17': ('X509v3 Subject Alternative Name', 'subjectAltName'),
- '2.5.29.18': ('X509v3 Issuer Alternative Name', 'issuerAltName'),
- '2.5.29.19': ('X509v3 Basic Constraints', 'basicConstraints'),
- '2.5.29.20': ('X509v3 CRL Number', 'crlNumber'),
- '2.5.29.21': ('X509v3 CRL Reason Code', 'CRLReason'),
- '2.5.29.23': ('Hold Instruction Code', 'holdInstructionCode'),
- '2.5.29.24': ('Invalidity Date', 'invalidityDate'),
- '2.5.29.27': ('X509v3 Delta CRL Indicator', 'deltaCRL'),
- '2.5.29.28': ('X509v3 Issuing Distribution Point', 'issuingDistributionPoint'),
- '2.5.29.29': ('X509v3 Certificate Issuer', 'certificateIssuer'),
- '2.5.29.30': ('X509v3 Name Constraints', 'nameConstraints'),
- '2.5.29.31': ('X509v3 CRL Distribution Points', 'crlDistributionPoints'),
- '2.5.29.32': ('X509v3 Certificate Policies', 'certificatePolicies'),
- '2.5.29.32.0': ('X509v3 Any Policy', 'anyPolicy'),
- '2.5.29.33': ('X509v3 Policy Mappings', 'policyMappings'),
- '2.5.29.35': ('X509v3 Authority Key Identifier', 'authorityKeyIdentifier'),
- '2.5.29.36': ('X509v3 Policy Constraints', 'policyConstraints'),
- '2.5.29.37': ('X509v3 Extended Key Usage', 'extendedKeyUsage'),
- '2.5.29.37.0': ('Any Extended Key Usage', 'anyExtendedKeyUsage'),
- '2.5.29.46': ('X509v3 Freshest CRL', 'freshestCRL'),
- '2.5.29.54': ('X509v3 Inhibit Any Policy', 'inhibitAnyPolicy'),
- '2.5.29.55': ('X509v3 AC Targeting', 'targetInformation'),
- '2.5.29.56': ('X509v3 No Revocation Available', 'noRevAvail'),
- '2.16.840.1.101.3': ('csor', ),
- '2.16.840.1.101.3.4': ('nistAlgorithms', ),
- '2.16.840.1.101.3.4.1': ('aes', ),
- '2.16.840.1.101.3.4.1.1': ('aes-128-ecb', 'AES-128-ECB'),
- '2.16.840.1.101.3.4.1.2': ('aes-128-cbc', 'AES-128-CBC'),
- '2.16.840.1.101.3.4.1.3': ('aes-128-ofb', 'AES-128-OFB'),
- '2.16.840.1.101.3.4.1.4': ('aes-128-cfb', 'AES-128-CFB'),
- '2.16.840.1.101.3.4.1.5': ('id-aes128-wrap', ),
- '2.16.840.1.101.3.4.1.6': ('aes-128-gcm', 'id-aes128-GCM'),
- '2.16.840.1.101.3.4.1.7': ('aes-128-ccm', 'id-aes128-CCM'),
- '2.16.840.1.101.3.4.1.8': ('id-aes128-wrap-pad', ),
- '2.16.840.1.101.3.4.1.21': ('aes-192-ecb', 'AES-192-ECB'),
- '2.16.840.1.101.3.4.1.22': ('aes-192-cbc', 'AES-192-CBC'),
- '2.16.840.1.101.3.4.1.23': ('aes-192-ofb', 'AES-192-OFB'),
- '2.16.840.1.101.3.4.1.24': ('aes-192-cfb', 'AES-192-CFB'),
- '2.16.840.1.101.3.4.1.25': ('id-aes192-wrap', ),
- '2.16.840.1.101.3.4.1.26': ('aes-192-gcm', 'id-aes192-GCM'),
- '2.16.840.1.101.3.4.1.27': ('aes-192-ccm', 'id-aes192-CCM'),
- '2.16.840.1.101.3.4.1.28': ('id-aes192-wrap-pad', ),
- '2.16.840.1.101.3.4.1.41': ('aes-256-ecb', 'AES-256-ECB'),
- '2.16.840.1.101.3.4.1.42': ('aes-256-cbc', 'AES-256-CBC'),
- '2.16.840.1.101.3.4.1.43': ('aes-256-ofb', 'AES-256-OFB'),
- '2.16.840.1.101.3.4.1.44': ('aes-256-cfb', 'AES-256-CFB'),
- '2.16.840.1.101.3.4.1.45': ('id-aes256-wrap', ),
- '2.16.840.1.101.3.4.1.46': ('aes-256-gcm', 'id-aes256-GCM'),
- '2.16.840.1.101.3.4.1.47': ('aes-256-ccm', 'id-aes256-CCM'),
- '2.16.840.1.101.3.4.1.48': ('id-aes256-wrap-pad', ),
- '2.16.840.1.101.3.4.2': ('nist_hashalgs', ),
- '2.16.840.1.101.3.4.2.1': ('sha256', 'SHA256'),
- '2.16.840.1.101.3.4.2.2': ('sha384', 'SHA384'),
- '2.16.840.1.101.3.4.2.3': ('sha512', 'SHA512'),
- '2.16.840.1.101.3.4.2.4': ('sha224', 'SHA224'),
- '2.16.840.1.101.3.4.2.5': ('sha512-224', 'SHA512-224'),
- '2.16.840.1.101.3.4.2.6': ('sha512-256', 'SHA512-256'),
- '2.16.840.1.101.3.4.2.7': ('sha3-224', 'SHA3-224'),
- '2.16.840.1.101.3.4.2.8': ('sha3-256', 'SHA3-256'),
- '2.16.840.1.101.3.4.2.9': ('sha3-384', 'SHA3-384'),
- '2.16.840.1.101.3.4.2.10': ('sha3-512', 'SHA3-512'),
- '2.16.840.1.101.3.4.2.11': ('shake128', 'SHAKE128'),
- '2.16.840.1.101.3.4.2.12': ('shake256', 'SHAKE256'),
- '2.16.840.1.101.3.4.2.13': ('hmac-sha3-224', 'id-hmacWithSHA3-224'),
- '2.16.840.1.101.3.4.2.14': ('hmac-sha3-256', 'id-hmacWithSHA3-256'),
- '2.16.840.1.101.3.4.2.15': ('hmac-sha3-384', 'id-hmacWithSHA3-384'),
- '2.16.840.1.101.3.4.2.16': ('hmac-sha3-512', 'id-hmacWithSHA3-512'),
- '2.16.840.1.101.3.4.3': ('dsa_with_sha2', 'sigAlgs'),
- '2.16.840.1.101.3.4.3.1': ('dsa_with_SHA224', ),
- '2.16.840.1.101.3.4.3.2': ('dsa_with_SHA256', ),
- '2.16.840.1.101.3.4.3.3': ('dsa_with_SHA384', 'id-dsa-with-sha384'),
- '2.16.840.1.101.3.4.3.4': ('dsa_with_SHA512', 'id-dsa-with-sha512'),
- '2.16.840.1.101.3.4.3.5': ('dsa_with_SHA3-224', 'id-dsa-with-sha3-224'),
- '2.16.840.1.101.3.4.3.6': ('dsa_with_SHA3-256', 'id-dsa-with-sha3-256'),
- '2.16.840.1.101.3.4.3.7': ('dsa_with_SHA3-384', 'id-dsa-with-sha3-384'),
- '2.16.840.1.101.3.4.3.8': ('dsa_with_SHA3-512', 'id-dsa-with-sha3-512'),
- '2.16.840.1.101.3.4.3.9': ('ecdsa_with_SHA3-224', 'id-ecdsa-with-sha3-224'),
- '2.16.840.1.101.3.4.3.10': ('ecdsa_with_SHA3-256', 'id-ecdsa-with-sha3-256'),
- '2.16.840.1.101.3.4.3.11': ('ecdsa_with_SHA3-384', 'id-ecdsa-with-sha3-384'),
- '2.16.840.1.101.3.4.3.12': ('ecdsa_with_SHA3-512', 'id-ecdsa-with-sha3-512'),
- '2.16.840.1.101.3.4.3.13': ('RSA-SHA3-224', 'id-rsassa-pkcs1-v1_5-with-sha3-224'),
- '2.16.840.1.101.3.4.3.14': ('RSA-SHA3-256', 'id-rsassa-pkcs1-v1_5-with-sha3-256'),
- '2.16.840.1.101.3.4.3.15': ('RSA-SHA3-384', 'id-rsassa-pkcs1-v1_5-with-sha3-384'),
- '2.16.840.1.101.3.4.3.16': ('RSA-SHA3-512', 'id-rsassa-pkcs1-v1_5-with-sha3-512'),
- '2.16.840.1.113730': ('Netscape Communications Corp.', 'Netscape'),
- '2.16.840.1.113730.1': ('Netscape Certificate Extension', 'nsCertExt'),
- '2.16.840.1.113730.1.1': ('Netscape Cert Type', 'nsCertType'),
- '2.16.840.1.113730.1.2': ('Netscape Base Url', 'nsBaseUrl'),
- '2.16.840.1.113730.1.3': ('Netscape Revocation Url', 'nsRevocationUrl'),
- '2.16.840.1.113730.1.4': ('Netscape CA Revocation Url', 'nsCaRevocationUrl'),
- '2.16.840.1.113730.1.7': ('Netscape Renewal Url', 'nsRenewalUrl'),
- '2.16.840.1.113730.1.8': ('Netscape CA Policy Url', 'nsCaPolicyUrl'),
- '2.16.840.1.113730.1.12': ('Netscape SSL Server Name', 'nsSslServerName'),
- '2.16.840.1.113730.1.13': ('Netscape Comment', 'nsComment'),
- '2.16.840.1.113730.2': ('Netscape Data Type', 'nsDataType'),
- '2.16.840.1.113730.2.5': ('Netscape Certificate Sequence', 'nsCertSequence'),
- '2.16.840.1.113730.4.1': ('Netscape Server Gated Crypto', 'nsSGC'),
- '2.23': ('International Organizations', 'international-organizations'),
- '2.23.42': ('Secure Electronic Transactions', 'id-set'),
- '2.23.42.0': ('content types', 'set-ctype'),
- '2.23.42.0.0': ('setct-PANData', ),
- '2.23.42.0.1': ('setct-PANToken', ),
- '2.23.42.0.2': ('setct-PANOnly', ),
- '2.23.42.0.3': ('setct-OIData', ),
- '2.23.42.0.4': ('setct-PI', ),
- '2.23.42.0.5': ('setct-PIData', ),
- '2.23.42.0.6': ('setct-PIDataUnsigned', ),
- '2.23.42.0.7': ('setct-HODInput', ),
- '2.23.42.0.8': ('setct-AuthResBaggage', ),
- '2.23.42.0.9': ('setct-AuthRevReqBaggage', ),
- '2.23.42.0.10': ('setct-AuthRevResBaggage', ),
- '2.23.42.0.11': ('setct-CapTokenSeq', ),
- '2.23.42.0.12': ('setct-PInitResData', ),
- '2.23.42.0.13': ('setct-PI-TBS', ),
- '2.23.42.0.14': ('setct-PResData', ),
- '2.23.42.0.16': ('setct-AuthReqTBS', ),
- '2.23.42.0.17': ('setct-AuthResTBS', ),
- '2.23.42.0.18': ('setct-AuthResTBSX', ),
- '2.23.42.0.19': ('setct-AuthTokenTBS', ),
- '2.23.42.0.20': ('setct-CapTokenData', ),
- '2.23.42.0.21': ('setct-CapTokenTBS', ),
- '2.23.42.0.22': ('setct-AcqCardCodeMsg', ),
- '2.23.42.0.23': ('setct-AuthRevReqTBS', ),
- '2.23.42.0.24': ('setct-AuthRevResData', ),
- '2.23.42.0.25': ('setct-AuthRevResTBS', ),
- '2.23.42.0.26': ('setct-CapReqTBS', ),
- '2.23.42.0.27': ('setct-CapReqTBSX', ),
- '2.23.42.0.28': ('setct-CapResData', ),
- '2.23.42.0.29': ('setct-CapRevReqTBS', ),
- '2.23.42.0.30': ('setct-CapRevReqTBSX', ),
- '2.23.42.0.31': ('setct-CapRevResData', ),
- '2.23.42.0.32': ('setct-CredReqTBS', ),
- '2.23.42.0.33': ('setct-CredReqTBSX', ),
- '2.23.42.0.34': ('setct-CredResData', ),
- '2.23.42.0.35': ('setct-CredRevReqTBS', ),
- '2.23.42.0.36': ('setct-CredRevReqTBSX', ),
- '2.23.42.0.37': ('setct-CredRevResData', ),
- '2.23.42.0.38': ('setct-PCertReqData', ),
- '2.23.42.0.39': ('setct-PCertResTBS', ),
- '2.23.42.0.40': ('setct-BatchAdminReqData', ),
- '2.23.42.0.41': ('setct-BatchAdminResData', ),
- '2.23.42.0.42': ('setct-CardCInitResTBS', ),
- '2.23.42.0.43': ('setct-MeAqCInitResTBS', ),
- '2.23.42.0.44': ('setct-RegFormResTBS', ),
- '2.23.42.0.45': ('setct-CertReqData', ),
- '2.23.42.0.46': ('setct-CertReqTBS', ),
- '2.23.42.0.47': ('setct-CertResData', ),
- '2.23.42.0.48': ('setct-CertInqReqTBS', ),
- '2.23.42.0.49': ('setct-ErrorTBS', ),
- '2.23.42.0.50': ('setct-PIDualSignedTBE', ),
- '2.23.42.0.51': ('setct-PIUnsignedTBE', ),
- '2.23.42.0.52': ('setct-AuthReqTBE', ),
- '2.23.42.0.53': ('setct-AuthResTBE', ),
- '2.23.42.0.54': ('setct-AuthResTBEX', ),
- '2.23.42.0.55': ('setct-AuthTokenTBE', ),
- '2.23.42.0.56': ('setct-CapTokenTBE', ),
- '2.23.42.0.57': ('setct-CapTokenTBEX', ),
- '2.23.42.0.58': ('setct-AcqCardCodeMsgTBE', ),
- '2.23.42.0.59': ('setct-AuthRevReqTBE', ),
- '2.23.42.0.60': ('setct-AuthRevResTBE', ),
- '2.23.42.0.61': ('setct-AuthRevResTBEB', ),
- '2.23.42.0.62': ('setct-CapReqTBE', ),
- '2.23.42.0.63': ('setct-CapReqTBEX', ),
- '2.23.42.0.64': ('setct-CapResTBE', ),
- '2.23.42.0.65': ('setct-CapRevReqTBE', ),
- '2.23.42.0.66': ('setct-CapRevReqTBEX', ),
- '2.23.42.0.67': ('setct-CapRevResTBE', ),
- '2.23.42.0.68': ('setct-CredReqTBE', ),
- '2.23.42.0.69': ('setct-CredReqTBEX', ),
- '2.23.42.0.70': ('setct-CredResTBE', ),
- '2.23.42.0.71': ('setct-CredRevReqTBE', ),
- '2.23.42.0.72': ('setct-CredRevReqTBEX', ),
- '2.23.42.0.73': ('setct-CredRevResTBE', ),
- '2.23.42.0.74': ('setct-BatchAdminReqTBE', ),
- '2.23.42.0.75': ('setct-BatchAdminResTBE', ),
- '2.23.42.0.76': ('setct-RegFormReqTBE', ),
- '2.23.42.0.77': ('setct-CertReqTBE', ),
- '2.23.42.0.78': ('setct-CertReqTBEX', ),
- '2.23.42.0.79': ('setct-CertResTBE', ),
- '2.23.42.0.80': ('setct-CRLNotificationTBS', ),
- '2.23.42.0.81': ('setct-CRLNotificationResTBS', ),
- '2.23.42.0.82': ('setct-BCIDistributionTBS', ),
- '2.23.42.1': ('message extensions', 'set-msgExt'),
- '2.23.42.1.1': ('generic cryptogram', 'setext-genCrypt'),
- '2.23.42.1.3': ('merchant initiated auth', 'setext-miAuth'),
- '2.23.42.1.4': ('setext-pinSecure', ),
- '2.23.42.1.5': ('setext-pinAny', ),
- '2.23.42.1.7': ('setext-track2', ),
- '2.23.42.1.8': ('additional verification', 'setext-cv'),
- '2.23.42.3': ('set-attr', ),
- '2.23.42.3.0': ('setAttr-Cert', ),
- '2.23.42.3.0.0': ('set-rootKeyThumb', ),
- '2.23.42.3.0.1': ('set-addPolicy', ),
- '2.23.42.3.1': ('payment gateway capabilities', 'setAttr-PGWYcap'),
- '2.23.42.3.2': ('setAttr-TokenType', ),
- '2.23.42.3.2.1': ('setAttr-Token-EMV', ),
- '2.23.42.3.2.2': ('setAttr-Token-B0Prime', ),
- '2.23.42.3.3': ('issuer capabilities', 'setAttr-IssCap'),
- '2.23.42.3.3.3': ('setAttr-IssCap-CVM', ),
- '2.23.42.3.3.3.1': ('generate cryptogram', 'setAttr-GenCryptgrm'),
- '2.23.42.3.3.4': ('setAttr-IssCap-T2', ),
- '2.23.42.3.3.4.1': ('encrypted track 2', 'setAttr-T2Enc'),
- '2.23.42.3.3.4.2': ('cleartext track 2', 'setAttr-T2cleartxt'),
- '2.23.42.3.3.5': ('setAttr-IssCap-Sig', ),
- '2.23.42.3.3.5.1': ('ICC or token signature', 'setAttr-TokICCsig'),
- '2.23.42.3.3.5.2': ('secure device signature', 'setAttr-SecDevSig'),
- '2.23.42.5': ('set-policy', ),
- '2.23.42.5.0': ('set-policy-root', ),
- '2.23.42.7': ('certificate extensions', 'set-certExt'),
- '2.23.42.7.0': ('setCext-hashedRoot', ),
- '2.23.42.7.1': ('setCext-certType', ),
- '2.23.42.7.2': ('setCext-merchData', ),
- '2.23.42.7.3': ('setCext-cCertRequired', ),
- '2.23.42.7.4': ('setCext-tunneling', ),
- '2.23.42.7.5': ('setCext-setExt', ),
- '2.23.42.7.6': ('setCext-setQualf', ),
- '2.23.42.7.7': ('setCext-PGWYcapabilities', ),
- '2.23.42.7.8': ('setCext-TokenIdentifier', ),
- '2.23.42.7.9': ('setCext-Track2Data', ),
- '2.23.42.7.10': ('setCext-TokenType', ),
- '2.23.42.7.11': ('setCext-IssuerCapabilities', ),
- '2.23.42.8': ('set-brand', ),
- '2.23.42.8.1': ('set-brand-IATA-ATA', ),
- '2.23.42.8.4': ('set-brand-Visa', ),
- '2.23.42.8.5': ('set-brand-MasterCard', ),
- '2.23.42.8.30': ('set-brand-Diners', ),
- '2.23.42.8.34': ('set-brand-AmericanExpress', ),
- '2.23.42.8.35': ('set-brand-JCB', ),
- '2.23.42.8.6011': ('set-brand-Novus', ),
- '2.23.43': ('wap', ),
- '2.23.43.1': ('wap-wsg', ),
- '2.23.43.1.4': ('wap-wsg-idm-ecid', ),
- '2.23.43.1.4.1': ('wap-wsg-idm-ecid-wtls1', ),
- '2.23.43.1.4.3': ('wap-wsg-idm-ecid-wtls3', ),
- '2.23.43.1.4.4': ('wap-wsg-idm-ecid-wtls4', ),
- '2.23.43.1.4.5': ('wap-wsg-idm-ecid-wtls5', ),
- '2.23.43.1.4.6': ('wap-wsg-idm-ecid-wtls6', ),
- '2.23.43.1.4.7': ('wap-wsg-idm-ecid-wtls7', ),
- '2.23.43.1.4.8': ('wap-wsg-idm-ecid-wtls8', ),
- '2.23.43.1.4.9': ('wap-wsg-idm-ecid-wtls9', ),
- '2.23.43.1.4.10': ('wap-wsg-idm-ecid-wtls10', ),
- '2.23.43.1.4.11': ('wap-wsg-idm-ecid-wtls11', ),
- '2.23.43.1.4.12': ('wap-wsg-idm-ecid-wtls12', ),
-}
-# #####################################################################################
-# #####################################################################################
-
-_OID_LOOKUP = dict()
-_NORMALIZE_NAMES = dict()
-_NORMALIZE_NAMES_SHORT = dict()
-
-for dotted, names in _OID_MAP.items():
- for name in names:
- if name in _NORMALIZE_NAMES and _OID_LOOKUP[name] != dotted:
- raise AssertionError(
- 'Name collision during setup: "{0}" for OIDs {1} and {2}'
- .format(name, dotted, _OID_LOOKUP[name])
- )
- _NORMALIZE_NAMES[name] = names[0]
- _NORMALIZE_NAMES_SHORT[name] = names[-1]
- _OID_LOOKUP[name] = dotted
-for alias, original in [('userID', 'userId')]:
- if alias in _NORMALIZE_NAMES:
- raise AssertionError(
- 'Name collision during adding aliases: "{0}" (alias for "{1}") is already mapped to OID {2}'
- .format(alias, original, _OID_LOOKUP[alias])
- )
- _NORMALIZE_NAMES[alias] = original
- _NORMALIZE_NAMES_SHORT[alias] = _NORMALIZE_NAMES_SHORT[original]
- _OID_LOOKUP[alias] = _OID_LOOKUP[original]
-
-
-def pyopenssl_normalize_name(name, short=False):
- nid = OpenSSL._util.lib.OBJ_txt2nid(to_bytes(name))
- if nid != 0:
- b_name = OpenSSL._util.lib.OBJ_nid2ln(nid)
- name = to_text(OpenSSL._util.ffi.string(b_name))
- if short:
- return _NORMALIZE_NAMES_SHORT.get(name, name)
- else:
- return _NORMALIZE_NAMES.get(name, name)
-
-
-# #####################################################################################
-# #####################################################################################
-# # This excerpt is dual licensed under the terms of the Apache License, Version
-# # 2.0, and the BSD License. See the LICENSE file at
-# # https://github.com/pyca/cryptography/blob/master/LICENSE for complete details.
-# #
-# # Adapted from cryptography's hazmat/backends/openssl/decode_asn1.py
-# #
-# # Copyright (c) 2015, 2016 Paul Kehrer (@reaperhulk)
-# # Copyright (c) 2017 Fraser Tweedale (@frasertweedale)
-# #
-# # Relevant commits from cryptography project (https://github.com/pyca/cryptography):
-# # pyca/cryptography@719d536dd691e84e208534798f2eb4f82aaa2e07
-# # pyca/cryptography@5ab6d6a5c05572bd1c75f05baf264a2d0001894a
-# # pyca/cryptography@2e776e20eb60378e0af9b7439000d0e80da7c7e3
-# # pyca/cryptography@fb309ed24647d1be9e319b61b1f2aa8ebb87b90b
-# # pyca/cryptography@2917e460993c475c72d7146c50dc3bbc2414280d
-# # pyca/cryptography@3057f91ea9a05fb593825006d87a391286a4d828
-# # pyca/cryptography@d607dd7e5bc5c08854ec0c9baff70ba4a35be36f
-def _obj2txt(openssl_lib, openssl_ffi, obj):
- # Set to 80 on the recommendation of
- # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
- #
- # But OIDs longer than this occur in real life (e.g. Active
- # Directory makes some very long OIDs). So we need to detect
- # and properly handle the case where the default buffer is not
- # big enough.
- #
- buf_len = 80
- buf = openssl_ffi.new("char[]", buf_len)
-
- # 'res' is the number of bytes that *would* be written if the
- # buffer is large enough. If 'res' > buf_len - 1, we need to
- # alloc a big-enough buffer and go again.
- res = openssl_lib.OBJ_obj2txt(buf, buf_len, obj, 1)
- if res > buf_len - 1: # account for terminating null byte
- buf_len = res + 1
- buf = openssl_ffi.new("char[]", buf_len)
- res = openssl_lib.OBJ_obj2txt(buf, buf_len, obj, 1)
- return openssl_ffi.buffer(buf, res)[:].decode()
-# #####################################################################################
-# #####################################################################################
-
-
-def cryptography_get_extensions_from_cert(cert):
- # Since cryptography won't give us the DER value for an extension
- # (that is only stored for unrecognized extensions), we have to re-do
- # the extension parsing outselves.
- result = dict()
- backend = cert._backend
- x509_obj = cert._x509
-
- for i in range(backend._lib.X509_get_ext_count(x509_obj)):
- ext = backend._lib.X509_get_ext(x509_obj, i)
- if ext == backend._ffi.NULL:
- continue
- crit = backend._lib.X509_EXTENSION_get_critical(ext)
- data = backend._lib.X509_EXTENSION_get_data(ext)
- backend.openssl_assert(data != backend._ffi.NULL)
- der = backend._ffi.buffer(data.data, data.length)[:]
- entry = dict(
- critical=(crit == 1),
- value=base64.b64encode(der),
- )
- oid = _obj2txt(backend._lib, backend._ffi, backend._lib.X509_EXTENSION_get_object(ext))
- result[oid] = entry
- return result
-
-
-def cryptography_get_extensions_from_csr(csr):
- # Since cryptography won't give us the DER value for an extension
- # (that is only stored for unrecognized extensions), we have to re-do
- # the extension parsing outselves.
- result = dict()
- backend = csr._backend
-
- extensions = backend._lib.X509_REQ_get_extensions(csr._x509_req)
- extensions = backend._ffi.gc(
- extensions,
- lambda ext: backend._lib.sk_X509_EXTENSION_pop_free(
- ext,
- backend._ffi.addressof(backend._lib._original_lib, "X509_EXTENSION_free")
- )
- )
-
- for i in range(backend._lib.sk_X509_EXTENSION_num(extensions)):
- ext = backend._lib.sk_X509_EXTENSION_value(extensions, i)
- if ext == backend._ffi.NULL:
- continue
- crit = backend._lib.X509_EXTENSION_get_critical(ext)
- data = backend._lib.X509_EXTENSION_get_data(ext)
- backend.openssl_assert(data != backend._ffi.NULL)
- der = backend._ffi.buffer(data.data, data.length)[:]
- entry = dict(
- critical=(crit == 1),
- value=base64.b64encode(der),
- )
- oid = _obj2txt(backend._lib, backend._ffi, backend._lib.X509_EXTENSION_get_object(ext))
- result[oid] = entry
- return result
-
-
-def pyopenssl_get_extensions_from_cert(cert):
- # While pyOpenSSL allows us to get an extension's DER value, it won't
- # give us the dotted string for an OID. So we have to do some magic to
- # get hold of it.
- result = dict()
- ext_count = cert.get_extension_count()
- for i in range(0, ext_count):
- ext = cert.get_extension(i)
- entry = dict(
- critical=bool(ext.get_critical()),
- value=base64.b64encode(ext.get_data()),
- )
- oid = _obj2txt(
- OpenSSL._util.lib,
- OpenSSL._util.ffi,
- OpenSSL._util.lib.X509_EXTENSION_get_object(ext._extension)
- )
- # This could also be done a bit simpler:
- #
- # oid = _obj2txt(OpenSSL._util.lib, OpenSSL._util.ffi, OpenSSL._util.lib.OBJ_nid2obj(ext._nid))
- #
- # Unfortunately this gives the wrong result in case the linked OpenSSL
- # doesn't know the OID. That's why we have to get the OID dotted string
- # similarly to how cryptography does it.
- result[oid] = entry
- return result
-
-
-def pyopenssl_get_extensions_from_csr(csr):
- # While pyOpenSSL allows us to get an extension's DER value, it won't
- # give us the dotted string for an OID. So we have to do some magic to
- # get hold of it.
- result = dict()
- for ext in csr.get_extensions():
- entry = dict(
- critical=bool(ext.get_critical()),
- value=base64.b64encode(ext.get_data()),
- )
- oid = _obj2txt(
- OpenSSL._util.lib,
- OpenSSL._util.ffi,
- OpenSSL._util.lib.X509_EXTENSION_get_object(ext._extension)
- )
- # This could also be done a bit simpler:
- #
- # oid = _obj2txt(OpenSSL._util.lib, OpenSSL._util.ffi, OpenSSL._util.lib.OBJ_nid2obj(ext._nid))
- #
- # Unfortunately this gives the wrong result in case the linked OpenSSL
- # doesn't know the OID. That's why we have to get the OID dotted string
- # similarly to how cryptography does it.
- result[oid] = entry
- return result
-
-
-def cryptography_name_to_oid(name):
- dotted = _OID_LOOKUP.get(name)
- if dotted is None:
- raise OpenSSLObjectError('Cannot find OID for "{0}"'.format(name))
- return x509.oid.ObjectIdentifier(dotted)
-
-
-def cryptography_oid_to_name(oid, short=False):
- dotted_string = oid.dotted_string
- names = _OID_MAP.get(dotted_string)
- name = names[0] if names else oid._name
- if short:
- return _NORMALIZE_NAMES_SHORT.get(name, name)
- else:
- return _NORMALIZE_NAMES.get(name, name)
-
-
-def cryptography_get_name(name):
- '''
- Given a name string, returns a cryptography x509.Name object.
- Raises an OpenSSLObjectError if the name is unknown or cannot be parsed.
- '''
- try:
- if name.startswith('DNS:'):
- return x509.DNSName(to_text(name[4:]))
- if name.startswith('IP:'):
- return x509.IPAddress(ipaddress.ip_address(to_text(name[3:])))
- if name.startswith('email:'):
- return x509.RFC822Name(to_text(name[6:]))
- if name.startswith('URI:'):
- return x509.UniformResourceIdentifier(to_text(name[4:]))
- except Exception as e:
- raise OpenSSLObjectError('Cannot parse Subject Alternative Name "{0}": {1}'.format(name, e))
- if ':' not in name:
- raise OpenSSLObjectError('Cannot parse Subject Alternative Name "{0}" (forgot "DNS:" prefix?)'.format(name))
- raise OpenSSLObjectError('Cannot parse Subject Alternative Name "{0}" (potentially unsupported by cryptography backend)'.format(name))
-
-
-def _get_hex(bytesstr):
- if bytesstr is None:
- return bytesstr
- data = binascii.hexlify(bytesstr)
- data = to_text(b':'.join(data[i:i + 2] for i in range(0, len(data), 2)))
- return data
-
-
-def cryptography_decode_name(name):
- '''
- Given a cryptography x509.Name object, returns a string.
- Raises an OpenSSLObjectError if the name is not supported.
- '''
- if isinstance(name, x509.DNSName):
- return 'DNS:{0}'.format(name.value)
- if isinstance(name, x509.IPAddress):
- return 'IP:{0}'.format(name.value.compressed)
- if isinstance(name, x509.RFC822Name):
- return 'email:{0}'.format(name.value)
- if isinstance(name, x509.UniformResourceIdentifier):
- return 'URI:{0}'.format(name.value)
- if isinstance(name, x509.DirectoryName):
- # FIXME: test
- return 'DirName:' + ''.join(['/{0}:{1}'.format(attribute.oid._name, attribute.value) for attribute in name.value])
- if isinstance(name, x509.RegisteredID):
- # FIXME: test
- return 'RegisteredID:{0}'.format(name.value)
- if isinstance(name, x509.OtherName):
- # FIXME: test
- return '{0}:{1}'.format(name.type_id.dotted_string, _get_hex(name.value))
- raise OpenSSLObjectError('Cannot decode name "{0}"'.format(name))
-
-
-def _cryptography_get_keyusage(usage):
- '''
- Given a key usage identifier string, returns the parameter name used by cryptography's x509.KeyUsage().
- Raises an OpenSSLObjectError if the identifier is unknown.
- '''
- if usage in ('Digital Signature', 'digitalSignature'):
- return 'digital_signature'
- if usage in ('Non Repudiation', 'nonRepudiation'):
- return 'content_commitment'
- if usage in ('Key Encipherment', 'keyEncipherment'):
- return 'key_encipherment'
- if usage in ('Data Encipherment', 'dataEncipherment'):
- return 'data_encipherment'
- if usage in ('Key Agreement', 'keyAgreement'):
- return 'key_agreement'
- if usage in ('Certificate Sign', 'keyCertSign'):
- return 'key_cert_sign'
- if usage in ('CRL Sign', 'cRLSign'):
- return 'crl_sign'
- if usage in ('Encipher Only', 'encipherOnly'):
- return 'encipher_only'
- if usage in ('Decipher Only', 'decipherOnly'):
- return 'decipher_only'
- raise OpenSSLObjectError('Unknown key usage "{0}"'.format(usage))
-
-
-def cryptography_parse_key_usage_params(usages):
- '''
- Given a list of key usage identifier strings, returns the parameters for cryptography's x509.KeyUsage().
- Raises an OpenSSLObjectError if an identifier is unknown.
- '''
- params = dict(
- digital_signature=False,
- content_commitment=False,
- key_encipherment=False,
- data_encipherment=False,
- key_agreement=False,
- key_cert_sign=False,
- crl_sign=False,
- encipher_only=False,
- decipher_only=False,
- )
- for usage in usages:
- params[_cryptography_get_keyusage(usage)] = True
- return params
-
-
-def cryptography_get_basic_constraints(constraints):
- '''
- Given a list of constraints, returns a tuple (ca, path_length).
- Raises an OpenSSLObjectError if a constraint is unknown or cannot be parsed.
- '''
- ca = False
- path_length = None
- if constraints:
- for constraint in constraints:
- if constraint.startswith('CA:'):
- if constraint == 'CA:TRUE':
- ca = True
- elif constraint == 'CA:FALSE':
- ca = False
- else:
- raise OpenSSLObjectError('Unknown basic constraint value "{0}" for CA'.format(constraint[3:]))
- elif constraint.startswith('pathlen:'):
- v = constraint[len('pathlen:'):]
- try:
- path_length = int(v)
- except Exception as e:
- raise OpenSSLObjectError('Cannot parse path length constraint "{0}" ({1})'.format(v, e))
- else:
- raise OpenSSLObjectError('Unknown basic constraint "{0}"'.format(constraint))
- return ca, path_length
-
-
-def binary_exp_mod(f, e, m):
- '''Computes f^e mod m in O(log e) multiplications modulo m.'''
- # Compute len_e = floor(log_2(e))
- len_e = -1
- x = e
- while x > 0:
- x >>= 1
- len_e += 1
- # Compute f**e mod m
- result = 1
- for k in range(len_e, -1, -1):
- result = (result * result) % m
- if ((e >> k) & 1) != 0:
- result = (result * f) % m
- return result
-
-
-def simple_gcd(a, b):
- '''Compute GCD of its two inputs.'''
- while b != 0:
- a, b = b, a % b
- return a
-
-
-def quick_is_not_prime(n):
- '''Does some quick checks to see if we can poke a hole into the primality of n.
-
- A result of `False` does **not** mean that the number is prime; it just means
- that we couldn't detect quickly whether it is not prime.
- '''
- if n <= 2:
- return True
- # The constant in the next line is the product of all primes < 200
- if simple_gcd(n, 7799922041683461553249199106329813876687996789903550945093032474868511536164700810) > 1:
- return True
- # TODO: maybe do some iterations of Miller-Rabin to increase confidence
- # (https://en.wikipedia.org/wiki/Miller%E2%80%93Rabin_primality_test)
- return False
-
-
-python_version = (sys.version_info[0], sys.version_info[1])
-if python_version >= (2, 7) or python_version >= (3, 1):
- # Ansible still supports Python 2.6 on remote nodes
- def count_bits(no):
- no = abs(no)
- if no == 0:
- return 0
- return no.bit_length()
-else:
- # Slow, but works
- def count_bits(no):
- no = abs(no)
- count = 0
- while no > 0:
- no >>= 1
- count += 1
- return count
-
-
-PEM_START = '-----BEGIN '
-PEM_END = '-----'
-PKCS8_PRIVATEKEY_NAMES = ('PRIVATE KEY', 'ENCRYPTED PRIVATE KEY')
-PKCS1_PRIVATEKEY_SUFFIX = ' PRIVATE KEY'
-
-
-def identify_private_key_format(content):
- '''Given the contents of a private key file, identifies its format.'''
- # See https://github.com/openssl/openssl/blob/master/crypto/pem/pem_pkey.c#L40-L85
- # (PEM_read_bio_PrivateKey)
- # and https://github.com/openssl/openssl/blob/master/include/openssl/pem.h#L46-L47
- # (PEM_STRING_PKCS8, PEM_STRING_PKCS8INF)
- try:
- lines = content.decode('utf-8').splitlines(False)
- if lines[0].startswith(PEM_START) and lines[0].endswith(PEM_END) and len(lines[0]) > len(PEM_START) + len(PEM_END):
- name = lines[0][len(PEM_START):-len(PEM_END)]
- if name in PKCS8_PRIVATEKEY_NAMES:
- return 'pkcs8'
- if len(name) > len(PKCS1_PRIVATEKEY_SUFFIX) and name.endswith(PKCS1_PRIVATEKEY_SUFFIX):
- return 'pkcs1'
- return 'unknown-pem'
- except UnicodeDecodeError:
- pass
- return 'raw'
-
-
-def cryptography_key_needs_digest_for_signing(key):
- '''Tests whether the given private key requires a digest algorithm for signing.
-
- Ed25519 and Ed448 keys do not; they need None to be passed as the digest algorithm.
- '''
- if CRYPTOGRAPHY_HAS_ED25519 and isinstance(key, cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PrivateKey):
- return False
- if CRYPTOGRAPHY_HAS_ED448 and isinstance(key, cryptography.hazmat.primitives.asymmetric.ed448.Ed448PrivateKey):
- return False
- return True
-
-
-def cryptography_compare_public_keys(key1, key2):
- '''Tests whether two public keys are the same.
-
- Needs special logic for Ed25519 and Ed448 keys, since they do not have public_numbers().
- '''
- if CRYPTOGRAPHY_HAS_ED25519:
- a = isinstance(key1, cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PublicKey)
- b = isinstance(key2, cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PublicKey)
- if a or b:
- if not a or not b:
- return False
- a = key1.public_bytes(serialization.Encoding.Raw, serialization.PublicFormat.Raw)
- b = key2.public_bytes(serialization.Encoding.Raw, serialization.PublicFormat.Raw)
- return a == b
- if CRYPTOGRAPHY_HAS_ED448:
- a = isinstance(key1, cryptography.hazmat.primitives.asymmetric.ed448.Ed448PublicKey)
- b = isinstance(key2, cryptography.hazmat.primitives.asymmetric.ed448.Ed448PublicKey)
- if a or b:
- if not a or not b:
- return False
- a = key1.public_bytes(serialization.Encoding.Raw, serialization.PublicFormat.Raw)
- b = key2.public_bytes(serialization.Encoding.Raw, serialization.PublicFormat.Raw)
- return a == b
- return key1.public_numbers() == key2.public_numbers()
-
-
-if HAS_CRYPTOGRAPHY:
- REVOCATION_REASON_MAP = {
- 'unspecified': x509.ReasonFlags.unspecified,
- 'key_compromise': x509.ReasonFlags.key_compromise,
- 'ca_compromise': x509.ReasonFlags.ca_compromise,
- 'affiliation_changed': x509.ReasonFlags.affiliation_changed,
- 'superseded': x509.ReasonFlags.superseded,
- 'cessation_of_operation': x509.ReasonFlags.cessation_of_operation,
- 'certificate_hold': x509.ReasonFlags.certificate_hold,
- 'privilege_withdrawn': x509.ReasonFlags.privilege_withdrawn,
- 'aa_compromise': x509.ReasonFlags.aa_compromise,
- 'remove_from_crl': x509.ReasonFlags.remove_from_crl,
- }
- REVOCATION_REASON_MAP_INVERSE = dict()
- for k, v in REVOCATION_REASON_MAP.items():
- REVOCATION_REASON_MAP_INVERSE[v] = k
-
-
-def cryptography_decode_revoked_certificate(cert):
- result = {
- 'serial_number': cert.serial_number,
- 'revocation_date': cert.revocation_date,
- 'issuer': None,
- 'issuer_critical': False,
- 'reason': None,
- 'reason_critical': False,
- 'invalidity_date': None,
- 'invalidity_date_critical': False,
- }
- try:
- ext = cert.extensions.get_extension_for_class(x509.CertificateIssuer)
- result['issuer'] = list(ext.value)
- result['issuer_critical'] = ext.critical
- except x509.ExtensionNotFound:
- pass
- try:
- ext = cert.extensions.get_extension_for_class(x509.CRLReason)
- result['reason'] = ext.value.reason
- result['reason_critical'] = ext.critical
- except x509.ExtensionNotFound:
- pass
- try:
- ext = cert.extensions.get_extension_for_class(x509.InvalidityDate)
- result['invalidity_date'] = ext.value.invalidity_date
- result['invalidity_date_critical'] = ext.critical
- except x509.ExtensionNotFound:
- pass
- return result
diff --git a/test/support/integration/plugins/module_utils/database.py b/test/support/integration/plugins/module_utils/database.py
deleted file mode 100644
index 014939a2..00000000
--- a/test/support/integration/plugins/module_utils/database.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# This code is part of Ansible, but is an independent component.
-# This particular file snippet, and this file snippet only, is BSD licensed.
-# Modules you write using this snippet, which is embedded dynamically by Ansible
-# still belong to the author of the module, and may assign their own license
-# to the complete work.
-#
-# Copyright (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without modification,
-# are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
-# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-
-class SQLParseError(Exception):
- pass
-
-
-class UnclosedQuoteError(SQLParseError):
- pass
-
-
-# maps a type of identifier to the maximum number of dot levels that are
-# allowed to specify that identifier. For example, a database column can be
-# specified by up to 4 levels: database.schema.table.column
-_PG_IDENTIFIER_TO_DOT_LEVEL = dict(
- database=1,
- schema=2,
- table=3,
- column=4,
- role=1,
- tablespace=1,
- sequence=3,
- publication=1,
-)
-_MYSQL_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, table=2, column=3, role=1, vars=1)
-
-
-def _find_end_quote(identifier, quote_char):
- accumulate = 0
- while True:
- try:
- quote = identifier.index(quote_char)
- except ValueError:
- raise UnclosedQuoteError
- accumulate = accumulate + quote
- try:
- next_char = identifier[quote + 1]
- except IndexError:
- return accumulate
- if next_char == quote_char:
- try:
- identifier = identifier[quote + 2:]
- accumulate = accumulate + 2
- except IndexError:
- raise UnclosedQuoteError
- else:
- return accumulate
-
-
-def _identifier_parse(identifier, quote_char):
- if not identifier:
- raise SQLParseError('Identifier name unspecified or unquoted trailing dot')
-
- already_quoted = False
- if identifier.startswith(quote_char):
- already_quoted = True
- try:
- end_quote = _find_end_quote(identifier[1:], quote_char=quote_char) + 1
- except UnclosedQuoteError:
- already_quoted = False
- else:
- if end_quote < len(identifier) - 1:
- if identifier[end_quote + 1] == '.':
- dot = end_quote + 1
- first_identifier = identifier[:dot]
- next_identifier = identifier[dot + 1:]
- further_identifiers = _identifier_parse(next_identifier, quote_char)
- further_identifiers.insert(0, first_identifier)
- else:
- raise SQLParseError('User escaped identifiers must escape extra quotes')
- else:
- further_identifiers = [identifier]
-
- if not already_quoted:
- try:
- dot = identifier.index('.')
- except ValueError:
- identifier = identifier.replace(quote_char, quote_char * 2)
- identifier = ''.join((quote_char, identifier, quote_char))
- further_identifiers = [identifier]
- else:
- if dot == 0 or dot >= len(identifier) - 1:
- identifier = identifier.replace(quote_char, quote_char * 2)
- identifier = ''.join((quote_char, identifier, quote_char))
- further_identifiers = [identifier]
- else:
- first_identifier = identifier[:dot]
- next_identifier = identifier[dot + 1:]
- further_identifiers = _identifier_parse(next_identifier, quote_char)
- first_identifier = first_identifier.replace(quote_char, quote_char * 2)
- first_identifier = ''.join((quote_char, first_identifier, quote_char))
- further_identifiers.insert(0, first_identifier)
-
- return further_identifiers
-
-
-def pg_quote_identifier(identifier, id_type):
- identifier_fragments = _identifier_parse(identifier, quote_char='"')
- if len(identifier_fragments) > _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]:
- raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]))
- return '.'.join(identifier_fragments)
-
-
-def mysql_quote_identifier(identifier, id_type):
- identifier_fragments = _identifier_parse(identifier, quote_char='`')
- if (len(identifier_fragments) - 1) > _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]:
- raise SQLParseError('MySQL does not support %s with more than %i dots' % (id_type, _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]))
-
- special_cased_fragments = []
- for fragment in identifier_fragments:
- if fragment == '`*`':
- special_cased_fragments.append('*')
- else:
- special_cased_fragments.append(fragment)
-
- return '.'.join(special_cased_fragments)
diff --git a/test/support/integration/plugins/module_utils/ecs/__init__.py b/test/support/integration/plugins/module_utils/ecs/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/test/support/integration/plugins/module_utils/ecs/__init__.py
+++ /dev/null
diff --git a/test/support/integration/plugins/module_utils/ecs/api.py b/test/support/integration/plugins/module_utils/ecs/api.py
deleted file mode 100644
index d89b0333..00000000
--- a/test/support/integration/plugins/module_utils/ecs/api.py
+++ /dev/null
@@ -1,364 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# This code is part of Ansible, but is an independent component.
-# This particular file snippet, and this file snippet only, is licensed under the
-# Modified BSD License. Modules you write using this snippet, which is embedded
-# dynamically by Ansible, still belong to the author of the module, and may assign
-# their own license to the complete work.
-#
-# Copyright (c), Entrust Datacard Corporation, 2019
-# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
-
-# Redistribution and use in source and binary forms, with or without modification,
-# are permitted provided that the following conditions are met:
-# 1. Redistributions of source code must retain the above copyright notice,
-# this list of conditions and the following disclaimer.
-# 2. Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
-# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-from __future__ import absolute_import, division, print_function
-
-__metaclass__ = type
-
-import json
-import os
-import re
-import time
-import traceback
-
-from ansible.module_utils._text import to_text, to_native
-from ansible.module_utils.basic import missing_required_lib
-from ansible.module_utils.six.moves.urllib.parse import urlencode
-from ansible.module_utils.six.moves.urllib.error import HTTPError
-from ansible.module_utils.urls import Request
-
-YAML_IMP_ERR = None
-try:
- import yaml
-except ImportError:
- YAML_FOUND = False
- YAML_IMP_ERR = traceback.format_exc()
-else:
- YAML_FOUND = True
-
-valid_file_format = re.compile(r".*(\.)(yml|yaml|json)$")
-
-
-def ecs_client_argument_spec():
- return dict(
- entrust_api_user=dict(type='str', required=True),
- entrust_api_key=dict(type='str', required=True, no_log=True),
- entrust_api_client_cert_path=dict(type='path', required=True),
- entrust_api_client_cert_key_path=dict(type='path', required=True, no_log=True),
- entrust_api_specification_path=dict(type='path', default='https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml'),
- )
-
-
-class SessionConfigurationException(Exception):
- """ Raised if we cannot configure a session with the API """
-
- pass
-
-
-class RestOperationException(Exception):
- """ Encapsulate a REST API error """
-
- def __init__(self, error):
- self.status = to_native(error.get("status", None))
- self.errors = [to_native(err.get("message")) for err in error.get("errors", {})]
- self.message = to_native(" ".join(self.errors))
-
-
-def generate_docstring(operation_spec):
- """Generate a docstring for an operation defined in operation_spec (swagger)"""
- # Description of the operation
- docs = operation_spec.get("description", "No Description")
- docs += "\n\n"
-
- # Parameters of the operation
- parameters = operation_spec.get("parameters", [])
- if len(parameters) != 0:
- docs += "\tArguments:\n\n"
- for parameter in parameters:
- docs += "{0} ({1}:{2}): {3}\n".format(
- parameter.get("name"),
- parameter.get("type", "No Type"),
- "Required" if parameter.get("required", False) else "Not Required",
- parameter.get("description"),
- )
-
- return docs
-
-
-def bind(instance, method, operation_spec):
- def binding_scope_fn(*args, **kwargs):
- return method(instance, *args, **kwargs)
-
- # Make sure we don't confuse users; add the proper name and documentation to the function.
- # Users can use !help(<function>) to get help on the function from interactive python or pdb
- operation_name = operation_spec.get("operationId").split("Using")[0]
- binding_scope_fn.__name__ = str(operation_name)
- binding_scope_fn.__doc__ = generate_docstring(operation_spec)
-
- return binding_scope_fn
-
-
-class RestOperation(object):
- def __init__(self, session, uri, method, parameters=None):
- self.session = session
- self.method = method
- if parameters is None:
- self.parameters = {}
- else:
- self.parameters = parameters
- self.url = "{scheme}://{host}{base_path}{uri}".format(scheme="https", host=session._spec.get("host"), base_path=session._spec.get("basePath"), uri=uri)
-
- def restmethod(self, *args, **kwargs):
- """Do the hard work of making the request here"""
-
- # gather named path parameters and do substitution on the URL
- if self.parameters:
- path_parameters = {}
- body_parameters = {}
- query_parameters = {}
- for x in self.parameters:
- expected_location = x.get("in")
- key_name = x.get("name", None)
- key_value = kwargs.get(key_name, None)
- if expected_location == "path" and key_name and key_value:
- path_parameters.update({key_name: key_value})
- elif expected_location == "body" and key_name and key_value:
- body_parameters.update({key_name: key_value})
- elif expected_location == "query" and key_name and key_value:
- query_parameters.update({key_name: key_value})
-
- if len(body_parameters.keys()) >= 1:
- body_parameters = body_parameters.get(list(body_parameters.keys())[0])
- else:
- body_parameters = None
- else:
- path_parameters = {}
- query_parameters = {}
- body_parameters = None
-
- # This will fail if we have not set path parameters with a KeyError
- url = self.url.format(**path_parameters)
- if query_parameters:
- # modify the URL to add path parameters
- url = url + "?" + urlencode(query_parameters)
-
- try:
- if body_parameters:
- body_parameters_json = json.dumps(body_parameters)
- response = self.session.request.open(method=self.method, url=url, data=body_parameters_json)
- else:
- response = self.session.request.open(method=self.method, url=url)
- request_error = False
- except HTTPError as e:
- # An HTTPError has the same methods available as a valid response from request.open
- response = e
- request_error = True
-
- # Return the result if JSON and success ({} for empty responses)
- # Raise an exception if there was a failure.
- try:
- result_code = response.getcode()
- result = json.loads(response.read())
- except ValueError:
- result = {}
-
- if result or result == {}:
- if result_code and result_code < 400:
- return result
- else:
- raise RestOperationException(result)
-
- # Raise a generic RestOperationException if this fails
- raise RestOperationException({"status": result_code, "errors": [{"message": "REST Operation Failed"}]})
-
-
-class Resource(object):
- """ Implement basic CRUD operations against a path. """
-
- def __init__(self, session):
- self.session = session
- self.parameters = {}
-
- for url in session._spec.get("paths").keys():
- methods = session._spec.get("paths").get(url)
- for method in methods.keys():
- operation_spec = methods.get(method)
- operation_name = operation_spec.get("operationId", None)
- parameters = operation_spec.get("parameters")
-
- if not operation_name:
- if method.lower() == "post":
- operation_name = "Create"
- elif method.lower() == "get":
- operation_name = "Get"
- elif method.lower() == "put":
- operation_name = "Update"
- elif method.lower() == "delete":
- operation_name = "Delete"
- elif method.lower() == "patch":
- operation_name = "Patch"
- else:
- raise SessionConfigurationException(to_native("Invalid REST method type {0}".format(method)))
-
- # Get the non-parameter parts of the URL and append to the operation name
- # e.g /application/version -> GetApplicationVersion
- # e.g. /application/{id} -> GetApplication
- # This may lead to duplicates, which we must prevent.
- operation_name += re.sub(r"{(.*)}", "", url).replace("/", " ").title().replace(" ", "")
- operation_spec["operationId"] = operation_name
-
- op = RestOperation(session, url, method, parameters)
- setattr(self, operation_name, bind(self, op.restmethod, operation_spec))
-
-
-# Session to encapsulate the connection parameters of the module_utils Request object, the api spec, etc
-class ECSSession(object):
- def __init__(self, name, **kwargs):
- """
- Initialize our session
- """
-
- self._set_config(name, **kwargs)
-
- def client(self):
- resource = Resource(self)
- return resource
-
- def _set_config(self, name, **kwargs):
- headers = {
- "Content-Type": "application/json",
- "Connection": "keep-alive",
- }
- self.request = Request(headers=headers, timeout=60)
-
- configurators = [self._read_config_vars]
- for configurator in configurators:
- self._config = configurator(name, **kwargs)
- if self._config:
- break
- if self._config is None:
- raise SessionConfigurationException(to_native("No Configuration Found."))
-
- # set up auth if passed
- entrust_api_user = self.get_config("entrust_api_user")
- entrust_api_key = self.get_config("entrust_api_key")
- if entrust_api_user and entrust_api_key:
- self.request.url_username = entrust_api_user
- self.request.url_password = entrust_api_key
- else:
- raise SessionConfigurationException(to_native("User and key must be provided."))
-
- # set up client certificate if passed (support all-in one or cert + key)
- entrust_api_cert = self.get_config("entrust_api_cert")
- entrust_api_cert_key = self.get_config("entrust_api_cert_key")
- if entrust_api_cert:
- self.request.client_cert = entrust_api_cert
- if entrust_api_cert_key:
- self.request.client_key = entrust_api_cert_key
- else:
- raise SessionConfigurationException(to_native("Client certificate for authentication to the API must be provided."))
-
- # set up the spec
- entrust_api_specification_path = self.get_config("entrust_api_specification_path")
-
- if not entrust_api_specification_path.startswith("http") and not os.path.isfile(entrust_api_specification_path):
- raise SessionConfigurationException(to_native("OpenAPI specification was not found at location {0}.".format(entrust_api_specification_path)))
- if not valid_file_format.match(entrust_api_specification_path):
- raise SessionConfigurationException(to_native("OpenAPI specification filename must end in .json, .yml or .yaml"))
-
- self.verify = True
-
- if entrust_api_specification_path.startswith("http"):
- try:
- http_response = Request().open(method="GET", url=entrust_api_specification_path)
- http_response_contents = http_response.read()
- if entrust_api_specification_path.endswith(".json"):
- self._spec = json.load(http_response_contents)
- elif entrust_api_specification_path.endswith(".yml") or entrust_api_specification_path.endswith(".yaml"):
- self._spec = yaml.safe_load(http_response_contents)
- except HTTPError as e:
- raise SessionConfigurationException(to_native("Error downloading specification from address '{0}', received error code '{1}'".format(
- entrust_api_specification_path, e.getcode())))
- else:
- with open(entrust_api_specification_path) as f:
- if ".json" in entrust_api_specification_path:
- self._spec = json.load(f)
- elif ".yml" in entrust_api_specification_path or ".yaml" in entrust_api_specification_path:
- self._spec = yaml.safe_load(f)
-
- def get_config(self, item):
- return self._config.get(item, None)
-
- def _read_config_vars(self, name, **kwargs):
- """ Read configuration from variables passed to the module. """
- config = {}
-
- entrust_api_specification_path = kwargs.get("entrust_api_specification_path")
- if not entrust_api_specification_path or (not entrust_api_specification_path.startswith("http") and not os.path.isfile(entrust_api_specification_path)):
- raise SessionConfigurationException(
- to_native(
- "Parameter provided for entrust_api_specification_path of value '{0}' was not a valid file path or HTTPS address.".format(
- entrust_api_specification_path
- )
- )
- )
-
- for required_file in ["entrust_api_cert", "entrust_api_cert_key"]:
- file_path = kwargs.get(required_file)
- if not file_path or not os.path.isfile(file_path):
- raise SessionConfigurationException(
- to_native("Parameter provided for {0} of value '{1}' was not a valid file path.".format(required_file, file_path))
- )
-
- for required_var in ["entrust_api_user", "entrust_api_key"]:
- if not kwargs.get(required_var):
- raise SessionConfigurationException(to_native("Parameter provided for {0} was missing.".format(required_var)))
-
- config["entrust_api_cert"] = kwargs.get("entrust_api_cert")
- config["entrust_api_cert_key"] = kwargs.get("entrust_api_cert_key")
- config["entrust_api_specification_path"] = kwargs.get("entrust_api_specification_path")
- config["entrust_api_user"] = kwargs.get("entrust_api_user")
- config["entrust_api_key"] = kwargs.get("entrust_api_key")
-
- return config
-
-
-def ECSClient(entrust_api_user=None, entrust_api_key=None, entrust_api_cert=None, entrust_api_cert_key=None, entrust_api_specification_path=None):
- """Create an ECS client"""
-
- if not YAML_FOUND:
- raise SessionConfigurationException(missing_required_lib("PyYAML"), exception=YAML_IMP_ERR)
-
- if entrust_api_specification_path is None:
- entrust_api_specification_path = "https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml"
-
- # Not functionally necessary with current uses of this module_util, but better to be explicit for future use cases
- entrust_api_user = to_text(entrust_api_user)
- entrust_api_key = to_text(entrust_api_key)
- entrust_api_cert_key = to_text(entrust_api_cert_key)
- entrust_api_specification_path = to_text(entrust_api_specification_path)
-
- return ECSSession(
- "ecs",
- entrust_api_user=entrust_api_user,
- entrust_api_key=entrust_api_key,
- entrust_api_cert=entrust_api_cert,
- entrust_api_cert_key=entrust_api_cert_key,
- entrust_api_specification_path=entrust_api_specification_path,
- ).client()
diff --git a/test/support/integration/plugins/module_utils/mysql.py b/test/support/integration/plugins/module_utils/mysql.py
deleted file mode 100644
index 46198f36..00000000
--- a/test/support/integration/plugins/module_utils/mysql.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# This code is part of Ansible, but is an independent component.
-# This particular file snippet, and this file snippet only, is BSD licensed.
-# Modules you write using this snippet, which is embedded dynamically by Ansible
-# still belong to the author of the module, and may assign their own license
-# to the complete work.
-#
-# Copyright (c), Jonathan Mainguy <jon@soh.re>, 2015
-# Most of this was originally added by Sven Schliesing @muffl0n in the mysql_user.py module
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without modification,
-# are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
-# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import os
-
-try:
- import pymysql as mysql_driver
- _mysql_cursor_param = 'cursor'
-except ImportError:
- try:
- import MySQLdb as mysql_driver
- import MySQLdb.cursors
- _mysql_cursor_param = 'cursorclass'
- except ImportError:
- mysql_driver = None
-
-mysql_driver_fail_msg = 'The PyMySQL (Python 2.7 and Python 3.X) or MySQL-python (Python 2.X) module is required.'
-
-
-def mysql_connect(module, login_user=None, login_password=None, config_file='', ssl_cert=None, ssl_key=None, ssl_ca=None, db=None, cursor_class=None,
- connect_timeout=30, autocommit=False):
- config = {}
-
- if ssl_ca is not None or ssl_key is not None or ssl_cert is not None:
- config['ssl'] = {}
-
- if module.params['login_unix_socket']:
- config['unix_socket'] = module.params['login_unix_socket']
- else:
- config['host'] = module.params['login_host']
- config['port'] = module.params['login_port']
-
- if os.path.exists(config_file):
- config['read_default_file'] = config_file
-
- # If login_user or login_password are given, they should override the
- # config file
- if login_user is not None:
- config['user'] = login_user
- if login_password is not None:
- config['passwd'] = login_password
- if ssl_cert is not None:
- config['ssl']['cert'] = ssl_cert
- if ssl_key is not None:
- config['ssl']['key'] = ssl_key
- if ssl_ca is not None:
- config['ssl']['ca'] = ssl_ca
- if db is not None:
- config['db'] = db
- if connect_timeout is not None:
- config['connect_timeout'] = connect_timeout
-
- if _mysql_cursor_param == 'cursor':
- # In case of PyMySQL driver:
- db_connection = mysql_driver.connect(autocommit=autocommit, **config)
- else:
- # In case of MySQLdb driver
- db_connection = mysql_driver.connect(**config)
- if autocommit:
- db_connection.autocommit(True)
-
- if cursor_class == 'DictCursor':
- return db_connection.cursor(**{_mysql_cursor_param: mysql_driver.cursors.DictCursor}), db_connection
- else:
- return db_connection.cursor(), db_connection
-
-
-def mysql_common_argument_spec():
- return dict(
- login_user=dict(type='str', default=None),
- login_password=dict(type='str', no_log=True),
- login_host=dict(type='str', default='localhost'),
- login_port=dict(type='int', default=3306),
- login_unix_socket=dict(type='str'),
- config_file=dict(type='path', default='~/.my.cnf'),
- connect_timeout=dict(type='int', default=30),
- client_cert=dict(type='path', aliases=['ssl_cert']),
- client_key=dict(type='path', aliases=['ssl_key']),
- ca_cert=dict(type='path', aliases=['ssl_ca']),
- )
diff --git a/test/support/integration/plugins/module_utils/postgres.py b/test/support/integration/plugins/module_utils/postgres.py
deleted file mode 100644
index 0ccc6ed7..00000000
--- a/test/support/integration/plugins/module_utils/postgres.py
+++ /dev/null
@@ -1,330 +0,0 @@
-# This code is part of Ansible, but is an independent component.
-# This particular file snippet, and this file snippet only, is BSD licensed.
-# Modules you write using this snippet, which is embedded dynamically by Ansible
-# still belong to the author of the module, and may assign their own license
-# to the complete work.
-#
-# Copyright (c), Ted Timmons <ted@timmons.me>, 2017.
-# Most of this was originally added by other creators in the postgresql_user module.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without modification,
-# are permitted provided that the following conditions are met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
-# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-psycopg2 = None # This line needs for unit tests
-try:
- import psycopg2
- HAS_PSYCOPG2 = True
-except ImportError:
- HAS_PSYCOPG2 = False
-
-from ansible.module_utils.basic import missing_required_lib
-from ansible.module_utils._text import to_native
-from ansible.module_utils.six import iteritems
-from ansible.module_utils.compat.version import LooseVersion
-
-
-def postgres_common_argument_spec():
- """
- Return a dictionary with connection options.
-
- The options are commonly used by most of PostgreSQL modules.
- """
- return dict(
- login_user=dict(default='postgres'),
- login_password=dict(default='', no_log=True),
- login_host=dict(default=''),
- login_unix_socket=dict(default=''),
- port=dict(type='int', default=5432, aliases=['login_port']),
- ssl_mode=dict(default='prefer', choices=['allow', 'disable', 'prefer', 'require', 'verify-ca', 'verify-full']),
- ca_cert=dict(aliases=['ssl_rootcert']),
- )
-
-
-def ensure_required_libs(module):
- """Check required libraries."""
- if not HAS_PSYCOPG2:
- module.fail_json(msg=missing_required_lib('psycopg2'))
-
- if module.params.get('ca_cert') and LooseVersion(psycopg2.__version__) < LooseVersion('2.4.3'):
- module.fail_json(msg='psycopg2 must be at least 2.4.3 in order to use the ca_cert parameter')
-
-
-def connect_to_db(module, conn_params, autocommit=False, fail_on_conn=True):
- """Connect to a PostgreSQL database.
-
- Return psycopg2 connection object.
-
- Args:
- module (AnsibleModule) -- object of ansible.module_utils.basic.AnsibleModule class
- conn_params (dict) -- dictionary with connection parameters
-
- Kwargs:
- autocommit (bool) -- commit automatically (default False)
- fail_on_conn (bool) -- fail if connection failed or just warn and return None (default True)
- """
- ensure_required_libs(module)
-
- db_connection = None
- try:
- db_connection = psycopg2.connect(**conn_params)
- if autocommit:
- if LooseVersion(psycopg2.__version__) >= LooseVersion('2.4.2'):
- db_connection.set_session(autocommit=True)
- else:
- db_connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
-
- # Switch role, if specified:
- if module.params.get('session_role'):
- cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
-
- try:
- cursor.execute('SET ROLE "%s"' % module.params['session_role'])
- except Exception as e:
- module.fail_json(msg="Could not switch role: %s" % to_native(e))
- finally:
- cursor.close()
-
- except TypeError as e:
- if 'sslrootcert' in e.args[0]:
- module.fail_json(msg='Postgresql server must be at least '
- 'version 8.4 to support sslrootcert')
-
- if fail_on_conn:
- module.fail_json(msg="unable to connect to database: %s" % to_native(e))
- else:
- module.warn("PostgreSQL server is unavailable: %s" % to_native(e))
- db_connection = None
-
- except Exception as e:
- if fail_on_conn:
- module.fail_json(msg="unable to connect to database: %s" % to_native(e))
- else:
- module.warn("PostgreSQL server is unavailable: %s" % to_native(e))
- db_connection = None
-
- return db_connection
-
-
-def exec_sql(obj, query, query_params=None, ddl=False, add_to_executed=True, dont_exec=False):
- """Execute SQL.
-
- Auxiliary function for PostgreSQL user classes.
-
- Returns a query result if possible or True/False if ddl=True arg was passed.
- It necessary for statements that don't return any result (like DDL queries).
-
- Args:
- obj (obj) -- must be an object of a user class.
- The object must have module (AnsibleModule class object) and
- cursor (psycopg cursor object) attributes
- query (str) -- SQL query to execute
-
- Kwargs:
- query_params (dict or tuple) -- Query parameters to prevent SQL injections,
- could be a dict or tuple
- ddl (bool) -- must return True or False instead of rows (typical for DDL queries)
- (default False)
- add_to_executed (bool) -- append the query to obj.executed_queries attribute
- dont_exec (bool) -- used with add_to_executed=True to generate a query, add it
- to obj.executed_queries list and return True (default False)
- """
-
- if dont_exec:
- # This is usually needed to return queries in check_mode
- # without execution
- query = obj.cursor.mogrify(query, query_params)
- if add_to_executed:
- obj.executed_queries.append(query)
-
- return True
-
- try:
- if query_params is not None:
- obj.cursor.execute(query, query_params)
- else:
- obj.cursor.execute(query)
-
- if add_to_executed:
- if query_params is not None:
- obj.executed_queries.append(obj.cursor.mogrify(query, query_params))
- else:
- obj.executed_queries.append(query)
-
- if not ddl:
- res = obj.cursor.fetchall()
- return res
- return True
- except Exception as e:
- obj.module.fail_json(msg="Cannot execute SQL '%s': %s" % (query, to_native(e)))
- return False
-
-
-def get_conn_params(module, params_dict, warn_db_default=True):
- """Get connection parameters from the passed dictionary.
-
- Return a dictionary with parameters to connect to PostgreSQL server.
-
- Args:
- module (AnsibleModule) -- object of ansible.module_utils.basic.AnsibleModule class
- params_dict (dict) -- dictionary with variables
-
- Kwargs:
- warn_db_default (bool) -- warn that the default DB is used (default True)
- """
- # To use defaults values, keyword arguments must be absent, so
- # check which values are empty and don't include in the return dictionary
- params_map = {
- "login_host": "host",
- "login_user": "user",
- "login_password": "password",
- "port": "port",
- "ssl_mode": "sslmode",
- "ca_cert": "sslrootcert"
- }
-
- # Might be different in the modules:
- if params_dict.get('db'):
- params_map['db'] = 'database'
- elif params_dict.get('database'):
- params_map['database'] = 'database'
- elif params_dict.get('login_db'):
- params_map['login_db'] = 'database'
- else:
- if warn_db_default:
- module.warn('Database name has not been passed, '
- 'used default database to connect to.')
-
- kw = dict((params_map[k], v) for (k, v) in iteritems(params_dict)
- if k in params_map and v != '' and v is not None)
-
- # If a login_unix_socket is specified, incorporate it here.
- is_localhost = "host" not in kw or kw["host"] is None or kw["host"] == "localhost"
- if is_localhost and params_dict["login_unix_socket"] != "":
- kw["host"] = params_dict["login_unix_socket"]
-
- return kw
-
-
-class PgMembership(object):
- def __init__(self, module, cursor, groups, target_roles, fail_on_role=True):
- self.module = module
- self.cursor = cursor
- self.target_roles = [r.strip() for r in target_roles]
- self.groups = [r.strip() for r in groups]
- self.executed_queries = []
- self.granted = {}
- self.revoked = {}
- self.fail_on_role = fail_on_role
- self.non_existent_roles = []
- self.changed = False
- self.__check_roles_exist()
-
- def grant(self):
- for group in self.groups:
- self.granted[group] = []
-
- for role in self.target_roles:
- # If role is in a group now, pass:
- if self.__check_membership(group, role):
- continue
-
- query = 'GRANT "%s" TO "%s"' % (group, role)
- self.changed = exec_sql(self, query, ddl=True)
-
- if self.changed:
- self.granted[group].append(role)
-
- return self.changed
-
- def revoke(self):
- for group in self.groups:
- self.revoked[group] = []
-
- for role in self.target_roles:
- # If role is not in a group now, pass:
- if not self.__check_membership(group, role):
- continue
-
- query = 'REVOKE "%s" FROM "%s"' % (group, role)
- self.changed = exec_sql(self, query, ddl=True)
-
- if self.changed:
- self.revoked[group].append(role)
-
- return self.changed
-
- def __check_membership(self, src_role, dst_role):
- query = ("SELECT ARRAY(SELECT b.rolname FROM "
- "pg_catalog.pg_auth_members m "
- "JOIN pg_catalog.pg_roles b ON (m.roleid = b.oid) "
- "WHERE m.member = r.oid) "
- "FROM pg_catalog.pg_roles r "
- "WHERE r.rolname = %(dst_role)s")
-
- res = exec_sql(self, query, query_params={'dst_role': dst_role}, add_to_executed=False)
- membership = []
- if res:
- membership = res[0][0]
-
- if not membership:
- return False
-
- if src_role in membership:
- return True
-
- return False
-
- def __check_roles_exist(self):
- existent_groups = self.__roles_exist(self.groups)
- existent_roles = self.__roles_exist(self.target_roles)
-
- for group in self.groups:
- if group not in existent_groups:
- if self.fail_on_role:
- self.module.fail_json(msg="Role %s does not exist" % group)
- else:
- self.module.warn("Role %s does not exist, pass" % group)
- self.non_existent_roles.append(group)
-
- for role in self.target_roles:
- if role not in existent_roles:
- if self.fail_on_role:
- self.module.fail_json(msg="Role %s does not exist" % role)
- else:
- self.module.warn("Role %s does not exist, pass" % role)
-
- if role not in self.groups:
- self.non_existent_roles.append(role)
-
- else:
- if self.fail_on_role:
- self.module.exit_json(msg="Role role '%s' is a member of role '%s'" % (role, role))
- else:
- self.module.warn("Role role '%s' is a member of role '%s', pass" % (role, role))
-
- # Update role lists, excluding non existent roles:
- self.groups = [g for g in self.groups if g not in self.non_existent_roles]
-
- self.target_roles = [r for r in self.target_roles if r not in self.non_existent_roles]
-
- def __roles_exist(self, roles):
- tmp = ["'" + x + "'" for x in roles]
- query = "SELECT rolname FROM pg_roles WHERE rolname IN (%s)" % ','.join(tmp)
- return [x[0] for x in exec_sql(self, query, add_to_executed=False)]
diff --git a/test/support/integration/plugins/module_utils/rabbitmq.py b/test/support/integration/plugins/module_utils/rabbitmq.py
deleted file mode 100644
index cf764006..00000000
--- a/test/support/integration/plugins/module_utils/rabbitmq.py
+++ /dev/null
@@ -1,220 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright: (c) 2016, Jorge Rodriguez <jorge.rodriguez@tiriel.eu>
-# Copyright: (c) 2018, John Imison <john+github@imison.net>
-#
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-from ansible.module_utils._text import to_native
-from ansible.module_utils.basic import missing_required_lib
-from ansible.module_utils.six.moves.urllib import parse as urllib_parse
-from mimetypes import MimeTypes
-
-import os
-import json
-import traceback
-
-PIKA_IMP_ERR = None
-try:
- import pika
- import pika.exceptions
- from pika import spec
- HAS_PIKA = True
-except ImportError:
- PIKA_IMP_ERR = traceback.format_exc()
- HAS_PIKA = False
-
-
-def rabbitmq_argument_spec():
- return dict(
- login_user=dict(type='str', default='guest'),
- login_password=dict(type='str', default='guest', no_log=True),
- login_host=dict(type='str', default='localhost'),
- login_port=dict(type='str', default='15672'),
- login_protocol=dict(type='str', default='http', choices=['http', 'https']),
- ca_cert=dict(type='path', aliases=['cacert']),
- client_cert=dict(type='path', aliases=['cert']),
- client_key=dict(type='path', aliases=['key']),
- vhost=dict(type='str', default='/'),
- )
-
-
-# notification/rabbitmq_basic_publish.py
-class RabbitClient():
- def __init__(self, module):
- self.module = module
- self.params = module.params
- self.check_required_library()
- self.check_host_params()
- self.url = self.params['url']
- self.proto = self.params['proto']
- self.username = self.params['username']
- self.password = self.params['password']
- self.host = self.params['host']
- self.port = self.params['port']
- self.vhost = self.params['vhost']
- self.queue = self.params['queue']
- self.headers = self.params['headers']
- self.cafile = self.params['cafile']
- self.certfile = self.params['certfile']
- self.keyfile = self.params['keyfile']
-
- if self.host is not None:
- self.build_url()
-
- if self.cafile is not None:
- self.append_ssl_certs()
-
- self.connect_to_rabbitmq()
-
- def check_required_library(self):
- if not HAS_PIKA:
- self.module.fail_json(msg=missing_required_lib("pika"), exception=PIKA_IMP_ERR)
-
- def check_host_params(self):
- # Fail if url is specified and other conflicting parameters have been specified
- if self.params['url'] is not None and any(self.params[k] is not None for k in ['proto', 'host', 'port', 'password', 'username', 'vhost']):
- self.module.fail_json(msg="url and proto, host, port, vhost, username or password cannot be specified at the same time.")
-
- # Fail if url not specified and there is a missing parameter to build the url
- if self.params['url'] is None and any(self.params[k] is None for k in ['proto', 'host', 'port', 'password', 'username', 'vhost']):
- self.module.fail_json(msg="Connection parameters must be passed via url, or, proto, host, port, vhost, username or password.")
-
- def append_ssl_certs(self):
- ssl_options = {}
- if self.cafile:
- ssl_options['cafile'] = self.cafile
- if self.certfile:
- ssl_options['certfile'] = self.certfile
- if self.keyfile:
- ssl_options['keyfile'] = self.keyfile
-
- self.url = self.url + '?ssl_options=' + urllib_parse.quote(json.dumps(ssl_options))
-
- @staticmethod
- def rabbitmq_argument_spec():
- return dict(
- url=dict(type='str'),
- proto=dict(type='str', choices=['amqp', 'amqps']),
- host=dict(type='str'),
- port=dict(type='int'),
- username=dict(type='str'),
- password=dict(type='str', no_log=True),
- vhost=dict(type='str'),
- queue=dict(type='str')
- )
-
- ''' Consider some file size limits here '''
- def _read_file(self, path):
- try:
- with open(path, "rb") as file_handle:
- return file_handle.read()
- except IOError as e:
- self.module.fail_json(msg="Unable to open file %s: %s" % (path, to_native(e)))
-
- @staticmethod
- def _check_file_mime_type(path):
- mime = MimeTypes()
- return mime.guess_type(path)
-
- def build_url(self):
- self.url = '{0}://{1}:{2}@{3}:{4}/{5}'.format(self.proto,
- self.username,
- self.password,
- self.host,
- self.port,
- self.vhost)
-
- def connect_to_rabbitmq(self):
- """
- Function to connect to rabbitmq using username and password
- """
- try:
- parameters = pika.URLParameters(self.url)
- except Exception as e:
- self.module.fail_json(msg="URL malformed: %s" % to_native(e))
-
- try:
- self.connection = pika.BlockingConnection(parameters)
- except Exception as e:
- self.module.fail_json(msg="Connection issue: %s" % to_native(e))
-
- try:
- self.conn_channel = self.connection.channel()
- except pika.exceptions.AMQPChannelError as e:
- self.close_connection()
- self.module.fail_json(msg="Channel issue: %s" % to_native(e))
-
- def close_connection(self):
- try:
- self.connection.close()
- except pika.exceptions.AMQPConnectionError:
- pass
-
- def basic_publish(self):
- self.content_type = self.params.get("content_type")
-
- if self.params.get("body") is not None:
- args = dict(
- body=self.params.get("body"),
- exchange=self.params.get("exchange"),
- routing_key=self.params.get("routing_key"),
- properties=pika.BasicProperties(content_type=self.content_type, delivery_mode=1, headers=self.headers))
-
- # If src (file) is defined and content_type is left as default, do a mime lookup on the file
- if self.params.get("src") is not None and self.content_type == 'text/plain':
- self.content_type = RabbitClient._check_file_mime_type(self.params.get("src"))[0]
- self.headers.update(
- filename=os.path.basename(self.params.get("src"))
- )
-
- args = dict(
- body=self._read_file(self.params.get("src")),
- exchange=self.params.get("exchange"),
- routing_key=self.params.get("routing_key"),
- properties=pika.BasicProperties(content_type=self.content_type,
- delivery_mode=1,
- headers=self.headers
- ))
- elif self.params.get("src") is not None:
- args = dict(
- body=self._read_file(self.params.get("src")),
- exchange=self.params.get("exchange"),
- routing_key=self.params.get("routing_key"),
- properties=pika.BasicProperties(content_type=self.content_type,
- delivery_mode=1,
- headers=self.headers
- ))
-
- try:
- # If queue is not defined, RabbitMQ will return the queue name of the automatically generated queue.
- if self.queue is None:
- result = self.conn_channel.queue_declare(durable=self.params.get("durable"),
- exclusive=self.params.get("exclusive"),
- auto_delete=self.params.get("auto_delete"))
- self.conn_channel.confirm_delivery()
- self.queue = result.method.queue
- else:
- self.conn_channel.queue_declare(queue=self.queue,
- durable=self.params.get("durable"),
- exclusive=self.params.get("exclusive"),
- auto_delete=self.params.get("auto_delete"))
- self.conn_channel.confirm_delivery()
- except Exception as e:
- self.module.fail_json(msg="Queue declare issue: %s" % to_native(e))
-
- # https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/cloudstack.py#L150
- if args['routing_key'] is None:
- args['routing_key'] = self.queue
-
- if args['exchange'] is None:
- args['exchange'] = ''
-
- try:
- self.conn_channel.basic_publish(**args)
- return True
- except pika.exceptions.UnroutableError:
- return False
diff --git a/test/support/integration/plugins/modules/aws_s3.py b/test/support/integration/plugins/modules/aws_s3.py
deleted file mode 100644
index 54874f05..00000000
--- a/test/support/integration/plugins/modules/aws_s3.py
+++ /dev/null
@@ -1,925 +0,0 @@
-#!/usr/bin/python
-# This file is part of Ansible
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['stableinterface'],
- 'supported_by': 'core'}
-
-
-DOCUMENTATION = '''
----
-module: aws_s3
-short_description: manage objects in S3.
-description:
- - This module allows the user to manage S3 buckets and the objects within them. Includes support for creating and
- deleting both objects and buckets, retrieving objects as files or strings and generating download links.
- This module has a dependency on boto3 and botocore.
-notes:
- - In 2.4, this module has been renamed from C(s3) into M(aws_s3).
-version_added: "1.1"
-options:
- bucket:
- description:
- - Bucket name.
- required: true
- type: str
- dest:
- description:
- - The destination file path when downloading an object/key with a GET operation.
- version_added: "1.3"
- type: path
- encrypt:
- description:
- - When set for PUT mode, asks for server-side encryption.
- default: true
- version_added: "2.0"
- type: bool
- encryption_mode:
- description:
- - What encryption mode to use if I(encrypt=true).
- default: AES256
- choices:
- - AES256
- - aws:kms
- version_added: "2.7"
- type: str
- expiry:
- description:
- - Time limit (in seconds) for the URL generated and returned by S3/Walrus when performing a I(mode=put) or I(mode=geturl) operation.
- default: 600
- aliases: ['expiration']
- type: int
- headers:
- description:
- - Custom headers for PUT operation, as a dictionary of 'key=value' and 'key=value,key=value'.
- version_added: "2.0"
- type: dict
- marker:
- description:
- - Specifies the key to start with when using list mode. Object keys are returned in alphabetical order, starting with key after the marker in order.
- version_added: "2.0"
- type: str
- max_keys:
- description:
- - Max number of results to return in list mode, set this if you want to retrieve fewer than the default 1000 keys.
- default: 1000
- version_added: "2.0"
- type: int
- metadata:
- description:
- - Metadata for PUT operation, as a dictionary of 'key=value' and 'key=value,key=value'.
- version_added: "1.6"
- type: dict
- mode:
- description:
- - Switches the module behaviour between put (upload), get (download), geturl (return download url, Ansible 1.3+),
- getstr (download object as string (1.3+)), list (list keys, Ansible 2.0+), create (bucket), delete (bucket),
- and delobj (delete object, Ansible 2.0+).
- required: true
- choices: ['get', 'put', 'delete', 'create', 'geturl', 'getstr', 'delobj', 'list']
- type: str
- object:
- description:
- - Keyname of the object inside the bucket. Can be used to create "virtual directories", see examples.
- type: str
- permission:
- description:
- - This option lets the user set the canned permissions on the object/bucket that are created.
- The permissions that can be set are C(private), C(public-read), C(public-read-write), C(authenticated-read) for a bucket or
- C(private), C(public-read), C(public-read-write), C(aws-exec-read), C(authenticated-read), C(bucket-owner-read),
- C(bucket-owner-full-control) for an object. Multiple permissions can be specified as a list.
- default: ['private']
- version_added: "2.0"
- type: list
- elements: str
- prefix:
- description:
- - Limits the response to keys that begin with the specified prefix for list mode.
- default: ""
- version_added: "2.0"
- type: str
- version:
- description:
- - Version ID of the object inside the bucket. Can be used to get a specific version of a file if versioning is enabled in the target bucket.
- version_added: "2.0"
- type: str
- overwrite:
- description:
- - Force overwrite either locally on the filesystem or remotely with the object/key. Used with PUT and GET operations.
- Boolean or one of [always, never, different], true is equal to 'always' and false is equal to 'never', new in 2.0.
- When this is set to 'different', the md5 sum of the local file is compared with the 'ETag' of the object/key in S3.
- The ETag may or may not be an MD5 digest of the object data. See the ETag response header here
- U(https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html)
- default: 'always'
- aliases: ['force']
- version_added: "1.2"
- type: str
- retries:
- description:
- - On recoverable failure, how many times to retry before actually failing.
- default: 0
- version_added: "2.0"
- type: int
- aliases: ['retry']
- s3_url:
- description:
- - S3 URL endpoint for usage with Ceph, Eucalyptus and fakes3 etc. Otherwise assumes AWS.
- aliases: [ S3_URL ]
- type: str
- dualstack:
- description:
- - Enables Amazon S3 Dual-Stack Endpoints, allowing S3 communications using both IPv4 and IPv6.
- - Requires at least botocore version 1.4.45.
- type: bool
- default: false
- version_added: "2.7"
- rgw:
- description:
- - Enable Ceph RGW S3 support. This option requires an explicit url via I(s3_url).
- default: false
- version_added: "2.2"
- type: bool
- src:
- description:
- - The source file path when performing a PUT operation.
- version_added: "1.3"
- type: str
- ignore_nonexistent_bucket:
- description:
- - "Overrides initial bucket lookups in case bucket or iam policies are restrictive. Example: a user may have the
- GetObject permission but no other permissions. In this case using the option mode: get will fail without specifying
- I(ignore_nonexistent_bucket=true)."
- version_added: "2.3"
- type: bool
- encryption_kms_key_id:
- description:
- - KMS key id to use when encrypting objects using I(encrypting=aws:kms). Ignored if I(encryption) is not C(aws:kms)
- version_added: "2.7"
- type: str
-requirements: [ "boto3", "botocore" ]
-author:
- - "Lester Wade (@lwade)"
- - "Sloane Hertel (@s-hertel)"
-extends_documentation_fragment:
- - aws
- - ec2
-'''
-
-EXAMPLES = '''
-- name: Simple PUT operation
- aws_s3:
- bucket: mybucket
- object: /my/desired/key.txt
- src: /usr/local/myfile.txt
- mode: put
-
-- name: Simple PUT operation in Ceph RGW S3
- aws_s3:
- bucket: mybucket
- object: /my/desired/key.txt
- src: /usr/local/myfile.txt
- mode: put
- rgw: true
- s3_url: "http://localhost:8000"
-
-- name: Simple GET operation
- aws_s3:
- bucket: mybucket
- object: /my/desired/key.txt
- dest: /usr/local/myfile.txt
- mode: get
-
-- name: Get a specific version of an object.
- aws_s3:
- bucket: mybucket
- object: /my/desired/key.txt
- version: 48c9ee5131af7a716edc22df9772aa6f
- dest: /usr/local/myfile.txt
- mode: get
-
-- name: PUT/upload with metadata
- aws_s3:
- bucket: mybucket
- object: /my/desired/key.txt
- src: /usr/local/myfile.txt
- mode: put
- metadata: 'Content-Encoding=gzip,Cache-Control=no-cache'
-
-- name: PUT/upload with custom headers
- aws_s3:
- bucket: mybucket
- object: /my/desired/key.txt
- src: /usr/local/myfile.txt
- mode: put
- headers: 'x-amz-grant-full-control=emailAddress=owner@example.com'
-
-- name: List keys simple
- aws_s3:
- bucket: mybucket
- mode: list
-
-- name: List keys all options
- aws_s3:
- bucket: mybucket
- mode: list
- prefix: /my/desired/
- marker: /my/desired/0023.txt
- max_keys: 472
-
-- name: Create an empty bucket
- aws_s3:
- bucket: mybucket
- mode: create
- permission: public-read
-
-- name: Create a bucket with key as directory, in the EU region
- aws_s3:
- bucket: mybucket
- object: /my/directory/path
- mode: create
- region: eu-west-1
-
-- name: Delete a bucket and all contents
- aws_s3:
- bucket: mybucket
- mode: delete
-
-- name: GET an object but don't download if the file checksums match. New in 2.0
- aws_s3:
- bucket: mybucket
- object: /my/desired/key.txt
- dest: /usr/local/myfile.txt
- mode: get
- overwrite: different
-
-- name: Delete an object from a bucket
- aws_s3:
- bucket: mybucket
- object: /my/desired/key.txt
- mode: delobj
-'''
-
-RETURN = '''
-msg:
- description: Message indicating the status of the operation.
- returned: always
- type: str
- sample: PUT operation complete
-url:
- description: URL of the object.
- returned: (for put and geturl operations)
- type: str
- sample: https://my-bucket.s3.amazonaws.com/my-key.txt?AWSAccessKeyId=<access-key>&Expires=1506888865&Signature=<signature>
-expiry:
- description: Number of seconds the presigned url is valid for.
- returned: (for geturl operation)
- type: int
- sample: 600
-contents:
- description: Contents of the object as string.
- returned: (for getstr operation)
- type: str
- sample: "Hello, world!"
-s3_keys:
- description: List of object keys.
- returned: (for list operation)
- type: list
- elements: str
- sample:
- - prefix1/
- - prefix1/key1
- - prefix1/key2
-'''
-
-import mimetypes
-import os
-from ansible.module_utils.six.moves.urllib.parse import urlparse
-from ssl import SSLError
-from ansible.module_utils.basic import to_text, to_native
-from ansible.module_utils.aws.core import AnsibleAWSModule
-from ansible.module_utils.aws.s3 import calculate_etag, HAS_MD5
-from ansible.module_utils.ec2 import get_aws_connection_info, boto3_conn
-
-try:
- import botocore
-except ImportError:
- pass # will be detected by imported AnsibleAWSModule
-
-IGNORE_S3_DROP_IN_EXCEPTIONS = ['XNotImplemented', 'NotImplemented']
-
-
-class Sigv4Required(Exception):
- pass
-
-
-def key_check(module, s3, bucket, obj, version=None, validate=True):
- exists = True
- try:
- if version:
- s3.head_object(Bucket=bucket, Key=obj, VersionId=version)
- else:
- s3.head_object(Bucket=bucket, Key=obj)
- except botocore.exceptions.ClientError as e:
- # if a client error is thrown, check if it's a 404 error
- # if it's a 404 error, then the object does not exist
- error_code = int(e.response['Error']['Code'])
- if error_code == 404:
- exists = False
- elif error_code == 403 and validate is False:
- pass
- else:
- module.fail_json_aws(e, msg="Failed while looking up object (during key check) %s." % obj)
- except botocore.exceptions.BotoCoreError as e:
- module.fail_json_aws(e, msg="Failed while looking up object (during key check) %s." % obj)
- return exists
-
-
-def etag_compare(module, local_file, s3, bucket, obj, version=None):
- s3_etag = get_etag(s3, bucket, obj, version=version)
- local_etag = calculate_etag(module, local_file, s3_etag, s3, bucket, obj, version)
-
- return s3_etag == local_etag
-
-
-def get_etag(s3, bucket, obj, version=None):
- if version:
- key_check = s3.head_object(Bucket=bucket, Key=obj, VersionId=version)
- else:
- key_check = s3.head_object(Bucket=bucket, Key=obj)
- if not key_check:
- return None
- return key_check['ETag']
-
-
-def bucket_check(module, s3, bucket, validate=True):
- exists = True
- try:
- s3.head_bucket(Bucket=bucket)
- except botocore.exceptions.ClientError as e:
- # If a client error is thrown, then check that it was a 404 error.
- # If it was a 404 error, then the bucket does not exist.
- error_code = int(e.response['Error']['Code'])
- if error_code == 404:
- exists = False
- elif error_code == 403 and validate is False:
- pass
- else:
- module.fail_json_aws(e, msg="Failed while looking up bucket (during bucket_check) %s." % bucket)
- except botocore.exceptions.EndpointConnectionError as e:
- module.fail_json_aws(e, msg="Invalid endpoint provided")
- except botocore.exceptions.BotoCoreError as e:
- module.fail_json_aws(e, msg="Failed while looking up bucket (during bucket_check) %s." % bucket)
- return exists
-
-
-def create_bucket(module, s3, bucket, location=None):
- if module.check_mode:
- module.exit_json(msg="CREATE operation skipped - running in check mode", changed=True)
- configuration = {}
- if location not in ('us-east-1', None):
- configuration['LocationConstraint'] = location
- try:
- if len(configuration) > 0:
- s3.create_bucket(Bucket=bucket, CreateBucketConfiguration=configuration)
- else:
- s3.create_bucket(Bucket=bucket)
- if module.params.get('permission'):
- # Wait for the bucket to exist before setting ACLs
- s3.get_waiter('bucket_exists').wait(Bucket=bucket)
- for acl in module.params.get('permission'):
- s3.put_bucket_acl(ACL=acl, Bucket=bucket)
- except botocore.exceptions.ClientError as e:
- if e.response['Error']['Code'] in IGNORE_S3_DROP_IN_EXCEPTIONS:
- module.warn("PutBucketAcl is not implemented by your storage provider. Set the permission parameters to the empty list to avoid this warning")
- else:
- module.fail_json_aws(e, msg="Failed while creating bucket or setting acl (check that you have CreateBucket and PutBucketAcl permission).")
- except botocore.exceptions.BotoCoreError as e:
- module.fail_json_aws(e, msg="Failed while creating bucket or setting acl (check that you have CreateBucket and PutBucketAcl permission).")
-
- if bucket:
- return True
-
-
-def paginated_list(s3, **pagination_params):
- pg = s3.get_paginator('list_objects_v2')
- for page in pg.paginate(**pagination_params):
- yield [data['Key'] for data in page.get('Contents', [])]
-
-
-def paginated_versioned_list_with_fallback(s3, **pagination_params):
- try:
- versioned_pg = s3.get_paginator('list_object_versions')
- for page in versioned_pg.paginate(**pagination_params):
- delete_markers = [{'Key': data['Key'], 'VersionId': data['VersionId']} for data in page.get('DeleteMarkers', [])]
- current_objects = [{'Key': data['Key'], 'VersionId': data['VersionId']} for data in page.get('Versions', [])]
- yield delete_markers + current_objects
- except botocore.exceptions.ClientError as e:
- if to_text(e.response['Error']['Code']) in IGNORE_S3_DROP_IN_EXCEPTIONS + ['AccessDenied']:
- for page in paginated_list(s3, **pagination_params):
- yield [{'Key': data['Key']} for data in page]
-
-
-def list_keys(module, s3, bucket, prefix, marker, max_keys):
- pagination_params = {'Bucket': bucket}
- for param_name, param_value in (('Prefix', prefix), ('StartAfter', marker), ('MaxKeys', max_keys)):
- pagination_params[param_name] = param_value
- try:
- keys = sum(paginated_list(s3, **pagination_params), [])
- module.exit_json(msg="LIST operation complete", s3_keys=keys)
- except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
- module.fail_json_aws(e, msg="Failed while listing the keys in the bucket {0}".format(bucket))
-
-
-def delete_bucket(module, s3, bucket):
- if module.check_mode:
- module.exit_json(msg="DELETE operation skipped - running in check mode", changed=True)
- try:
- exists = bucket_check(module, s3, bucket)
- if exists is False:
- return False
- # if there are contents then we need to delete them before we can delete the bucket
- for keys in paginated_versioned_list_with_fallback(s3, Bucket=bucket):
- if keys:
- s3.delete_objects(Bucket=bucket, Delete={'Objects': keys})
- s3.delete_bucket(Bucket=bucket)
- return True
- except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
- module.fail_json_aws(e, msg="Failed while deleting bucket %s." % bucket)
-
-
-def delete_key(module, s3, bucket, obj):
- if module.check_mode:
- module.exit_json(msg="DELETE operation skipped - running in check mode", changed=True)
- try:
- s3.delete_object(Bucket=bucket, Key=obj)
- module.exit_json(msg="Object deleted from bucket %s." % (bucket), changed=True)
- except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
- module.fail_json_aws(e, msg="Failed while trying to delete %s." % obj)
-
-
-def create_dirkey(module, s3, bucket, obj, encrypt):
- if module.check_mode:
- module.exit_json(msg="PUT operation skipped - running in check mode", changed=True)
- try:
- params = {'Bucket': bucket, 'Key': obj, 'Body': b''}
- if encrypt:
- params['ServerSideEncryption'] = module.params['encryption_mode']
- if module.params['encryption_kms_key_id'] and module.params['encryption_mode'] == 'aws:kms':
- params['SSEKMSKeyId'] = module.params['encryption_kms_key_id']
-
- s3.put_object(**params)
- for acl in module.params.get('permission'):
- s3.put_object_acl(ACL=acl, Bucket=bucket, Key=obj)
- except botocore.exceptions.ClientError as e:
- if e.response['Error']['Code'] in IGNORE_S3_DROP_IN_EXCEPTIONS:
- module.warn("PutObjectAcl is not implemented by your storage provider. Set the permissions parameters to the empty list to avoid this warning")
- else:
- module.fail_json_aws(e, msg="Failed while creating object %s." % obj)
- except botocore.exceptions.BotoCoreError as e:
- module.fail_json_aws(e, msg="Failed while creating object %s." % obj)
- module.exit_json(msg="Virtual directory %s created in bucket %s" % (obj, bucket), changed=True)
-
-
-def path_check(path):
- if os.path.exists(path):
- return True
- else:
- return False
-
-
-def option_in_extra_args(option):
- temp_option = option.replace('-', '').lower()
-
- allowed_extra_args = {'acl': 'ACL', 'cachecontrol': 'CacheControl', 'contentdisposition': 'ContentDisposition',
- 'contentencoding': 'ContentEncoding', 'contentlanguage': 'ContentLanguage',
- 'contenttype': 'ContentType', 'expires': 'Expires', 'grantfullcontrol': 'GrantFullControl',
- 'grantread': 'GrantRead', 'grantreadacp': 'GrantReadACP', 'grantwriteacp': 'GrantWriteACP',
- 'metadata': 'Metadata', 'requestpayer': 'RequestPayer', 'serversideencryption': 'ServerSideEncryption',
- 'storageclass': 'StorageClass', 'ssecustomeralgorithm': 'SSECustomerAlgorithm', 'ssecustomerkey': 'SSECustomerKey',
- 'ssecustomerkeymd5': 'SSECustomerKeyMD5', 'ssekmskeyid': 'SSEKMSKeyId', 'websiteredirectlocation': 'WebsiteRedirectLocation'}
-
- if temp_option in allowed_extra_args:
- return allowed_extra_args[temp_option]
-
-
-def upload_s3file(module, s3, bucket, obj, src, expiry, metadata, encrypt, headers):
- if module.check_mode:
- module.exit_json(msg="PUT operation skipped - running in check mode", changed=True)
- try:
- extra = {}
- if encrypt:
- extra['ServerSideEncryption'] = module.params['encryption_mode']
- if module.params['encryption_kms_key_id'] and module.params['encryption_mode'] == 'aws:kms':
- extra['SSEKMSKeyId'] = module.params['encryption_kms_key_id']
- if metadata:
- extra['Metadata'] = {}
-
- # determine object metadata and extra arguments
- for option in metadata:
- extra_args_option = option_in_extra_args(option)
- if extra_args_option is not None:
- extra[extra_args_option] = metadata[option]
- else:
- extra['Metadata'][option] = metadata[option]
-
- if 'ContentType' not in extra:
- content_type = mimetypes.guess_type(src)[0]
- if content_type is None:
- # s3 default content type
- content_type = 'binary/octet-stream'
- extra['ContentType'] = content_type
-
- s3.upload_file(Filename=src, Bucket=bucket, Key=obj, ExtraArgs=extra)
- except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
- module.fail_json_aws(e, msg="Unable to complete PUT operation.")
- try:
- for acl in module.params.get('permission'):
- s3.put_object_acl(ACL=acl, Bucket=bucket, Key=obj)
- except botocore.exceptions.ClientError as e:
- if e.response['Error']['Code'] in IGNORE_S3_DROP_IN_EXCEPTIONS:
- module.warn("PutObjectAcl is not implemented by your storage provider. Set the permission parameters to the empty list to avoid this warning")
- else:
- module.fail_json_aws(e, msg="Unable to set object ACL")
- except botocore.exceptions.BotoCoreError as e:
- module.fail_json_aws(e, msg="Unable to set object ACL")
- try:
- url = s3.generate_presigned_url(ClientMethod='put_object',
- Params={'Bucket': bucket, 'Key': obj},
- ExpiresIn=expiry)
- except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
- module.fail_json_aws(e, msg="Unable to generate presigned URL")
- module.exit_json(msg="PUT operation complete", url=url, changed=True)
-
-
-def download_s3file(module, s3, bucket, obj, dest, retries, version=None):
- if module.check_mode:
- module.exit_json(msg="GET operation skipped - running in check mode", changed=True)
- # retries is the number of loops; range/xrange needs to be one
- # more to get that count of loops.
- try:
- if version:
- key = s3.get_object(Bucket=bucket, Key=obj, VersionId=version)
- else:
- key = s3.get_object(Bucket=bucket, Key=obj)
- except botocore.exceptions.ClientError as e:
- if e.response['Error']['Code'] == 'InvalidArgument' and 'require AWS Signature Version 4' in to_text(e):
- raise Sigv4Required()
- elif e.response['Error']['Code'] not in ("403", "404"):
- # AccessDenied errors may be triggered if 1) file does not exist or 2) file exists but
- # user does not have the s3:GetObject permission. 404 errors are handled by download_file().
- module.fail_json_aws(e, msg="Could not find the key %s." % obj)
- except botocore.exceptions.BotoCoreError as e:
- module.fail_json_aws(e, msg="Could not find the key %s." % obj)
-
- optional_kwargs = {'ExtraArgs': {'VersionId': version}} if version else {}
- for x in range(0, retries + 1):
- try:
- s3.download_file(bucket, obj, dest, **optional_kwargs)
- module.exit_json(msg="GET operation complete", changed=True)
- except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
- # actually fail on last pass through the loop.
- if x >= retries:
- module.fail_json_aws(e, msg="Failed while downloading %s." % obj)
- # otherwise, try again, this may be a transient timeout.
- except SSLError as e: # will ClientError catch SSLError?
- # actually fail on last pass through the loop.
- if x >= retries:
- module.fail_json_aws(e, msg="s3 download failed")
- # otherwise, try again, this may be a transient timeout.
-
-
-def download_s3str(module, s3, bucket, obj, version=None, validate=True):
- if module.check_mode:
- module.exit_json(msg="GET operation skipped - running in check mode", changed=True)
- try:
- if version:
- contents = to_native(s3.get_object(Bucket=bucket, Key=obj, VersionId=version)["Body"].read())
- else:
- contents = to_native(s3.get_object(Bucket=bucket, Key=obj)["Body"].read())
- module.exit_json(msg="GET operation complete", contents=contents, changed=True)
- except botocore.exceptions.ClientError as e:
- if e.response['Error']['Code'] == 'InvalidArgument' and 'require AWS Signature Version 4' in to_text(e):
- raise Sigv4Required()
- else:
- module.fail_json_aws(e, msg="Failed while getting contents of object %s as a string." % obj)
- except botocore.exceptions.BotoCoreError as e:
- module.fail_json_aws(e, msg="Failed while getting contents of object %s as a string." % obj)
-
-
-def get_download_url(module, s3, bucket, obj, expiry, changed=True):
- try:
- url = s3.generate_presigned_url(ClientMethod='get_object',
- Params={'Bucket': bucket, 'Key': obj},
- ExpiresIn=expiry)
- module.exit_json(msg="Download url:", url=url, expiry=expiry, changed=changed)
- except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
- module.fail_json_aws(e, msg="Failed while getting download url.")
-
-
-def is_fakes3(s3_url):
- """ Return True if s3_url has scheme fakes3:// """
- if s3_url is not None:
- return urlparse(s3_url).scheme in ('fakes3', 'fakes3s')
- else:
- return False
-
-
-def get_s3_connection(module, aws_connect_kwargs, location, rgw, s3_url, sig_4=False):
- if s3_url and rgw: # TODO - test this
- rgw = urlparse(s3_url)
- params = dict(module=module, conn_type='client', resource='s3', use_ssl=rgw.scheme == 'https', region=location, endpoint=s3_url, **aws_connect_kwargs)
- elif is_fakes3(s3_url):
- fakes3 = urlparse(s3_url)
- port = fakes3.port
- if fakes3.scheme == 'fakes3s':
- protocol = "https"
- if port is None:
- port = 443
- else:
- protocol = "http"
- if port is None:
- port = 80
- params = dict(module=module, conn_type='client', resource='s3', region=location,
- endpoint="%s://%s:%s" % (protocol, fakes3.hostname, to_text(port)),
- use_ssl=fakes3.scheme == 'fakes3s', **aws_connect_kwargs)
- else:
- params = dict(module=module, conn_type='client', resource='s3', region=location, endpoint=s3_url, **aws_connect_kwargs)
- if module.params['mode'] == 'put' and module.params['encryption_mode'] == 'aws:kms':
- params['config'] = botocore.client.Config(signature_version='s3v4')
- elif module.params['mode'] in ('get', 'getstr') and sig_4:
- params['config'] = botocore.client.Config(signature_version='s3v4')
- if module.params['dualstack']:
- dualconf = botocore.client.Config(s3={'use_dualstack_endpoint': True})
- if 'config' in params:
- params['config'] = params['config'].merge(dualconf)
- else:
- params['config'] = dualconf
- return boto3_conn(**params)
-
-
-def main():
- argument_spec = dict(
- bucket=dict(required=True),
- dest=dict(default=None, type='path'),
- encrypt=dict(default=True, type='bool'),
- encryption_mode=dict(choices=['AES256', 'aws:kms'], default='AES256'),
- expiry=dict(default=600, type='int', aliases=['expiration']),
- headers=dict(type='dict'),
- marker=dict(default=""),
- max_keys=dict(default=1000, type='int'),
- metadata=dict(type='dict'),
- mode=dict(choices=['get', 'put', 'delete', 'create', 'geturl', 'getstr', 'delobj', 'list'], required=True),
- object=dict(),
- permission=dict(type='list', default=['private']),
- version=dict(default=None),
- overwrite=dict(aliases=['force'], default='always'),
- prefix=dict(default=""),
- retries=dict(aliases=['retry'], type='int', default=0),
- s3_url=dict(aliases=['S3_URL']),
- dualstack=dict(default='no', type='bool'),
- rgw=dict(default='no', type='bool'),
- src=dict(),
- ignore_nonexistent_bucket=dict(default=False, type='bool'),
- encryption_kms_key_id=dict()
- )
- module = AnsibleAWSModule(
- argument_spec=argument_spec,
- supports_check_mode=True,
- required_if=[['mode', 'put', ['src', 'object']],
- ['mode', 'get', ['dest', 'object']],
- ['mode', 'getstr', ['object']],
- ['mode', 'geturl', ['object']]],
- )
-
- bucket = module.params.get('bucket')
- encrypt = module.params.get('encrypt')
- expiry = module.params.get('expiry')
- dest = module.params.get('dest', '')
- headers = module.params.get('headers')
- marker = module.params.get('marker')
- max_keys = module.params.get('max_keys')
- metadata = module.params.get('metadata')
- mode = module.params.get('mode')
- obj = module.params.get('object')
- version = module.params.get('version')
- overwrite = module.params.get('overwrite')
- prefix = module.params.get('prefix')
- retries = module.params.get('retries')
- s3_url = module.params.get('s3_url')
- dualstack = module.params.get('dualstack')
- rgw = module.params.get('rgw')
- src = module.params.get('src')
- ignore_nonexistent_bucket = module.params.get('ignore_nonexistent_bucket')
-
- object_canned_acl = ["private", "public-read", "public-read-write", "aws-exec-read", "authenticated-read", "bucket-owner-read", "bucket-owner-full-control"]
- bucket_canned_acl = ["private", "public-read", "public-read-write", "authenticated-read"]
-
- if overwrite not in ['always', 'never', 'different']:
- if module.boolean(overwrite):
- overwrite = 'always'
- else:
- overwrite = 'never'
-
- if overwrite == 'different' and not HAS_MD5:
- module.fail_json(msg='overwrite=different is unavailable: ETag calculation requires MD5 support')
-
- region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
-
- if region in ('us-east-1', '', None):
- # default to US Standard region
- location = 'us-east-1'
- else:
- # Boto uses symbolic names for locations but region strings will
- # actually work fine for everything except us-east-1 (US Standard)
- location = region
-
- if module.params.get('object'):
- obj = module.params['object']
- # If there is a top level object, do nothing - if the object starts with /
- # remove the leading character to maintain compatibility with Ansible versions < 2.4
- if obj.startswith('/'):
- obj = obj[1:]
-
- # Bucket deletion does not require obj. Prevents ambiguity with delobj.
- if obj and mode == "delete":
- module.fail_json(msg='Parameter obj cannot be used with mode=delete')
-
- # allow eucarc environment variables to be used if ansible vars aren't set
- if not s3_url and 'S3_URL' in os.environ:
- s3_url = os.environ['S3_URL']
-
- if dualstack and s3_url is not None and 'amazonaws.com' not in s3_url:
- module.fail_json(msg='dualstack only applies to AWS S3')
-
- if dualstack and not module.botocore_at_least('1.4.45'):
- module.fail_json(msg='dualstack requires botocore >= 1.4.45')
-
- # rgw requires an explicit url
- if rgw and not s3_url:
- module.fail_json(msg='rgw flavour requires s3_url')
-
- # Look at s3_url and tweak connection settings
- # if connecting to RGW, Walrus or fakes3
- if s3_url:
- for key in ['validate_certs', 'security_token', 'profile_name']:
- aws_connect_kwargs.pop(key, None)
- s3 = get_s3_connection(module, aws_connect_kwargs, location, rgw, s3_url)
-
- validate = not ignore_nonexistent_bucket
-
- # separate types of ACLs
- bucket_acl = [acl for acl in module.params.get('permission') if acl in bucket_canned_acl]
- object_acl = [acl for acl in module.params.get('permission') if acl in object_canned_acl]
- error_acl = [acl for acl in module.params.get('permission') if acl not in bucket_canned_acl and acl not in object_canned_acl]
- if error_acl:
- module.fail_json(msg='Unknown permission specified: %s' % error_acl)
-
- # First, we check to see if the bucket exists, we get "bucket" returned.
- bucketrtn = bucket_check(module, s3, bucket, validate=validate)
-
- if validate and mode not in ('create', 'put', 'delete') and not bucketrtn:
- module.fail_json(msg="Source bucket cannot be found.")
-
- if mode == 'get':
- keyrtn = key_check(module, s3, bucket, obj, version=version, validate=validate)
- if keyrtn is False:
- if version:
- module.fail_json(msg="Key %s with version id %s does not exist." % (obj, version))
- else:
- module.fail_json(msg="Key %s does not exist." % obj)
-
- if path_check(dest) and overwrite != 'always':
- if overwrite == 'never':
- module.exit_json(msg="Local object already exists and overwrite is disabled.", changed=False)
- if etag_compare(module, dest, s3, bucket, obj, version=version):
- module.exit_json(msg="Local and remote object are identical, ignoring. Use overwrite=always parameter to force.", changed=False)
-
- try:
- download_s3file(module, s3, bucket, obj, dest, retries, version=version)
- except Sigv4Required:
- s3 = get_s3_connection(module, aws_connect_kwargs, location, rgw, s3_url, sig_4=True)
- download_s3file(module, s3, bucket, obj, dest, retries, version=version)
-
- if mode == 'put':
-
- # if putting an object in a bucket yet to be created, acls for the bucket and/or the object may be specified
- # these were separated into the variables bucket_acl and object_acl above
-
- if not path_check(src):
- module.fail_json(msg="Local object for PUT does not exist")
-
- if bucketrtn:
- keyrtn = key_check(module, s3, bucket, obj, version=version, validate=validate)
- else:
- # If the bucket doesn't exist we should create it.
- # only use valid bucket acls for create_bucket function
- module.params['permission'] = bucket_acl
- create_bucket(module, s3, bucket, location)
-
- if keyrtn and overwrite != 'always':
- if overwrite == 'never' or etag_compare(module, src, s3, bucket, obj):
- # Return the download URL for the existing object
- get_download_url(module, s3, bucket, obj, expiry, changed=False)
-
- # only use valid object acls for the upload_s3file function
- module.params['permission'] = object_acl
- upload_s3file(module, s3, bucket, obj, src, expiry, metadata, encrypt, headers)
-
- # Delete an object from a bucket, not the entire bucket
- if mode == 'delobj':
- if obj is None:
- module.fail_json(msg="object parameter is required")
- if bucket:
- deletertn = delete_key(module, s3, bucket, obj)
- if deletertn is True:
- module.exit_json(msg="Object deleted from bucket %s." % bucket, changed=True)
- else:
- module.fail_json(msg="Bucket parameter is required.")
-
- # Delete an entire bucket, including all objects in the bucket
- if mode == 'delete':
- if bucket:
- deletertn = delete_bucket(module, s3, bucket)
- if deletertn is True:
- module.exit_json(msg="Bucket %s and all keys have been deleted." % bucket, changed=True)
- else:
- module.fail_json(msg="Bucket parameter is required.")
-
- # Support for listing a set of keys
- if mode == 'list':
- exists = bucket_check(module, s3, bucket)
-
- # If the bucket does not exist then bail out
- if not exists:
- module.fail_json(msg="Target bucket (%s) cannot be found" % bucket)
-
- list_keys(module, s3, bucket, prefix, marker, max_keys)
-
- # Need to research how to create directories without "populating" a key, so this should just do bucket creation for now.
- # WE SHOULD ENABLE SOME WAY OF CREATING AN EMPTY KEY TO CREATE "DIRECTORY" STRUCTURE, AWS CONSOLE DOES THIS.
- if mode == 'create':
-
- # if both creating a bucket and putting an object in it, acls for the bucket and/or the object may be specified
- # these were separated above into the variables bucket_acl and object_acl
-
- if bucket and not obj:
- if bucketrtn:
- module.exit_json(msg="Bucket already exists.", changed=False)
- else:
- # only use valid bucket acls when creating the bucket
- module.params['permission'] = bucket_acl
- module.exit_json(msg="Bucket created successfully", changed=create_bucket(module, s3, bucket, location))
- if bucket and obj:
- if obj.endswith('/'):
- dirobj = obj
- else:
- dirobj = obj + "/"
- if bucketrtn:
- if key_check(module, s3, bucket, dirobj):
- module.exit_json(msg="Bucket %s and key %s already exists." % (bucket, obj), changed=False)
- else:
- # setting valid object acls for the create_dirkey function
- module.params['permission'] = object_acl
- create_dirkey(module, s3, bucket, dirobj, encrypt)
- else:
- # only use valid bucket acls for the create_bucket function
- module.params['permission'] = bucket_acl
- created = create_bucket(module, s3, bucket, location)
- # only use valid object acls for the create_dirkey function
- module.params['permission'] = object_acl
- create_dirkey(module, s3, bucket, dirobj, encrypt)
-
- # Support for grabbing the time-expired URL for an object in S3/Walrus.
- if mode == 'geturl':
- if not bucket and not obj:
- module.fail_json(msg="Bucket and Object parameters must be set")
-
- keyrtn = key_check(module, s3, bucket, obj, version=version, validate=validate)
- if keyrtn:
- get_download_url(module, s3, bucket, obj, expiry)
- else:
- module.fail_json(msg="Key %s does not exist." % obj)
-
- if mode == 'getstr':
- if bucket and obj:
- keyrtn = key_check(module, s3, bucket, obj, version=version, validate=validate)
- if keyrtn:
- try:
- download_s3str(module, s3, bucket, obj, version=version)
- except Sigv4Required:
- s3 = get_s3_connection(module, aws_connect_kwargs, location, rgw, s3_url, sig_4=True)
- download_s3str(module, s3, bucket, obj, version=version)
- elif version is not None:
- module.fail_json(msg="Key %s with version id %s does not exist." % (obj, version))
- else:
- module.fail_json(msg="Key %s does not exist." % obj)
-
- module.exit_json(failed=False)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/cloud_init_data_facts.py b/test/support/integration/plugins/modules/cloud_init_data_facts.py
deleted file mode 100644
index 4f871b99..00000000
--- a/test/support/integration/plugins/modules/cloud_init_data_facts.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-#
-# (c) 2018, René Moser <mail@renemoser.net>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-
-DOCUMENTATION = '''
----
-module: cloud_init_data_facts
-short_description: Retrieve facts of cloud-init.
-description:
- - Gathers facts by reading the status.json and result.json of cloud-init.
-version_added: 2.6
-author: René Moser (@resmo)
-options:
- filter:
- description:
- - Filter facts
- choices: [ status, result ]
-notes:
- - See http://cloudinit.readthedocs.io/ for more information about cloud-init.
-'''
-
-EXAMPLES = '''
-- name: Gather all facts of cloud init
- cloud_init_data_facts:
- register: result
-
-- debug:
- var: result
-
-- name: Wait for cloud init to finish
- cloud_init_data_facts:
- filter: status
- register: res
- until: "res.cloud_init_data_facts.status.v1.stage is defined and not res.cloud_init_data_facts.status.v1.stage"
- retries: 50
- delay: 5
-'''
-
-RETURN = '''
----
-cloud_init_data_facts:
- description: Facts of result and status.
- returned: success
- type: dict
- sample: '{
- "status": {
- "v1": {
- "datasource": "DataSourceCloudStack",
- "errors": []
- },
- "result": {
- "v1": {
- "datasource": "DataSourceCloudStack",
- "init": {
- "errors": [],
- "finished": 1522066377.0185432,
- "start": 1522066375.2648022
- },
- "init-local": {
- "errors": [],
- "finished": 1522066373.70919,
- "start": 1522066373.4726632
- },
- "modules-config": {
- "errors": [],
- "finished": 1522066380.9097016,
- "start": 1522066379.0011985
- },
- "modules-final": {
- "errors": [],
- "finished": 1522066383.56594,
- "start": 1522066382.3449218
- },
- "stage": null
- }
- }'
-'''
-
-import os
-
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils._text import to_text
-
-
-CLOUD_INIT_PATH = "/var/lib/cloud/data/"
-
-
-def gather_cloud_init_data_facts(module):
- res = {
- 'cloud_init_data_facts': dict()
- }
-
- for i in ['result', 'status']:
- filter = module.params.get('filter')
- if filter is None or filter == i:
- res['cloud_init_data_facts'][i] = dict()
- json_file = CLOUD_INIT_PATH + i + '.json'
-
- if os.path.exists(json_file):
- f = open(json_file, 'rb')
- contents = to_text(f.read(), errors='surrogate_or_strict')
- f.close()
-
- if contents:
- res['cloud_init_data_facts'][i] = module.from_json(contents)
- return res
-
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(
- filter=dict(choices=['result', 'status']),
- ),
- supports_check_mode=True,
- )
-
- facts = gather_cloud_init_data_facts(module)
- result = dict(changed=False, ansible_facts=facts, **facts)
- module.exit_json(**result)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/deploy_helper.py b/test/support/integration/plugins/modules/deploy_helper.py
deleted file mode 100644
index 38594dde..00000000
--- a/test/support/integration/plugins/modules/deploy_helper.py
+++ /dev/null
@@ -1,521 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# (c) 2014, Jasper N. Brouwer <jasper@nerdsweide.nl>
-# (c) 2014, Ramon de la Fuente <ramon@delafuente.nl>
-#
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-
-DOCUMENTATION = '''
----
-module: deploy_helper
-version_added: "2.0"
-author: "Ramon de la Fuente (@ramondelafuente)"
-short_description: Manages some of the steps common in deploying projects.
-description:
- - The Deploy Helper manages some of the steps common in deploying software.
- It creates a folder structure, manages a symlink for the current release
- and cleans up old releases.
- - "Running it with the C(state=query) or C(state=present) will return the C(deploy_helper) fact.
- C(project_path), whatever you set in the path parameter,
- C(current_path), the path to the symlink that points to the active release,
- C(releases_path), the path to the folder to keep releases in,
- C(shared_path), the path to the folder to keep shared resources in,
- C(unfinished_filename), the file to check for to recognize unfinished builds,
- C(previous_release), the release the 'current' symlink is pointing to,
- C(previous_release_path), the full path to the 'current' symlink target,
- C(new_release), either the 'release' parameter or a generated timestamp,
- C(new_release_path), the path to the new release folder (not created by the module)."
-
-options:
- path:
- required: True
- aliases: ['dest']
- description:
- - the root path of the project. Alias I(dest).
- Returned in the C(deploy_helper.project_path) fact.
-
- state:
- description:
- - the state of the project.
- C(query) will only gather facts,
- C(present) will create the project I(root) folder, and in it the I(releases) and I(shared) folders,
- C(finalize) will remove the unfinished_filename file, create a symlink to the newly
- deployed release and optionally clean old releases,
- C(clean) will remove failed & old releases,
- C(absent) will remove the project folder (synonymous to the M(file) module with C(state=absent))
- choices: [ present, finalize, absent, clean, query ]
- default: present
-
- release:
- description:
- - the release version that is being deployed. Defaults to a timestamp format %Y%m%d%H%M%S (i.e. '20141119223359').
- This parameter is optional during C(state=present), but needs to be set explicitly for C(state=finalize).
- You can use the generated fact C(release={{ deploy_helper.new_release }}).
-
- releases_path:
- description:
- - the name of the folder that will hold the releases. This can be relative to C(path) or absolute.
- Returned in the C(deploy_helper.releases_path) fact.
- default: releases
-
- shared_path:
- description:
- - the name of the folder that will hold the shared resources. This can be relative to C(path) or absolute.
- If this is set to an empty string, no shared folder will be created.
- Returned in the C(deploy_helper.shared_path) fact.
- default: shared
-
- current_path:
- description:
- - the name of the symlink that is created when the deploy is finalized. Used in C(finalize) and C(clean).
- Returned in the C(deploy_helper.current_path) fact.
- default: current
-
- unfinished_filename:
- description:
- - the name of the file that indicates a deploy has not finished. All folders in the releases_path that
- contain this file will be deleted on C(state=finalize) with clean=True, or C(state=clean). This file is
- automatically deleted from the I(new_release_path) during C(state=finalize).
- default: DEPLOY_UNFINISHED
-
- clean:
- description:
- - Whether to run the clean procedure in case of C(state=finalize).
- type: bool
- default: 'yes'
-
- keep_releases:
- description:
- - the number of old releases to keep when cleaning. Used in C(finalize) and C(clean). Any unfinished builds
- will be deleted first, so only correct releases will count. The current version will not count.
- default: 5
-
-notes:
- - Facts are only returned for C(state=query) and C(state=present). If you use both, you should pass any overridden
- parameters to both calls, otherwise the second call will overwrite the facts of the first one.
- - When using C(state=clean), the releases are ordered by I(creation date). You should be able to switch to a
- new naming strategy without problems.
- - Because of the default behaviour of generating the I(new_release) fact, this module will not be idempotent
- unless you pass your own release name with C(release). Due to the nature of deploying software, this should not
- be much of a problem.
-'''
-
-EXAMPLES = '''
-
-# General explanation, starting with an example folder structure for a project:
-
-# root:
-# releases:
-# - 20140415234508
-# - 20140415235146
-# - 20140416082818
-#
-# shared:
-# - sessions
-# - uploads
-#
-# current: releases/20140416082818
-
-
-# The 'releases' folder holds all the available releases. A release is a complete build of the application being
-# deployed. This can be a clone of a repository for example, or a sync of a local folder on your filesystem.
-# Having timestamped folders is one way of having distinct releases, but you could choose your own strategy like
-# git tags or commit hashes.
-#
-# During a deploy, a new folder should be created in the releases folder and any build steps required should be
-# performed. Once the new build is ready, the deploy procedure is 'finalized' by replacing the 'current' symlink
-# with a link to this build.
-#
-# The 'shared' folder holds any resource that is shared between releases. Examples of this are web-server
-# session files, or files uploaded by users of your application. It's quite common to have symlinks from a release
-# folder pointing to a shared/subfolder, and creating these links would be automated as part of the build steps.
-#
-# The 'current' symlink points to one of the releases. Probably the latest one, unless a deploy is in progress.
-# The web-server's root for the project will go through this symlink, so the 'downtime' when switching to a new
-# release is reduced to the time it takes to switch the link.
-#
-# To distinguish between successful builds and unfinished ones, a file can be placed in the folder of the release
-# that is currently in progress. The existence of this file will mark it as unfinished, and allow an automated
-# procedure to remove it during cleanup.
-
-
-# Typical usage
-- name: Initialize the deploy root and gather facts
- deploy_helper:
- path: /path/to/root
-- name: Clone the project to the new release folder
- git:
- repo: git://foosball.example.org/path/to/repo.git
- dest: '{{ deploy_helper.new_release_path }}'
- version: v1.1.1
-- name: Add an unfinished file, to allow cleanup on successful finalize
- file:
- path: '{{ deploy_helper.new_release_path }}/{{ deploy_helper.unfinished_filename }}'
- state: touch
-- name: Perform some build steps, like running your dependency manager for example
- composer:
- command: install
- working_dir: '{{ deploy_helper.new_release_path }}'
-- name: Create some folders in the shared folder
- file:
- path: '{{ deploy_helper.shared_path }}/{{ item }}'
- state: directory
- with_items:
- - sessions
- - uploads
-- name: Add symlinks from the new release to the shared folder
- file:
- path: '{{ deploy_helper.new_release_path }}/{{ item.path }}'
- src: '{{ deploy_helper.shared_path }}/{{ item.src }}'
- state: link
- with_items:
- - path: app/sessions
- src: sessions
- - path: web/uploads
- src: uploads
-- name: Finalize the deploy, removing the unfinished file and switching the symlink
- deploy_helper:
- path: /path/to/root
- release: '{{ deploy_helper.new_release }}'
- state: finalize
-
-# Retrieving facts before running a deploy
-- name: Run 'state=query' to gather facts without changing anything
- deploy_helper:
- path: /path/to/root
- state: query
-# Remember to set the 'release' parameter when you actually call 'state=present' later
-- name: Initialize the deploy root
- deploy_helper:
- path: /path/to/root
- release: '{{ deploy_helper.new_release }}'
- state: present
-
-# all paths can be absolute or relative (to the 'path' parameter)
-- deploy_helper:
- path: /path/to/root
- releases_path: /var/www/project/releases
- shared_path: /var/www/shared
- current_path: /var/www/active
-
-# Using your own naming strategy for releases (a version tag in this case):
-- deploy_helper:
- path: /path/to/root
- release: v1.1.1
- state: present
-- deploy_helper:
- path: /path/to/root
- release: '{{ deploy_helper.new_release }}'
- state: finalize
-
-# Using a different unfinished_filename:
-- deploy_helper:
- path: /path/to/root
- unfinished_filename: README.md
- release: '{{ deploy_helper.new_release }}'
- state: finalize
-
-# Postponing the cleanup of older builds:
-- deploy_helper:
- path: /path/to/root
- release: '{{ deploy_helper.new_release }}'
- state: finalize
- clean: False
-- deploy_helper:
- path: /path/to/root
- state: clean
-# Or running the cleanup ahead of the new deploy
-- deploy_helper:
- path: /path/to/root
- state: clean
-- deploy_helper:
- path: /path/to/root
- state: present
-
-# Keeping more old releases:
-- deploy_helper:
- path: /path/to/root
- release: '{{ deploy_helper.new_release }}'
- state: finalize
- keep_releases: 10
-# Or, if you use 'clean=false' on finalize:
-- deploy_helper:
- path: /path/to/root
- state: clean
- keep_releases: 10
-
-# Removing the entire project root folder
-- deploy_helper:
- path: /path/to/root
- state: absent
-
-# Debugging the facts returned by the module
-- deploy_helper:
- path: /path/to/root
-- debug:
- var: deploy_helper
-'''
-import os
-import shutil
-import time
-import traceback
-
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils._text import to_native
-
-
-class DeployHelper(object):
-
- def __init__(self, module):
- self.module = module
- self.file_args = module.load_file_common_arguments(module.params)
-
- self.clean = module.params['clean']
- self.current_path = module.params['current_path']
- self.keep_releases = module.params['keep_releases']
- self.path = module.params['path']
- self.release = module.params['release']
- self.releases_path = module.params['releases_path']
- self.shared_path = module.params['shared_path']
- self.state = module.params['state']
- self.unfinished_filename = module.params['unfinished_filename']
-
- def gather_facts(self):
- current_path = os.path.join(self.path, self.current_path)
- releases_path = os.path.join(self.path, self.releases_path)
- if self.shared_path:
- shared_path = os.path.join(self.path, self.shared_path)
- else:
- shared_path = None
-
- previous_release, previous_release_path = self._get_last_release(current_path)
-
- if not self.release and (self.state == 'query' or self.state == 'present'):
- self.release = time.strftime("%Y%m%d%H%M%S")
-
- if self.release:
- new_release_path = os.path.join(releases_path, self.release)
- else:
- new_release_path = None
-
- return {
- 'project_path': self.path,
- 'current_path': current_path,
- 'releases_path': releases_path,
- 'shared_path': shared_path,
- 'previous_release': previous_release,
- 'previous_release_path': previous_release_path,
- 'new_release': self.release,
- 'new_release_path': new_release_path,
- 'unfinished_filename': self.unfinished_filename
- }
-
- def delete_path(self, path):
- if not os.path.lexists(path):
- return False
-
- if not os.path.isdir(path):
- self.module.fail_json(msg="%s exists but is not a directory" % path)
-
- if not self.module.check_mode:
- try:
- shutil.rmtree(path, ignore_errors=False)
- except Exception as e:
- self.module.fail_json(msg="rmtree failed: %s" % to_native(e), exception=traceback.format_exc())
-
- return True
-
- def create_path(self, path):
- changed = False
-
- if not os.path.lexists(path):
- changed = True
- if not self.module.check_mode:
- os.makedirs(path)
-
- elif not os.path.isdir(path):
- self.module.fail_json(msg="%s exists but is not a directory" % path)
-
- changed += self.module.set_directory_attributes_if_different(self._get_file_args(path), changed)
-
- return changed
-
- def check_link(self, path):
- if os.path.lexists(path):
- if not os.path.islink(path):
- self.module.fail_json(msg="%s exists but is not a symbolic link" % path)
-
- def create_link(self, source, link_name):
- changed = False
-
- if os.path.islink(link_name):
- norm_link = os.path.normpath(os.path.realpath(link_name))
- norm_source = os.path.normpath(os.path.realpath(source))
- if norm_link == norm_source:
- changed = False
- else:
- changed = True
- if not self.module.check_mode:
- if not os.path.lexists(source):
- self.module.fail_json(msg="the symlink target %s doesn't exists" % source)
- tmp_link_name = link_name + '.' + self.unfinished_filename
- if os.path.islink(tmp_link_name):
- os.unlink(tmp_link_name)
- os.symlink(source, tmp_link_name)
- os.rename(tmp_link_name, link_name)
- else:
- changed = True
- if not self.module.check_mode:
- os.symlink(source, link_name)
-
- return changed
-
- def remove_unfinished_file(self, new_release_path):
- changed = False
- unfinished_file_path = os.path.join(new_release_path, self.unfinished_filename)
- if os.path.lexists(unfinished_file_path):
- changed = True
- if not self.module.check_mode:
- os.remove(unfinished_file_path)
-
- return changed
-
- def remove_unfinished_builds(self, releases_path):
- changes = 0
-
- for release in os.listdir(releases_path):
- if os.path.isfile(os.path.join(releases_path, release, self.unfinished_filename)):
- if self.module.check_mode:
- changes += 1
- else:
- changes += self.delete_path(os.path.join(releases_path, release))
-
- return changes
-
- def remove_unfinished_link(self, path):
- changed = False
-
- tmp_link_name = os.path.join(path, self.release + '.' + self.unfinished_filename)
- if not self.module.check_mode and os.path.exists(tmp_link_name):
- changed = True
- os.remove(tmp_link_name)
-
- return changed
-
- def cleanup(self, releases_path, reserve_version):
- changes = 0
-
- if os.path.lexists(releases_path):
- releases = [f for f in os.listdir(releases_path) if os.path.isdir(os.path.join(releases_path, f))]
- try:
- releases.remove(reserve_version)
- except ValueError:
- pass
-
- if not self.module.check_mode:
- releases.sort(key=lambda x: os.path.getctime(os.path.join(releases_path, x)), reverse=True)
- for release in releases[self.keep_releases:]:
- changes += self.delete_path(os.path.join(releases_path, release))
- elif len(releases) > self.keep_releases:
- changes += (len(releases) - self.keep_releases)
-
- return changes
-
- def _get_file_args(self, path):
- file_args = self.file_args.copy()
- file_args['path'] = path
- return file_args
-
- def _get_last_release(self, current_path):
- previous_release = None
- previous_release_path = None
-
- if os.path.lexists(current_path):
- previous_release_path = os.path.realpath(current_path)
- previous_release = os.path.basename(previous_release_path)
-
- return previous_release, previous_release_path
-
-
-def main():
-
- module = AnsibleModule(
- argument_spec=dict(
- path=dict(aliases=['dest'], required=True, type='path'),
- release=dict(required=False, type='str', default=None),
- releases_path=dict(required=False, type='str', default='releases'),
- shared_path=dict(required=False, type='path', default='shared'),
- current_path=dict(required=False, type='path', default='current'),
- keep_releases=dict(required=False, type='int', default=5),
- clean=dict(required=False, type='bool', default=True),
- unfinished_filename=dict(required=False, type='str', default='DEPLOY_UNFINISHED'),
- state=dict(required=False, choices=['present', 'absent', 'clean', 'finalize', 'query'], default='present')
- ),
- add_file_common_args=True,
- supports_check_mode=True
- )
-
- deploy_helper = DeployHelper(module)
- facts = deploy_helper.gather_facts()
-
- result = {
- 'state': deploy_helper.state
- }
-
- changes = 0
-
- if deploy_helper.state == 'query':
- result['ansible_facts'] = {'deploy_helper': facts}
-
- elif deploy_helper.state == 'present':
- deploy_helper.check_link(facts['current_path'])
- changes += deploy_helper.create_path(facts['project_path'])
- changes += deploy_helper.create_path(facts['releases_path'])
- if deploy_helper.shared_path:
- changes += deploy_helper.create_path(facts['shared_path'])
-
- result['ansible_facts'] = {'deploy_helper': facts}
-
- elif deploy_helper.state == 'finalize':
- if not deploy_helper.release:
- module.fail_json(msg="'release' is a required parameter for state=finalize (try the 'deploy_helper.new_release' fact)")
- if deploy_helper.keep_releases <= 0:
- module.fail_json(msg="'keep_releases' should be at least 1")
-
- changes += deploy_helper.remove_unfinished_file(facts['new_release_path'])
- changes += deploy_helper.create_link(facts['new_release_path'], facts['current_path'])
- if deploy_helper.clean:
- changes += deploy_helper.remove_unfinished_link(facts['project_path'])
- changes += deploy_helper.remove_unfinished_builds(facts['releases_path'])
- changes += deploy_helper.cleanup(facts['releases_path'], facts['new_release'])
-
- elif deploy_helper.state == 'clean':
- changes += deploy_helper.remove_unfinished_link(facts['project_path'])
- changes += deploy_helper.remove_unfinished_builds(facts['releases_path'])
- changes += deploy_helper.cleanup(facts['releases_path'], facts['new_release'])
-
- elif deploy_helper.state == 'absent':
- # destroy the facts
- result['ansible_facts'] = {'deploy_helper': []}
- changes += deploy_helper.delete_path(facts['project_path'])
-
- if changes > 0:
- result['changed'] = True
- else:
- result['changed'] = False
-
- module.exit_json(**result)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/ec2_ami_info.py b/test/support/integration/plugins/modules/ec2_ami_info.py
index 53c2374d..26f86946 100644
--- a/test/support/integration/plugins/modules/ec2_ami_info.py
+++ b/test/support/integration/plugins/modules/ec2_ami_info.py
@@ -269,9 +269,6 @@ def main():
)
module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)
- if module._module._name == 'ec2_ami_facts':
- module._module.deprecate("The 'ec2_ami_facts' module has been renamed to 'ec2_ami_info'",
- version='2.13', collection_name='ansible.builtin')
ec2_client = module.client('ec2')
diff --git a/test/support/integration/plugins/modules/locale_gen.py b/test/support/integration/plugins/modules/locale_gen.py
deleted file mode 100644
index 4968b834..00000000
--- a/test/support/integration/plugins/modules/locale_gen.py
+++ /dev/null
@@ -1,237 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = '''
----
-module: locale_gen
-short_description: Creates or removes locales
-description:
- - Manages locales by editing /etc/locale.gen and invoking locale-gen.
-version_added: "1.6"
-author:
-- Augustus Kling (@AugustusKling)
-options:
- name:
- description:
- - Name and encoding of the locale, such as "en_GB.UTF-8".
- required: true
- state:
- description:
- - Whether the locale shall be present.
- choices: [ absent, present ]
- default: present
-'''
-
-EXAMPLES = '''
-- name: Ensure a locale exists
- locale_gen:
- name: de_CH.UTF-8
- state: present
-'''
-
-import os
-import re
-from subprocess import Popen, PIPE, call
-
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils._text import to_native
-
-LOCALE_NORMALIZATION = {
- ".utf8": ".UTF-8",
- ".eucjp": ".EUC-JP",
- ".iso885915": ".ISO-8859-15",
- ".cp1251": ".CP1251",
- ".koi8r": ".KOI8-R",
- ".armscii8": ".ARMSCII-8",
- ".euckr": ".EUC-KR",
- ".gbk": ".GBK",
- ".gb18030": ".GB18030",
- ".euctw": ".EUC-TW",
-}
-
-
-# ===========================================
-# location module specific support methods.
-#
-
-def is_available(name, ubuntuMode):
- """Check if the given locale is available on the system. This is done by
- checking either :
- * if the locale is present in /etc/locales.gen
- * or if the locale is present in /usr/share/i18n/SUPPORTED"""
- if ubuntuMode:
- __regexp = r'^(?P<locale>\S+_\S+) (?P<charset>\S+)\s*$'
- __locales_available = '/usr/share/i18n/SUPPORTED'
- else:
- __regexp = r'^#{0,1}\s*(?P<locale>\S+_\S+) (?P<charset>\S+)\s*$'
- __locales_available = '/etc/locale.gen'
-
- re_compiled = re.compile(__regexp)
- fd = open(__locales_available, 'r')
- for line in fd:
- result = re_compiled.match(line)
- if result and result.group('locale') == name:
- return True
- fd.close()
- return False
-
-
-def is_present(name):
- """Checks if the given locale is currently installed."""
- output = Popen(["locale", "-a"], stdout=PIPE).communicate()[0]
- output = to_native(output)
- return any(fix_case(name) == fix_case(line) for line in output.splitlines())
-
-
-def fix_case(name):
- """locale -a might return the encoding in either lower or upper case.
- Passing through this function makes them uniform for comparisons."""
- for s, r in LOCALE_NORMALIZATION.items():
- name = name.replace(s, r)
- return name
-
-
-def replace_line(existing_line, new_line):
- """Replaces lines in /etc/locale.gen"""
- try:
- f = open("/etc/locale.gen", "r")
- lines = [line.replace(existing_line, new_line) for line in f]
- finally:
- f.close()
- try:
- f = open("/etc/locale.gen", "w")
- f.write("".join(lines))
- finally:
- f.close()
-
-
-def set_locale(name, enabled=True):
- """ Sets the state of the locale. Defaults to enabled. """
- search_string = r'#{0,1}\s*%s (?P<charset>.+)' % name
- if enabled:
- new_string = r'%s \g<charset>' % (name)
- else:
- new_string = r'# %s \g<charset>' % (name)
- try:
- f = open("/etc/locale.gen", "r")
- lines = [re.sub(search_string, new_string, line) for line in f]
- finally:
- f.close()
- try:
- f = open("/etc/locale.gen", "w")
- f.write("".join(lines))
- finally:
- f.close()
-
-
-def apply_change(targetState, name):
- """Create or remove locale.
-
- Keyword arguments:
- targetState -- Desired state, either present or absent.
- name -- Name including encoding such as de_CH.UTF-8.
- """
- if targetState == "present":
- # Create locale.
- set_locale(name, enabled=True)
- else:
- # Delete locale.
- set_locale(name, enabled=False)
-
- localeGenExitValue = call("locale-gen")
- if localeGenExitValue != 0:
- raise EnvironmentError(localeGenExitValue, "locale.gen failed to execute, it returned " + str(localeGenExitValue))
-
-
-def apply_change_ubuntu(targetState, name):
- """Create or remove locale.
-
- Keyword arguments:
- targetState -- Desired state, either present or absent.
- name -- Name including encoding such as de_CH.UTF-8.
- """
- if targetState == "present":
- # Create locale.
- # Ubuntu's patched locale-gen automatically adds the new locale to /var/lib/locales/supported.d/local
- localeGenExitValue = call(["locale-gen", name])
- else:
- # Delete locale involves discarding the locale from /var/lib/locales/supported.d/local and regenerating all locales.
- try:
- f = open("/var/lib/locales/supported.d/local", "r")
- content = f.readlines()
- finally:
- f.close()
- try:
- f = open("/var/lib/locales/supported.d/local", "w")
- for line in content:
- locale, charset = line.split(' ')
- if locale != name:
- f.write(line)
- finally:
- f.close()
- # Purge locales and regenerate.
- # Please provide a patch if you know how to avoid regenerating the locales to keep!
- localeGenExitValue = call(["locale-gen", "--purge"])
-
- if localeGenExitValue != 0:
- raise EnvironmentError(localeGenExitValue, "locale.gen failed to execute, it returned " + str(localeGenExitValue))
-
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(
- name=dict(type='str', required=True),
- state=dict(type='str', default='present', choices=['absent', 'present']),
- ),
- supports_check_mode=True,
- )
-
- name = module.params['name']
- state = module.params['state']
-
- if not os.path.exists("/etc/locale.gen"):
- if os.path.exists("/var/lib/locales/supported.d/"):
- # Ubuntu created its own system to manage locales.
- ubuntuMode = True
- else:
- module.fail_json(msg="/etc/locale.gen and /var/lib/locales/supported.d/local are missing. Is the package \"locales\" installed?")
- else:
- # We found the common way to manage locales.
- ubuntuMode = False
-
- if not is_available(name, ubuntuMode):
- module.fail_json(msg="The locale you've entered is not available "
- "on your system.")
-
- if is_present(name):
- prev_state = "present"
- else:
- prev_state = "absent"
- changed = (prev_state != state)
-
- if module.check_mode:
- module.exit_json(changed=changed)
- else:
- if changed:
- try:
- if ubuntuMode is False:
- apply_change(state, name)
- else:
- apply_change_ubuntu(state, name)
- except EnvironmentError as e:
- module.fail_json(msg=to_native(e), exitValue=e.errno)
-
- module.exit_json(name=name, changed=changed, msg="OK")
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/lvg.py b/test/support/integration/plugins/modules/lvg.py
deleted file mode 100644
index e2035f68..00000000
--- a/test/support/integration/plugins/modules/lvg.py
+++ /dev/null
@@ -1,295 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2013, Alexander Bulimov <lazywolf0@gmail.com>
-# Based on lvol module by Jeroen Hoekx <jeroen.hoekx@dsquare.be>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-author:
-- Alexander Bulimov (@abulimov)
-module: lvg
-short_description: Configure LVM volume groups
-description:
- - This module creates, removes or resizes volume groups.
-version_added: "1.1"
-options:
- vg:
- description:
- - The name of the volume group.
- type: str
- required: true
- pvs:
- description:
- - List of comma-separated devices to use as physical devices in this volume group.
- - Required when creating or resizing volume group.
- - The module will take care of running pvcreate if needed.
- type: list
- pesize:
- description:
- - "The size of the physical extent. I(pesize) must be a power of 2 of at least 1 sector
- (where the sector size is the largest sector size of the PVs currently used in the VG),
- or at least 128KiB."
- - Since Ansible 2.6, pesize can be optionally suffixed by a UNIT (k/K/m/M/g/G), default unit is megabyte.
- type: str
- default: "4"
- pv_options:
- description:
- - Additional options to pass to C(pvcreate) when creating the volume group.
- type: str
- version_added: "2.4"
- vg_options:
- description:
- - Additional options to pass to C(vgcreate) when creating the volume group.
- type: str
- version_added: "1.6"
- state:
- description:
- - Control if the volume group exists.
- type: str
- choices: [ absent, present ]
- default: present
- force:
- description:
- - If C(yes), allows to remove volume group with logical volumes.
- type: bool
- default: no
-seealso:
-- module: filesystem
-- module: lvol
-- module: parted
-notes:
- - This module does not modify PE size for already present volume group.
-'''
-
-EXAMPLES = r'''
-- name: Create a volume group on top of /dev/sda1 with physical extent size = 32MB
- lvg:
- vg: vg.services
- pvs: /dev/sda1
- pesize: 32
-
-- name: Create a volume group on top of /dev/sdb with physical extent size = 128KiB
- lvg:
- vg: vg.services
- pvs: /dev/sdb
- pesize: 128K
-
-# If, for example, we already have VG vg.services on top of /dev/sdb1,
-# this VG will be extended by /dev/sdc5. Or if vg.services was created on
-# top of /dev/sda5, we first extend it with /dev/sdb1 and /dev/sdc5,
-# and then reduce by /dev/sda5.
-- name: Create or resize a volume group on top of /dev/sdb1 and /dev/sdc5.
- lvg:
- vg: vg.services
- pvs: /dev/sdb1,/dev/sdc5
-
-- name: Remove a volume group with name vg.services
- lvg:
- vg: vg.services
- state: absent
-'''
-
-import itertools
-import os
-
-from ansible.module_utils.basic import AnsibleModule
-
-
-def parse_vgs(data):
- vgs = []
- for line in data.splitlines():
- parts = line.strip().split(';')
- vgs.append({
- 'name': parts[0],
- 'pv_count': int(parts[1]),
- 'lv_count': int(parts[2]),
- })
- return vgs
-
-
-def find_mapper_device_name(module, dm_device):
- dmsetup_cmd = module.get_bin_path('dmsetup', True)
- mapper_prefix = '/dev/mapper/'
- rc, dm_name, err = module.run_command("%s info -C --noheadings -o name %s" % (dmsetup_cmd, dm_device))
- if rc != 0:
- module.fail_json(msg="Failed executing dmsetup command.", rc=rc, err=err)
- mapper_device = mapper_prefix + dm_name.rstrip()
- return mapper_device
-
-
-def parse_pvs(module, data):
- pvs = []
- dm_prefix = '/dev/dm-'
- for line in data.splitlines():
- parts = line.strip().split(';')
- if parts[0].startswith(dm_prefix):
- parts[0] = find_mapper_device_name(module, parts[0])
- pvs.append({
- 'name': parts[0],
- 'vg_name': parts[1],
- })
- return pvs
-
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(
- vg=dict(type='str', required=True),
- pvs=dict(type='list'),
- pesize=dict(type='str', default='4'),
- pv_options=dict(type='str', default=''),
- vg_options=dict(type='str', default=''),
- state=dict(type='str', default='present', choices=['absent', 'present']),
- force=dict(type='bool', default=False),
- ),
- supports_check_mode=True,
- )
-
- vg = module.params['vg']
- state = module.params['state']
- force = module.boolean(module.params['force'])
- pesize = module.params['pesize']
- pvoptions = module.params['pv_options'].split()
- vgoptions = module.params['vg_options'].split()
-
- dev_list = []
- if module.params['pvs']:
- dev_list = list(module.params['pvs'])
- elif state == 'present':
- module.fail_json(msg="No physical volumes given.")
-
- # LVM always uses real paths not symlinks so replace symlinks with actual path
- for idx, dev in enumerate(dev_list):
- dev_list[idx] = os.path.realpath(dev)
-
- if state == 'present':
- # check given devices
- for test_dev in dev_list:
- if not os.path.exists(test_dev):
- module.fail_json(msg="Device %s not found." % test_dev)
-
- # get pv list
- pvs_cmd = module.get_bin_path('pvs', True)
- if dev_list:
- pvs_filter_pv_name = ' || '.join(
- 'pv_name = {0}'.format(x)
- for x in itertools.chain(dev_list, module.params['pvs'])
- )
- pvs_filter_vg_name = 'vg_name = {0}'.format(vg)
- pvs_filter = "--select '{0} || {1}' ".format(pvs_filter_pv_name, pvs_filter_vg_name)
- else:
- pvs_filter = ''
- rc, current_pvs, err = module.run_command("%s --noheadings -o pv_name,vg_name --separator ';' %s" % (pvs_cmd, pvs_filter))
- if rc != 0:
- module.fail_json(msg="Failed executing pvs command.", rc=rc, err=err)
-
- # check pv for devices
- pvs = parse_pvs(module, current_pvs)
- used_pvs = [pv for pv in pvs if pv['name'] in dev_list and pv['vg_name'] and pv['vg_name'] != vg]
- if used_pvs:
- module.fail_json(msg="Device %s is already in %s volume group." % (used_pvs[0]['name'], used_pvs[0]['vg_name']))
-
- vgs_cmd = module.get_bin_path('vgs', True)
- rc, current_vgs, err = module.run_command("%s --noheadings -o vg_name,pv_count,lv_count --separator ';'" % vgs_cmd)
-
- if rc != 0:
- module.fail_json(msg="Failed executing vgs command.", rc=rc, err=err)
-
- changed = False
-
- vgs = parse_vgs(current_vgs)
-
- for test_vg in vgs:
- if test_vg['name'] == vg:
- this_vg = test_vg
- break
- else:
- this_vg = None
-
- if this_vg is None:
- if state == 'present':
- # create VG
- if module.check_mode:
- changed = True
- else:
- # create PV
- pvcreate_cmd = module.get_bin_path('pvcreate', True)
- for current_dev in dev_list:
- rc, _, err = module.run_command([pvcreate_cmd] + pvoptions + ['-f', str(current_dev)])
- if rc == 0:
- changed = True
- else:
- module.fail_json(msg="Creating physical volume '%s' failed" % current_dev, rc=rc, err=err)
- vgcreate_cmd = module.get_bin_path('vgcreate')
- rc, _, err = module.run_command([vgcreate_cmd] + vgoptions + ['-s', pesize, vg] + dev_list)
- if rc == 0:
- changed = True
- else:
- module.fail_json(msg="Creating volume group '%s' failed" % vg, rc=rc, err=err)
- else:
- if state == 'absent':
- if module.check_mode:
- module.exit_json(changed=True)
- else:
- if this_vg['lv_count'] == 0 or force:
- # remove VG
- vgremove_cmd = module.get_bin_path('vgremove', True)
- rc, _, err = module.run_command("%s --force %s" % (vgremove_cmd, vg))
- if rc == 0:
- module.exit_json(changed=True)
- else:
- module.fail_json(msg="Failed to remove volume group %s" % (vg), rc=rc, err=err)
- else:
- module.fail_json(msg="Refuse to remove non-empty volume group %s without force=yes" % (vg))
-
- # resize VG
- current_devs = [os.path.realpath(pv['name']) for pv in pvs if pv['vg_name'] == vg]
- devs_to_remove = list(set(current_devs) - set(dev_list))
- devs_to_add = list(set(dev_list) - set(current_devs))
-
- if devs_to_add or devs_to_remove:
- if module.check_mode:
- changed = True
- else:
- if devs_to_add:
- devs_to_add_string = ' '.join(devs_to_add)
- # create PV
- pvcreate_cmd = module.get_bin_path('pvcreate', True)
- for current_dev in devs_to_add:
- rc, _, err = module.run_command([pvcreate_cmd] + pvoptions + ['-f', str(current_dev)])
- if rc == 0:
- changed = True
- else:
- module.fail_json(msg="Creating physical volume '%s' failed" % current_dev, rc=rc, err=err)
- # add PV to our VG
- vgextend_cmd = module.get_bin_path('vgextend', True)
- rc, _, err = module.run_command("%s %s %s" % (vgextend_cmd, vg, devs_to_add_string))
- if rc == 0:
- changed = True
- else:
- module.fail_json(msg="Unable to extend %s by %s." % (vg, devs_to_add_string), rc=rc, err=err)
-
- # remove some PV from our VG
- if devs_to_remove:
- devs_to_remove_string = ' '.join(devs_to_remove)
- vgreduce_cmd = module.get_bin_path('vgreduce', True)
- rc, _, err = module.run_command("%s --force %s %s" % (vgreduce_cmd, vg, devs_to_remove_string))
- if rc == 0:
- changed = True
- else:
- module.fail_json(msg="Unable to reduce %s by %s." % (vg, devs_to_remove_string), rc=rc, err=err)
-
- module.exit_json(changed=changed)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/mongodb_parameter.py b/test/support/integration/plugins/modules/mongodb_parameter.py
deleted file mode 100644
index 05de42b2..00000000
--- a/test/support/integration/plugins/modules/mongodb_parameter.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# (c) 2016, Loic Blot <loic.blot@unix-experience.fr>
-# Sponsored by Infopro Digital. http://www.infopro-digital.com/
-# Sponsored by E.T.A.I. http://www.etai.fr/
-#
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-
-DOCUMENTATION = r'''
----
-module: mongodb_parameter
-short_description: Change an administrative parameter on a MongoDB server
-description:
- - Change an administrative parameter on a MongoDB server.
-version_added: "2.1"
-options:
- login_user:
- description:
- - The MongoDB username used to authenticate with.
- type: str
- login_password:
- description:
- - The login user's password used to authenticate with.
- type: str
- login_host:
- description:
- - The host running the database.
- type: str
- default: localhost
- login_port:
- description:
- - The MongoDB port to connect to.
- default: 27017
- type: int
- login_database:
- description:
- - The database where login credentials are stored.
- type: str
- replica_set:
- description:
- - Replica set to connect to (automatically connects to primary for writes).
- type: str
- ssl:
- description:
- - Whether to use an SSL connection when connecting to the database.
- type: bool
- default: no
- param:
- description:
- - MongoDB administrative parameter to modify.
- type: str
- required: true
- value:
- description:
- - MongoDB administrative parameter value to set.
- type: str
- required: true
- param_type:
- description:
- - Define the type of parameter value.
- default: str
- type: str
- choices: [int, str]
-
-notes:
- - Requires the pymongo Python package on the remote host, version 2.4.2+.
- - This can be installed using pip or the OS package manager.
- - See also U(http://api.mongodb.org/python/current/installation.html)
-requirements: [ "pymongo" ]
-author: "Loic Blot (@nerzhul)"
-'''
-
-EXAMPLES = r'''
-- name: Set MongoDB syncdelay to 60 (this is an int)
- mongodb_parameter:
- param: syncdelay
- value: 60
- param_type: int
-'''
-
-RETURN = r'''
-before:
- description: value before modification
- returned: success
- type: str
-after:
- description: value after modification
- returned: success
- type: str
-'''
-
-import os
-import traceback
-
-try:
- from pymongo.errors import ConnectionFailure
- from pymongo.errors import OperationFailure
- from pymongo import version as PyMongoVersion
- from pymongo import MongoClient
-except ImportError:
- try: # for older PyMongo 2.2
- from pymongo import Connection as MongoClient
- except ImportError:
- pymongo_found = False
- else:
- pymongo_found = True
-else:
- pymongo_found = True
-
-from ansible.module_utils.basic import AnsibleModule, missing_required_lib
-from ansible.module_utils.six.moves import configparser
-from ansible.module_utils._text import to_native
-
-
-# =========================================
-# MongoDB module specific support methods.
-#
-
-def load_mongocnf():
- config = configparser.RawConfigParser()
- mongocnf = os.path.expanduser('~/.mongodb.cnf')
-
- try:
- config.readfp(open(mongocnf))
- creds = dict(
- user=config.get('client', 'user'),
- password=config.get('client', 'pass')
- )
- except (configparser.NoOptionError, IOError):
- return False
-
- return creds
-
-
-# =========================================
-# Module execution.
-#
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(
- login_user=dict(default=None),
- login_password=dict(default=None, no_log=True),
- login_host=dict(default='localhost'),
- login_port=dict(default=27017, type='int'),
- login_database=dict(default=None),
- replica_set=dict(default=None),
- param=dict(required=True),
- value=dict(required=True),
- param_type=dict(default="str", choices=['str', 'int']),
- ssl=dict(default=False, type='bool'),
- )
- )
-
- if not pymongo_found:
- module.fail_json(msg=missing_required_lib('pymongo'))
-
- login_user = module.params['login_user']
- login_password = module.params['login_password']
- login_host = module.params['login_host']
- login_port = module.params['login_port']
- login_database = module.params['login_database']
-
- replica_set = module.params['replica_set']
- ssl = module.params['ssl']
-
- param = module.params['param']
- param_type = module.params['param_type']
- value = module.params['value']
-
- # Verify parameter is coherent with specified type
- try:
- if param_type == 'int':
- value = int(value)
- except ValueError:
- module.fail_json(msg="value '%s' is not %s" % (value, param_type))
-
- try:
- if replica_set:
- client = MongoClient(login_host, int(login_port), replicaset=replica_set, ssl=ssl)
- else:
- client = MongoClient(login_host, int(login_port), ssl=ssl)
-
- if login_user is None and login_password is None:
- mongocnf_creds = load_mongocnf()
- if mongocnf_creds is not False:
- login_user = mongocnf_creds['user']
- login_password = mongocnf_creds['password']
- elif login_password is None or login_user is None:
- module.fail_json(msg='when supplying login arguments, both login_user and login_password must be provided')
-
- if login_user is not None and login_password is not None:
- client.admin.authenticate(login_user, login_password, source=login_database)
-
- except ConnectionFailure as e:
- module.fail_json(msg='unable to connect to database: %s' % to_native(e), exception=traceback.format_exc())
-
- db = client.admin
-
- try:
- after_value = db.command("setParameter", **{param: value})
- except OperationFailure as e:
- module.fail_json(msg="unable to change parameter: %s" % to_native(e), exception=traceback.format_exc())
-
- if "was" not in after_value:
- module.exit_json(changed=True, msg="Unable to determine old value, assume it changed.")
- else:
- module.exit_json(changed=(value != after_value["was"]), before=after_value["was"],
- after=value)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/mongodb_user.py b/test/support/integration/plugins/modules/mongodb_user.py
deleted file mode 100644
index 7a18b159..00000000
--- a/test/support/integration/plugins/modules/mongodb_user.py
+++ /dev/null
@@ -1,474 +0,0 @@
-#!/usr/bin/python
-
-# (c) 2012, Elliott Foster <elliott@fourkitchens.com>
-# Sponsored by Four Kitchens http://fourkitchens.com.
-# (c) 2014, Epic Games, Inc.
-#
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-
-DOCUMENTATION = '''
----
-module: mongodb_user
-short_description: Adds or removes a user from a MongoDB database
-description:
- - Adds or removes a user from a MongoDB database.
-version_added: "1.1"
-options:
- login_user:
- description:
- - The MongoDB username used to authenticate with.
- type: str
- login_password:
- description:
- - The login user's password used to authenticate with.
- type: str
- login_host:
- description:
- - The host running the database.
- default: localhost
- type: str
- login_port:
- description:
- - The MongoDB port to connect to.
- default: '27017'
- type: str
- login_database:
- version_added: "2.0"
- description:
- - The database where login credentials are stored.
- type: str
- replica_set:
- version_added: "1.6"
- description:
- - Replica set to connect to (automatically connects to primary for writes).
- type: str
- database:
- description:
- - The name of the database to add/remove the user from.
- required: true
- type: str
- aliases: [db]
- name:
- description:
- - The name of the user to add or remove.
- required: true
- aliases: [user]
- type: str
- password:
- description:
- - The password to use for the user.
- type: str
- aliases: [pass]
- ssl:
- version_added: "1.8"
- description:
- - Whether to use an SSL connection when connecting to the database.
- type: bool
- ssl_cert_reqs:
- version_added: "2.2"
- description:
- - Specifies whether a certificate is required from the other side of the connection,
- and whether it will be validated if provided.
- default: CERT_REQUIRED
- choices: [CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED]
- type: str
- roles:
- version_added: "1.3"
- type: list
- elements: raw
- description:
- - >
- The database user roles valid values could either be one or more of the following strings:
- 'read', 'readWrite', 'dbAdmin', 'userAdmin', 'clusterAdmin', 'readAnyDatabase', 'readWriteAnyDatabase', 'userAdminAnyDatabase',
- 'dbAdminAnyDatabase'
- - "Or the following dictionary '{ db: DATABASE_NAME, role: ROLE_NAME }'."
- - "This param requires pymongo 2.5+. If it is a string, mongodb 2.4+ is also required. If it is a dictionary, mongo 2.6+ is required."
- state:
- description:
- - The database user state.
- default: present
- choices: [absent, present]
- type: str
- update_password:
- default: always
- choices: [always, on_create]
- version_added: "2.1"
- description:
- - C(always) will update passwords if they differ.
- - C(on_create) will only set the password for newly created users.
- type: str
-
-notes:
- - Requires the pymongo Python package on the remote host, version 2.4.2+. This
- can be installed using pip or the OS package manager. @see http://api.mongodb.org/python/current/installation.html
-requirements: [ "pymongo" ]
-author:
- - "Elliott Foster (@elliotttf)"
- - "Julien Thebault (@Lujeni)"
-'''
-
-EXAMPLES = '''
-- name: Create 'burgers' database user with name 'bob' and password '12345'.
- mongodb_user:
- database: burgers
- name: bob
- password: 12345
- state: present
-
-- name: Create a database user via SSL (MongoDB must be compiled with the SSL option and configured properly)
- mongodb_user:
- database: burgers
- name: bob
- password: 12345
- state: present
- ssl: True
-
-- name: Delete 'burgers' database user with name 'bob'.
- mongodb_user:
- database: burgers
- name: bob
- state: absent
-
-- name: Define more users with various specific roles (if not defined, no roles is assigned, and the user will be added via pre mongo 2.2 style)
- mongodb_user:
- database: burgers
- name: ben
- password: 12345
- roles: read
- state: present
-
-- name: Define roles
- mongodb_user:
- database: burgers
- name: jim
- password: 12345
- roles: readWrite,dbAdmin,userAdmin
- state: present
-
-- name: Define roles
- mongodb_user:
- database: burgers
- name: joe
- password: 12345
- roles: readWriteAnyDatabase
- state: present
-
-- name: Add a user to database in a replica set, the primary server is automatically discovered and written to
- mongodb_user:
- database: burgers
- name: bob
- replica_set: belcher
- password: 12345
- roles: readWriteAnyDatabase
- state: present
-
-# add a user 'oplog_reader' with read only access to the 'local' database on the replica_set 'belcher'. This is useful for oplog access (MONGO_OPLOG_URL).
-# please notice the credentials must be added to the 'admin' database because the 'local' database is not synchronized and can't receive user credentials
-# To login with such user, the connection string should be MONGO_OPLOG_URL="mongodb://oplog_reader:oplog_reader_password@server1,server2/local?authSource=admin"
-# This syntax requires mongodb 2.6+ and pymongo 2.5+
-- name: Roles as a dictionary
- mongodb_user:
- login_user: root
- login_password: root_password
- database: admin
- user: oplog_reader
- password: oplog_reader_password
- state: present
- replica_set: belcher
- roles:
- - db: local
- role: read
-
-'''
-
-RETURN = '''
-user:
- description: The name of the user to add or remove.
- returned: success
- type: str
-'''
-
-import os
-import ssl as ssl_lib
-import traceback
-from ansible.module_utils.compat.version import LooseVersion
-from operator import itemgetter
-
-try:
- from pymongo.errors import ConnectionFailure
- from pymongo.errors import OperationFailure
- from pymongo import version as PyMongoVersion
- from pymongo import MongoClient
-except ImportError:
- try: # for older PyMongo 2.2
- from pymongo import Connection as MongoClient
- except ImportError:
- pymongo_found = False
- else:
- pymongo_found = True
-else:
- pymongo_found = True
-
-from ansible.module_utils.basic import AnsibleModule, missing_required_lib
-from ansible.module_utils.six import binary_type, text_type
-from ansible.module_utils.six.moves import configparser
-from ansible.module_utils._text import to_native
-
-
-# =========================================
-# MongoDB module specific support methods.
-#
-
-def check_compatibility(module, client):
- """Check the compatibility between the driver and the database.
-
- See: https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility
-
- Args:
- module: Ansible module.
- client (cursor): Mongodb cursor on admin database.
- """
- loose_srv_version = LooseVersion(client.server_info()['version'])
- loose_driver_version = LooseVersion(PyMongoVersion)
-
- if loose_srv_version >= LooseVersion('3.2') and loose_driver_version < LooseVersion('3.2'):
- module.fail_json(msg=' (Note: you must use pymongo 3.2+ with MongoDB >= 3.2)')
-
- elif loose_srv_version >= LooseVersion('3.0') and loose_driver_version <= LooseVersion('2.8'):
- module.fail_json(msg=' (Note: you must use pymongo 2.8+ with MongoDB 3.0)')
-
- elif loose_srv_version >= LooseVersion('2.6') and loose_driver_version <= LooseVersion('2.7'):
- module.fail_json(msg=' (Note: you must use pymongo 2.7+ with MongoDB 2.6)')
-
- elif LooseVersion(PyMongoVersion) <= LooseVersion('2.5'):
- module.fail_json(msg=' (Note: you must be on mongodb 2.4+ and pymongo 2.5+ to use the roles param)')
-
-
-def user_find(client, user, db_name):
- """Check if the user exists.
-
- Args:
- client (cursor): Mongodb cursor on admin database.
- user (str): User to check.
- db_name (str): User's database.
-
- Returns:
- dict: when user exists, False otherwise.
- """
- for mongo_user in client["admin"].system.users.find():
- if mongo_user['user'] == user:
- # NOTE: there is no 'db' field in mongo 2.4.
- if 'db' not in mongo_user:
- return mongo_user
-
- if mongo_user["db"] == db_name:
- return mongo_user
- return False
-
-
-def user_add(module, client, db_name, user, password, roles):
- # pymongo's user_add is a _create_or_update_user so we won't know if it was changed or updated
- # without reproducing a lot of the logic in database.py of pymongo
- db = client[db_name]
-
- if roles is None:
- db.add_user(user, password, False)
- else:
- db.add_user(user, password, None, roles=roles)
-
-
-def user_remove(module, client, db_name, user):
- exists = user_find(client, user, db_name)
- if exists:
- if module.check_mode:
- module.exit_json(changed=True, user=user)
- db = client[db_name]
- db.remove_user(user)
- else:
- module.exit_json(changed=False, user=user)
-
-
-def load_mongocnf():
- config = configparser.RawConfigParser()
- mongocnf = os.path.expanduser('~/.mongodb.cnf')
-
- try:
- config.readfp(open(mongocnf))
- creds = dict(
- user=config.get('client', 'user'),
- password=config.get('client', 'pass')
- )
- except (configparser.NoOptionError, IOError):
- return False
-
- return creds
-
-
-def check_if_roles_changed(uinfo, roles, db_name):
- # We must be aware of users which can read the oplog on a replicaset
- # Such users must have access to the local DB, but since this DB does not store users credentials
- # and is not synchronized among replica sets, the user must be stored on the admin db
- # Therefore their structure is the following :
- # {
- # "_id" : "admin.oplog_reader",
- # "user" : "oplog_reader",
- # "db" : "admin", # <-- admin DB
- # "roles" : [
- # {
- # "role" : "read",
- # "db" : "local" # <-- local DB
- # }
- # ]
- # }
-
- def make_sure_roles_are_a_list_of_dict(roles, db_name):
- output = list()
- for role in roles:
- if isinstance(role, (binary_type, text_type)):
- new_role = {"role": role, "db": db_name}
- output.append(new_role)
- else:
- output.append(role)
- return output
-
- roles_as_list_of_dict = make_sure_roles_are_a_list_of_dict(roles, db_name)
- uinfo_roles = uinfo.get('roles', [])
-
- if sorted(roles_as_list_of_dict, key=itemgetter('db')) == sorted(uinfo_roles, key=itemgetter('db')):
- return False
- return True
-
-
-# =========================================
-# Module execution.
-#
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(
- login_user=dict(default=None),
- login_password=dict(default=None, no_log=True),
- login_host=dict(default='localhost'),
- login_port=dict(default='27017'),
- login_database=dict(default=None),
- replica_set=dict(default=None),
- database=dict(required=True, aliases=['db']),
- name=dict(required=True, aliases=['user']),
- password=dict(aliases=['pass'], no_log=True),
- ssl=dict(default=False, type='bool'),
- roles=dict(default=None, type='list', elements='raw'),
- state=dict(default='present', choices=['absent', 'present']),
- update_password=dict(default="always", choices=["always", "on_create"]),
- ssl_cert_reqs=dict(default='CERT_REQUIRED', choices=['CERT_NONE', 'CERT_OPTIONAL', 'CERT_REQUIRED']),
- ),
- supports_check_mode=True
- )
-
- if not pymongo_found:
- module.fail_json(msg=missing_required_lib('pymongo'))
-
- login_user = module.params['login_user']
- login_password = module.params['login_password']
- login_host = module.params['login_host']
- login_port = module.params['login_port']
- login_database = module.params['login_database']
-
- replica_set = module.params['replica_set']
- db_name = module.params['database']
- user = module.params['name']
- password = module.params['password']
- ssl = module.params['ssl']
- roles = module.params['roles'] or []
- state = module.params['state']
- update_password = module.params['update_password']
-
- try:
- connection_params = {
- "host": login_host,
- "port": int(login_port),
- }
-
- if replica_set:
- connection_params["replicaset"] = replica_set
-
- if ssl:
- connection_params["ssl"] = ssl
- connection_params["ssl_cert_reqs"] = getattr(ssl_lib, module.params['ssl_cert_reqs'])
-
- client = MongoClient(**connection_params)
-
- # NOTE: this check must be done ASAP.
- # We doesn't need to be authenticated (this ability has lost in PyMongo 3.6)
- if LooseVersion(PyMongoVersion) <= LooseVersion('3.5'):
- check_compatibility(module, client)
-
- if login_user is None and login_password is None:
- mongocnf_creds = load_mongocnf()
- if mongocnf_creds is not False:
- login_user = mongocnf_creds['user']
- login_password = mongocnf_creds['password']
- elif login_password is None or login_user is None:
- module.fail_json(msg='when supplying login arguments, both login_user and login_password must be provided')
-
- if login_user is not None and login_password is not None:
- client.admin.authenticate(login_user, login_password, source=login_database)
- elif LooseVersion(PyMongoVersion) >= LooseVersion('3.0'):
- if db_name != "admin":
- module.fail_json(msg='The localhost login exception only allows the first admin account to be created')
- # else: this has to be the first admin user added
-
- except Exception as e:
- module.fail_json(msg='unable to connect to database: %s' % to_native(e), exception=traceback.format_exc())
-
- if state == 'present':
- if password is None and update_password == 'always':
- module.fail_json(msg='password parameter required when adding a user unless update_password is set to on_create')
-
- try:
- if update_password != 'always':
- uinfo = user_find(client, user, db_name)
- if uinfo:
- password = None
- if not check_if_roles_changed(uinfo, roles, db_name):
- module.exit_json(changed=False, user=user)
-
- if module.check_mode:
- module.exit_json(changed=True, user=user)
-
- user_add(module, client, db_name, user, password, roles)
- except Exception as e:
- module.fail_json(msg='Unable to add or update user: %s' % to_native(e), exception=traceback.format_exc())
- finally:
- try:
- client.close()
- except Exception:
- pass
- # Here we can check password change if mongo provide a query for that : https://jira.mongodb.org/browse/SERVER-22848
- # newuinfo = user_find(client, user, db_name)
- # if uinfo['role'] == newuinfo['role'] and CheckPasswordHere:
- # module.exit_json(changed=False, user=user)
-
- elif state == 'absent':
- try:
- user_remove(module, client, db_name, user)
- except Exception as e:
- module.fail_json(msg='Unable to remove user: %s' % to_native(e), exception=traceback.format_exc())
- finally:
- try:
- client.close()
- except Exception:
- pass
- module.exit_json(changed=True, user=user)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/pids.py b/test/support/integration/plugins/modules/pids.py
deleted file mode 100644
index 4cbf45a9..00000000
--- a/test/support/integration/plugins/modules/pids.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/python
-# Copyright: (c) 2019, Saranya Sridharan
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = '''
-module: pids
-version_added: 2.8
-description: "Retrieves a list of PIDs of given process name in Ansible controller/controlled machines.Returns an empty list if no process in that name exists."
-short_description: "Retrieves process IDs list if the process is running otherwise return empty list"
-author:
- - Saranya Sridharan (@saranyasridharan)
-requirements:
- - psutil(python module)
-options:
- name:
- description: the name of the process you want to get PID for.
- required: true
- type: str
-'''
-
-EXAMPLES = '''
-# Pass the process name
-- name: Getting process IDs of the process
- pids:
- name: python
- register: pids_of_python
-
-- name: Printing the process IDs obtained
- debug:
- msg: "PIDS of python:{{pids_of_python.pids|join(',')}}"
-'''
-
-RETURN = '''
-pids:
- description: Process IDs of the given process
- returned: list of none, one, or more process IDs
- type: list
- sample: [100,200]
-'''
-
-from ansible.module_utils.basic import AnsibleModule
-try:
- import psutil
- HAS_PSUTIL = True
-except ImportError:
- HAS_PSUTIL = False
-
-
-def compare_lower(a, b):
- if a is None or b is None:
- # this could just be "return False" but would lead to surprising behavior if both a and b are None
- return a == b
-
- return a.lower() == b.lower()
-
-
-def get_pid(name):
- pids = []
-
- for proc in psutil.process_iter(attrs=['name', 'cmdline']):
- if compare_lower(proc.info['name'], name) or \
- proc.info['cmdline'] and compare_lower(proc.info['cmdline'][0], name):
- pids.append(proc.pid)
-
- return pids
-
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(
- name=dict(required=True, type="str"),
- ),
- supports_check_mode=True,
- )
- if not HAS_PSUTIL:
- module.fail_json(msg="Missing required 'psutil' python module. Try installing it with: pip install psutil")
- name = module.params["name"]
- response = dict(pids=get_pid(name))
- module.exit_json(**response)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/postgresql_db.py b/test/support/integration/plugins/modules/postgresql_db.py
deleted file mode 100644
index 40858d99..00000000
--- a/test/support/integration/plugins/modules/postgresql_db.py
+++ /dev/null
@@ -1,657 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['stableinterface'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: postgresql_db
-short_description: Add or remove PostgreSQL databases from a remote host.
-description:
- - Add or remove PostgreSQL databases from a remote host.
-version_added: '0.6'
-options:
- name:
- description:
- - Name of the database to add or remove
- type: str
- required: true
- aliases: [ db ]
- port:
- description:
- - Database port to connect (if needed)
- type: int
- default: 5432
- aliases:
- - login_port
- owner:
- description:
- - Name of the role to set as owner of the database
- type: str
- template:
- description:
- - Template used to create the database
- type: str
- encoding:
- description:
- - Encoding of the database
- type: str
- lc_collate:
- description:
- - Collation order (LC_COLLATE) to use in the database. Must match collation order of template database unless C(template0) is used as template.
- type: str
- lc_ctype:
- description:
- - Character classification (LC_CTYPE) to use in the database (e.g. lower, upper, ...) Must match LC_CTYPE of template database unless C(template0)
- is used as template.
- type: str
- session_role:
- description:
- - Switch to session_role after connecting. The specified session_role must be a role that the current login_user is a member of.
- - Permissions checking for SQL commands is carried out as though the session_role were the one that had logged in originally.
- type: str
- version_added: '2.8'
- state:
- description:
- - The database state.
- - C(present) implies that the database should be created if necessary.
- - C(absent) implies that the database should be removed if present.
- - C(dump) requires a target definition to which the database will be backed up. (Added in Ansible 2.4)
- Note that in some PostgreSQL versions of pg_dump, which is an embedded PostgreSQL utility and is used by the module,
- returns rc 0 even when errors occurred (e.g. the connection is forbidden by pg_hba.conf, etc.),
- so the module returns changed=True but the dump has not actually been done. Please, be sure that your version of
- pg_dump returns rc 1 in this case.
- - C(restore) also requires a target definition from which the database will be restored. (Added in Ansible 2.4)
- - The format of the backup will be detected based on the target name.
- - Supported compression formats for dump and restore include C(.pgc), C(.bz2), C(.gz) and C(.xz)
- - Supported formats for dump and restore include C(.sql) and C(.tar)
- type: str
- choices: [ absent, dump, present, restore ]
- default: present
- target:
- description:
- - File to back up or restore from.
- - Used when I(state) is C(dump) or C(restore).
- type: path
- version_added: '2.4'
- target_opts:
- description:
- - Further arguments for pg_dump or pg_restore.
- - Used when I(state) is C(dump) or C(restore).
- type: str
- version_added: '2.4'
- maintenance_db:
- description:
- - The value specifies the initial database (which is also called as maintenance DB) that Ansible connects to.
- type: str
- default: postgres
- version_added: '2.5'
- conn_limit:
- description:
- - Specifies the database connection limit.
- type: str
- version_added: '2.8'
- tablespace:
- description:
- - The tablespace to set for the database
- U(https://www.postgresql.org/docs/current/sql-alterdatabase.html).
- - If you want to move the database back to the default tablespace,
- explicitly set this to pg_default.
- type: path
- version_added: '2.9'
- dump_extra_args:
- description:
- - Provides additional arguments when I(state) is C(dump).
- - Cannot be used with dump-file-format-related arguments like ``--format=d``.
- type: str
- version_added: '2.10'
-seealso:
-- name: CREATE DATABASE reference
- description: Complete reference of the CREATE DATABASE command documentation.
- link: https://www.postgresql.org/docs/current/sql-createdatabase.html
-- name: DROP DATABASE reference
- description: Complete reference of the DROP DATABASE command documentation.
- link: https://www.postgresql.org/docs/current/sql-dropdatabase.html
-- name: pg_dump reference
- description: Complete reference of pg_dump documentation.
- link: https://www.postgresql.org/docs/current/app-pgdump.html
-- name: pg_restore reference
- description: Complete reference of pg_restore documentation.
- link: https://www.postgresql.org/docs/current/app-pgrestore.html
-- module: postgresql_tablespace
-- module: postgresql_info
-- module: postgresql_ping
-notes:
-- State C(dump) and C(restore) don't require I(psycopg2) since version 2.8.
-author: "Ansible Core Team"
-extends_documentation_fragment:
-- postgres
-'''
-
-EXAMPLES = r'''
-- name: Create a new database with name "acme"
- postgresql_db:
- name: acme
-
-# Note: If a template different from "template0" is specified, encoding and locale settings must match those of the template.
-- name: Create a new database with name "acme" and specific encoding and locale # settings.
- postgresql_db:
- name: acme
- encoding: UTF-8
- lc_collate: de_DE.UTF-8
- lc_ctype: de_DE.UTF-8
- template: template0
-
-# Note: Default limit for the number of concurrent connections to a specific database is "-1", which means "unlimited"
-- name: Create a new database with name "acme" which has a limit of 100 concurrent connections
- postgresql_db:
- name: acme
- conn_limit: "100"
-
-- name: Dump an existing database to a file
- postgresql_db:
- name: acme
- state: dump
- target: /tmp/acme.sql
-
-- name: Dump an existing database to a file excluding the test table
- postgresql_db:
- name: acme
- state: dump
- target: /tmp/acme.sql
- dump_extra_args: --exclude-table=test
-
-- name: Dump an existing database to a file (with compression)
- postgresql_db:
- name: acme
- state: dump
- target: /tmp/acme.sql.gz
-
-- name: Dump a single schema for an existing database
- postgresql_db:
- name: acme
- state: dump
- target: /tmp/acme.sql
- target_opts: "-n public"
-
-# Note: In the example below, if database foo exists and has another tablespace
-# the tablespace will be changed to foo. Access to the database will be locked
-# until the copying of database files is finished.
-- name: Create a new database called foo in tablespace bar
- postgresql_db:
- name: foo
- tablespace: bar
-'''
-
-RETURN = r'''
-executed_commands:
- description: List of commands which tried to run.
- returned: always
- type: list
- sample: ["CREATE DATABASE acme"]
- version_added: '2.10'
-'''
-
-
-import os
-import subprocess
-import traceback
-
-try:
- import psycopg2
- import psycopg2.extras
-except ImportError:
- HAS_PSYCOPG2 = False
-else:
- HAS_PSYCOPG2 = True
-
-import ansible.module_utils.postgres as pgutils
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.database import SQLParseError, pg_quote_identifier
-from ansible.module_utils.six import iteritems
-from ansible.module_utils.six.moves import shlex_quote
-from ansible.module_utils._text import to_native
-
-executed_commands = []
-
-
-class NotSupportedError(Exception):
- pass
-
-# ===========================================
-# PostgreSQL module specific support methods.
-#
-
-
-def set_owner(cursor, db, owner):
- query = 'ALTER DATABASE %s OWNER TO "%s"' % (
- pg_quote_identifier(db, 'database'),
- owner)
- executed_commands.append(query)
- cursor.execute(query)
- return True
-
-
-def set_conn_limit(cursor, db, conn_limit):
- query = "ALTER DATABASE %s CONNECTION LIMIT %s" % (
- pg_quote_identifier(db, 'database'),
- conn_limit)
- executed_commands.append(query)
- cursor.execute(query)
- return True
-
-
-def get_encoding_id(cursor, encoding):
- query = "SELECT pg_char_to_encoding(%(encoding)s) AS encoding_id;"
- cursor.execute(query, {'encoding': encoding})
- return cursor.fetchone()['encoding_id']
-
-
-def get_db_info(cursor, db):
- query = """
- SELECT rolname AS owner,
- pg_encoding_to_char(encoding) AS encoding, encoding AS encoding_id,
- datcollate AS lc_collate, datctype AS lc_ctype, pg_database.datconnlimit AS conn_limit,
- spcname AS tablespace
- FROM pg_database
- JOIN pg_roles ON pg_roles.oid = pg_database.datdba
- JOIN pg_tablespace ON pg_tablespace.oid = pg_database.dattablespace
- WHERE datname = %(db)s
- """
- cursor.execute(query, {'db': db})
- return cursor.fetchone()
-
-
-def db_exists(cursor, db):
- query = "SELECT * FROM pg_database WHERE datname=%(db)s"
- cursor.execute(query, {'db': db})
- return cursor.rowcount == 1
-
-
-def db_delete(cursor, db):
- if db_exists(cursor, db):
- query = "DROP DATABASE %s" % pg_quote_identifier(db, 'database')
- executed_commands.append(query)
- cursor.execute(query)
- return True
- else:
- return False
-
-
-def db_create(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace):
- params = dict(enc=encoding, collate=lc_collate, ctype=lc_ctype, conn_limit=conn_limit, tablespace=tablespace)
- if not db_exists(cursor, db):
- query_fragments = ['CREATE DATABASE %s' % pg_quote_identifier(db, 'database')]
- if owner:
- query_fragments.append('OWNER "%s"' % owner)
- if template:
- query_fragments.append('TEMPLATE %s' % pg_quote_identifier(template, 'database'))
- if encoding:
- query_fragments.append('ENCODING %(enc)s')
- if lc_collate:
- query_fragments.append('LC_COLLATE %(collate)s')
- if lc_ctype:
- query_fragments.append('LC_CTYPE %(ctype)s')
- if tablespace:
- query_fragments.append('TABLESPACE %s' % pg_quote_identifier(tablespace, 'tablespace'))
- if conn_limit:
- query_fragments.append("CONNECTION LIMIT %(conn_limit)s" % {"conn_limit": conn_limit})
- query = ' '.join(query_fragments)
- executed_commands.append(cursor.mogrify(query, params))
- cursor.execute(query, params)
- return True
- else:
- db_info = get_db_info(cursor, db)
- if (encoding and get_encoding_id(cursor, encoding) != db_info['encoding_id']):
- raise NotSupportedError(
- 'Changing database encoding is not supported. '
- 'Current encoding: %s' % db_info['encoding']
- )
- elif lc_collate and lc_collate != db_info['lc_collate']:
- raise NotSupportedError(
- 'Changing LC_COLLATE is not supported. '
- 'Current LC_COLLATE: %s' % db_info['lc_collate']
- )
- elif lc_ctype and lc_ctype != db_info['lc_ctype']:
- raise NotSupportedError(
- 'Changing LC_CTYPE is not supported.'
- 'Current LC_CTYPE: %s' % db_info['lc_ctype']
- )
- else:
- changed = False
-
- if owner and owner != db_info['owner']:
- changed = set_owner(cursor, db, owner)
-
- if conn_limit and conn_limit != str(db_info['conn_limit']):
- changed = set_conn_limit(cursor, db, conn_limit)
-
- if tablespace and tablespace != db_info['tablespace']:
- changed = set_tablespace(cursor, db, tablespace)
-
- return changed
-
-
-def db_matches(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace):
- if not db_exists(cursor, db):
- return False
- else:
- db_info = get_db_info(cursor, db)
- if (encoding and get_encoding_id(cursor, encoding) != db_info['encoding_id']):
- return False
- elif lc_collate and lc_collate != db_info['lc_collate']:
- return False
- elif lc_ctype and lc_ctype != db_info['lc_ctype']:
- return False
- elif owner and owner != db_info['owner']:
- return False
- elif conn_limit and conn_limit != str(db_info['conn_limit']):
- return False
- elif tablespace and tablespace != db_info['tablespace']:
- return False
- else:
- return True
-
-
-def db_dump(module, target, target_opts="",
- db=None,
- dump_extra_args=None,
- user=None,
- password=None,
- host=None,
- port=None,
- **kw):
-
- flags = login_flags(db, host, port, user, db_prefix=False)
- cmd = module.get_bin_path('pg_dump', True)
- comp_prog_path = None
-
- if os.path.splitext(target)[-1] == '.tar':
- flags.append(' --format=t')
- elif os.path.splitext(target)[-1] == '.pgc':
- flags.append(' --format=c')
- if os.path.splitext(target)[-1] == '.gz':
- if module.get_bin_path('pigz'):
- comp_prog_path = module.get_bin_path('pigz', True)
- else:
- comp_prog_path = module.get_bin_path('gzip', True)
- elif os.path.splitext(target)[-1] == '.bz2':
- comp_prog_path = module.get_bin_path('bzip2', True)
- elif os.path.splitext(target)[-1] == '.xz':
- comp_prog_path = module.get_bin_path('xz', True)
-
- cmd += "".join(flags)
-
- if dump_extra_args:
- cmd += " {0} ".format(dump_extra_args)
-
- if target_opts:
- cmd += " {0} ".format(target_opts)
-
- if comp_prog_path:
- # Use a fifo to be notified of an error in pg_dump
- # Using shell pipe has no way to return the code of the first command
- # in a portable way.
- fifo = os.path.join(module.tmpdir, 'pg_fifo')
- os.mkfifo(fifo)
- cmd = '{1} <{3} > {2} & {0} >{3}'.format(cmd, comp_prog_path, shlex_quote(target), fifo)
- else:
- cmd = '{0} > {1}'.format(cmd, shlex_quote(target))
-
- return do_with_password(module, cmd, password)
-
-
-def db_restore(module, target, target_opts="",
- db=None,
- user=None,
- password=None,
- host=None,
- port=None,
- **kw):
-
- flags = login_flags(db, host, port, user)
- comp_prog_path = None
- cmd = module.get_bin_path('psql', True)
-
- if os.path.splitext(target)[-1] == '.sql':
- flags.append(' --file={0}'.format(target))
-
- elif os.path.splitext(target)[-1] == '.tar':
- flags.append(' --format=Tar')
- cmd = module.get_bin_path('pg_restore', True)
-
- elif os.path.splitext(target)[-1] == '.pgc':
- flags.append(' --format=Custom')
- cmd = module.get_bin_path('pg_restore', True)
-
- elif os.path.splitext(target)[-1] == '.gz':
- comp_prog_path = module.get_bin_path('zcat', True)
-
- elif os.path.splitext(target)[-1] == '.bz2':
- comp_prog_path = module.get_bin_path('bzcat', True)
-
- elif os.path.splitext(target)[-1] == '.xz':
- comp_prog_path = module.get_bin_path('xzcat', True)
-
- cmd += "".join(flags)
- if target_opts:
- cmd += " {0} ".format(target_opts)
-
- if comp_prog_path:
- env = os.environ.copy()
- if password:
- env = {"PGPASSWORD": password}
- p1 = subprocess.Popen([comp_prog_path, target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- p2 = subprocess.Popen(cmd, stdin=p1.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=env)
- (stdout2, stderr2) = p2.communicate()
- p1.stdout.close()
- p1.wait()
- if p1.returncode != 0:
- stderr1 = p1.stderr.read()
- return p1.returncode, '', stderr1, 'cmd: ****'
- else:
- return p2.returncode, '', stderr2, 'cmd: ****'
- else:
- cmd = '{0} < {1}'.format(cmd, shlex_quote(target))
-
- return do_with_password(module, cmd, password)
-
-
-def login_flags(db, host, port, user, db_prefix=True):
- """
- returns a list of connection argument strings each prefixed
- with a space and quoted where necessary to later be combined
- in a single shell string with `"".join(rv)`
-
- db_prefix determines if "--dbname" is prefixed to the db argument,
- since the argument was introduced in 9.3.
- """
- flags = []
- if db:
- if db_prefix:
- flags.append(' --dbname={0}'.format(shlex_quote(db)))
- else:
- flags.append(' {0}'.format(shlex_quote(db)))
- if host:
- flags.append(' --host={0}'.format(host))
- if port:
- flags.append(' --port={0}'.format(port))
- if user:
- flags.append(' --username={0}'.format(user))
- return flags
-
-
-def do_with_password(module, cmd, password):
- env = {}
- if password:
- env = {"PGPASSWORD": password}
- executed_commands.append(cmd)
- rc, stderr, stdout = module.run_command(cmd, use_unsafe_shell=True, environ_update=env)
- return rc, stderr, stdout, cmd
-
-
-def set_tablespace(cursor, db, tablespace):
- query = "ALTER DATABASE %s SET TABLESPACE %s" % (
- pg_quote_identifier(db, 'database'),
- pg_quote_identifier(tablespace, 'tablespace'))
- executed_commands.append(query)
- cursor.execute(query)
- return True
-
-# ===========================================
-# Module execution.
-#
-
-
-def main():
- argument_spec = pgutils.postgres_common_argument_spec()
- argument_spec.update(
- db=dict(type='str', required=True, aliases=['name']),
- owner=dict(type='str', default=''),
- template=dict(type='str', default=''),
- encoding=dict(type='str', default=''),
- lc_collate=dict(type='str', default=''),
- lc_ctype=dict(type='str', default=''),
- state=dict(type='str', default='present', choices=['absent', 'dump', 'present', 'restore']),
- target=dict(type='path', default=''),
- target_opts=dict(type='str', default=''),
- maintenance_db=dict(type='str', default="postgres"),
- session_role=dict(type='str'),
- conn_limit=dict(type='str', default=''),
- tablespace=dict(type='path', default=''),
- dump_extra_args=dict(type='str', default=None),
- )
-
- module = AnsibleModule(
- argument_spec=argument_spec,
- supports_check_mode=True
- )
-
- db = module.params["db"]
- owner = module.params["owner"]
- template = module.params["template"]
- encoding = module.params["encoding"]
- lc_collate = module.params["lc_collate"]
- lc_ctype = module.params["lc_ctype"]
- target = module.params["target"]
- target_opts = module.params["target_opts"]
- state = module.params["state"]
- changed = False
- maintenance_db = module.params['maintenance_db']
- session_role = module.params["session_role"]
- conn_limit = module.params['conn_limit']
- tablespace = module.params['tablespace']
- dump_extra_args = module.params['dump_extra_args']
-
- raw_connection = state in ("dump", "restore")
-
- if not raw_connection:
- pgutils.ensure_required_libs(module)
-
- # To use defaults values, keyword arguments must be absent, so
- # check which values are empty and don't include in the **kw
- # dictionary
- params_map = {
- "login_host": "host",
- "login_user": "user",
- "login_password": "password",
- "port": "port",
- "ssl_mode": "sslmode",
- "ca_cert": "sslrootcert"
- }
- kw = dict((params_map[k], v) for (k, v) in iteritems(module.params)
- if k in params_map and v != '' and v is not None)
-
- # If a login_unix_socket is specified, incorporate it here.
- is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost"
-
- if is_localhost and module.params["login_unix_socket"] != "":
- kw["host"] = module.params["login_unix_socket"]
-
- if target == "":
- target = "{0}/{1}.sql".format(os.getcwd(), db)
- target = os.path.expanduser(target)
-
- if not raw_connection:
- try:
- db_connection = psycopg2.connect(database=maintenance_db, **kw)
-
- # Enable autocommit so we can create databases
- if psycopg2.__version__ >= '2.4.2':
- db_connection.autocommit = True
- else:
- db_connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
- cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
-
- except TypeError as e:
- if 'sslrootcert' in e.args[0]:
- module.fail_json(msg='Postgresql server must be at least version 8.4 to support sslrootcert. Exception: {0}'.format(to_native(e)),
- exception=traceback.format_exc())
- module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
-
- except Exception as e:
- module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
-
- if session_role:
- try:
- cursor.execute('SET ROLE "%s"' % session_role)
- except Exception as e:
- module.fail_json(msg="Could not switch role: %s" % to_native(e), exception=traceback.format_exc())
-
- try:
- if module.check_mode:
- if state == "absent":
- changed = db_exists(cursor, db)
- elif state == "present":
- changed = not db_matches(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace)
- module.exit_json(changed=changed, db=db, executed_commands=executed_commands)
-
- if state == "absent":
- try:
- changed = db_delete(cursor, db)
- except SQLParseError as e:
- module.fail_json(msg=to_native(e), exception=traceback.format_exc())
-
- elif state == "present":
- try:
- changed = db_create(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace)
- except SQLParseError as e:
- module.fail_json(msg=to_native(e), exception=traceback.format_exc())
-
- elif state in ("dump", "restore"):
- method = state == "dump" and db_dump or db_restore
- try:
- if state == 'dump':
- rc, stdout, stderr, cmd = method(module, target, target_opts, db, dump_extra_args, **kw)
- else:
- rc, stdout, stderr, cmd = method(module, target, target_opts, db, **kw)
-
- if rc != 0:
- module.fail_json(msg=stderr, stdout=stdout, rc=rc, cmd=cmd)
- else:
- module.exit_json(changed=True, msg=stdout, stderr=stderr, rc=rc, cmd=cmd,
- executed_commands=executed_commands)
- except SQLParseError as e:
- module.fail_json(msg=to_native(e), exception=traceback.format_exc())
-
- except NotSupportedError as e:
- module.fail_json(msg=to_native(e), exception=traceback.format_exc())
- except SystemExit:
- # Avoid catching this on Python 2.4
- raise
- except Exception as e:
- module.fail_json(msg="Database query failed: %s" % to_native(e), exception=traceback.format_exc())
-
- module.exit_json(changed=changed, db=db, executed_commands=executed_commands)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/postgresql_privs.py b/test/support/integration/plugins/modules/postgresql_privs.py
deleted file mode 100644
index ba8324dd..00000000
--- a/test/support/integration/plugins/modules/postgresql_privs.py
+++ /dev/null
@@ -1,1097 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: Ansible Project
-# Copyright: (c) 2019, Tobias Birkefeld (@tcraxs) <t@craxs.de>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['stableinterface'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: postgresql_privs
-version_added: '1.2'
-short_description: Grant or revoke privileges on PostgreSQL database objects
-description:
-- Grant or revoke privileges on PostgreSQL database objects.
-- This module is basically a wrapper around most of the functionality of
- PostgreSQL's GRANT and REVOKE statements with detection of changes
- (GRANT/REVOKE I(privs) ON I(type) I(objs) TO/FROM I(roles)).
-options:
- database:
- description:
- - Name of database to connect to.
- required: yes
- type: str
- aliases:
- - db
- - login_db
- state:
- description:
- - If C(present), the specified privileges are granted, if C(absent) they are revoked.
- type: str
- default: present
- choices: [ absent, present ]
- privs:
- description:
- - Comma separated list of privileges to grant/revoke.
- type: str
- aliases:
- - priv
- type:
- description:
- - Type of database object to set privileges on.
- - The C(default_privs) choice is available starting at version 2.7.
- - The C(foreign_data_wrapper) and C(foreign_server) object types are available from Ansible version '2.8'.
- - The C(type) choice is available from Ansible version '2.10'.
- type: str
- default: table
- choices: [ database, default_privs, foreign_data_wrapper, foreign_server, function,
- group, language, table, tablespace, schema, sequence, type ]
- objs:
- description:
- - Comma separated list of database objects to set privileges on.
- - If I(type) is C(table), C(partition table), C(sequence) or C(function),
- the special valueC(ALL_IN_SCHEMA) can be provided instead to specify all
- database objects of type I(type) in the schema specified via I(schema).
- (This also works with PostgreSQL < 9.0.) (C(ALL_IN_SCHEMA) is available
- for C(function) and C(partition table) from version 2.8)
- - If I(type) is C(database), this parameter can be omitted, in which case
- privileges are set for the database specified via I(database).
- - 'If I(type) is I(function), colons (":") in object names will be
- replaced with commas (needed to specify function signatures, see examples)'
- type: str
- aliases:
- - obj
- schema:
- description:
- - Schema that contains the database objects specified via I(objs).
- - May only be provided if I(type) is C(table), C(sequence), C(function), C(type),
- or C(default_privs). Defaults to C(public) in these cases.
- - Pay attention, for embedded types when I(type=type)
- I(schema) can be C(pg_catalog) or C(information_schema) respectively.
- type: str
- roles:
- description:
- - Comma separated list of role (user/group) names to set permissions for.
- - The special value C(PUBLIC) can be provided instead to set permissions
- for the implicitly defined PUBLIC group.
- type: str
- required: yes
- aliases:
- - role
- fail_on_role:
- version_added: '2.8'
- description:
- - If C(yes), fail when target role (for whom privs need to be granted) does not exist.
- Otherwise just warn and continue.
- default: yes
- type: bool
- session_role:
- version_added: '2.8'
- description:
- - Switch to session_role after connecting.
- - The specified session_role must be a role that the current login_user is a member of.
- - Permissions checking for SQL commands is carried out as though the session_role were the one that had logged in originally.
- type: str
- target_roles:
- description:
- - A list of existing role (user/group) names to set as the
- default permissions for database objects subsequently created by them.
- - Parameter I(target_roles) is only available with C(type=default_privs).
- type: str
- version_added: '2.8'
- grant_option:
- description:
- - Whether C(role) may grant/revoke the specified privileges/group memberships to others.
- - Set to C(no) to revoke GRANT OPTION, leave unspecified to make no changes.
- - I(grant_option) only has an effect if I(state) is C(present).
- type: bool
- aliases:
- - admin_option
- host:
- description:
- - Database host address. If unspecified, connect via Unix socket.
- type: str
- aliases:
- - login_host
- port:
- description:
- - Database port to connect to.
- type: int
- default: 5432
- aliases:
- - login_port
- unix_socket:
- description:
- - Path to a Unix domain socket for local connections.
- type: str
- aliases:
- - login_unix_socket
- login:
- description:
- - The username to authenticate with.
- type: str
- default: postgres
- aliases:
- - login_user
- password:
- description:
- - The password to authenticate with.
- type: str
- aliases:
- - login_password
- ssl_mode:
- description:
- - Determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
- - See https://www.postgresql.org/docs/current/static/libpq-ssl.html for more information on the modes.
- - Default of C(prefer) matches libpq default.
- type: str
- default: prefer
- choices: [ allow, disable, prefer, require, verify-ca, verify-full ]
- version_added: '2.3'
- ca_cert:
- description:
- - Specifies the name of a file containing SSL certificate authority (CA) certificate(s).
- - If the file exists, the server's certificate will be verified to be signed by one of these authorities.
- version_added: '2.3'
- type: str
- aliases:
- - ssl_rootcert
-
-notes:
-- Parameters that accept comma separated lists (I(privs), I(objs), I(roles))
- have singular alias names (I(priv), I(obj), I(role)).
-- To revoke only C(GRANT OPTION) for a specific object, set I(state) to
- C(present) and I(grant_option) to C(no) (see examples).
-- Note that when revoking privileges from a role R, this role may still have
- access via privileges granted to any role R is a member of including C(PUBLIC).
-- Note that when revoking privileges from a role R, you do so as the user
- specified via I(login). If R has been granted the same privileges by
- another user also, R can still access database objects via these privileges.
-- When revoking privileges, C(RESTRICT) is assumed (see PostgreSQL docs).
-
-seealso:
-- module: postgresql_user
-- module: postgresql_owner
-- module: postgresql_membership
-- name: PostgreSQL privileges
- description: General information about PostgreSQL privileges.
- link: https://www.postgresql.org/docs/current/ddl-priv.html
-- name: PostgreSQL GRANT command reference
- description: Complete reference of the PostgreSQL GRANT command documentation.
- link: https://www.postgresql.org/docs/current/sql-grant.html
-- name: PostgreSQL REVOKE command reference
- description: Complete reference of the PostgreSQL REVOKE command documentation.
- link: https://www.postgresql.org/docs/current/sql-revoke.html
-
-extends_documentation_fragment:
-- postgres
-
-author:
-- Bernhard Weitzhofer (@b6d)
-- Tobias Birkefeld (@tcraxs)
-'''
-
-EXAMPLES = r'''
-# On database "library":
-# GRANT SELECT, INSERT, UPDATE ON TABLE public.books, public.authors
-# TO librarian, reader WITH GRANT OPTION
-- name: Grant privs to librarian and reader on database library
- postgresql_privs:
- database: library
- state: present
- privs: SELECT,INSERT,UPDATE
- type: table
- objs: books,authors
- schema: public
- roles: librarian,reader
- grant_option: yes
-
-- name: Same as above leveraging default values
- postgresql_privs:
- db: library
- privs: SELECT,INSERT,UPDATE
- objs: books,authors
- roles: librarian,reader
- grant_option: yes
-
-# REVOKE GRANT OPTION FOR INSERT ON TABLE books FROM reader
-# Note that role "reader" will be *granted* INSERT privilege itself if this
-# isn't already the case (since state: present).
-- name: Revoke privs from reader
- postgresql_privs:
- db: library
- state: present
- priv: INSERT
- obj: books
- role: reader
- grant_option: no
-
-# "public" is the default schema. This also works for PostgreSQL 8.x.
-- name: REVOKE INSERT, UPDATE ON ALL TABLES IN SCHEMA public FROM reader
- postgresql_privs:
- db: library
- state: absent
- privs: INSERT,UPDATE
- objs: ALL_IN_SCHEMA
- role: reader
-
-- name: GRANT ALL PRIVILEGES ON SCHEMA public, math TO librarian
- postgresql_privs:
- db: library
- privs: ALL
- type: schema
- objs: public,math
- role: librarian
-
-# Note the separation of arguments with colons.
-- name: GRANT ALL PRIVILEGES ON FUNCTION math.add(int, int) TO librarian, reader
- postgresql_privs:
- db: library
- privs: ALL
- type: function
- obj: add(int:int)
- schema: math
- roles: librarian,reader
-
-# Note that group role memberships apply cluster-wide and therefore are not
-# restricted to database "library" here.
-- name: GRANT librarian, reader TO alice, bob WITH ADMIN OPTION
- postgresql_privs:
- db: library
- type: group
- objs: librarian,reader
- roles: alice,bob
- admin_option: yes
-
-# Note that here "db: postgres" specifies the database to connect to, not the
-# database to grant privileges on (which is specified via the "objs" param)
-- name: GRANT ALL PRIVILEGES ON DATABASE library TO librarian
- postgresql_privs:
- db: postgres
- privs: ALL
- type: database
- obj: library
- role: librarian
-
-# If objs is omitted for type "database", it defaults to the database
-# to which the connection is established
-- name: GRANT ALL PRIVILEGES ON DATABASE library TO librarian
- postgresql_privs:
- db: library
- privs: ALL
- type: database
- role: librarian
-
-# Available since version 2.7
-# Objs must be set, ALL_DEFAULT to TABLES/SEQUENCES/TYPES/FUNCTIONS
-# ALL_DEFAULT works only with privs=ALL
-# For specific
-- name: ALTER DEFAULT PRIVILEGES ON DATABASE library TO librarian
- postgresql_privs:
- db: library
- objs: ALL_DEFAULT
- privs: ALL
- type: default_privs
- role: librarian
- grant_option: yes
-
-# Available since version 2.7
-# Objs must be set, ALL_DEFAULT to TABLES/SEQUENCES/TYPES/FUNCTIONS
-# ALL_DEFAULT works only with privs=ALL
-# For specific
-- name: ALTER DEFAULT PRIVILEGES ON DATABASE library TO reader, step 1
- postgresql_privs:
- db: library
- objs: TABLES,SEQUENCES
- privs: SELECT
- type: default_privs
- role: reader
-
-- name: ALTER DEFAULT PRIVILEGES ON DATABASE library TO reader, step 2
- postgresql_privs:
- db: library
- objs: TYPES
- privs: USAGE
- type: default_privs
- role: reader
-
-# Available since version 2.8
-- name: GRANT ALL PRIVILEGES ON FOREIGN DATA WRAPPER fdw TO reader
- postgresql_privs:
- db: test
- objs: fdw
- privs: ALL
- type: foreign_data_wrapper
- role: reader
-
-# Available since version 2.10
-- name: GRANT ALL PRIVILEGES ON TYPE customtype TO reader
- postgresql_privs:
- db: test
- objs: customtype
- privs: ALL
- type: type
- role: reader
-
-# Available since version 2.8
-- name: GRANT ALL PRIVILEGES ON FOREIGN SERVER fdw_server TO reader
- postgresql_privs:
- db: test
- objs: fdw_server
- privs: ALL
- type: foreign_server
- role: reader
-
-# Available since version 2.8
-# Grant 'execute' permissions on all functions in schema 'common' to role 'caller'
-- name: GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA common TO caller
- postgresql_privs:
- type: function
- state: present
- privs: EXECUTE
- roles: caller
- objs: ALL_IN_SCHEMA
- schema: common
-
-# Available since version 2.8
-# ALTER DEFAULT PRIVILEGES FOR ROLE librarian IN SCHEMA library GRANT SELECT ON TABLES TO reader
-# GRANT SELECT privileges for new TABLES objects created by librarian as
-# default to the role reader.
-# For specific
-- name: ALTER privs
- postgresql_privs:
- db: library
- schema: library
- objs: TABLES
- privs: SELECT
- type: default_privs
- role: reader
- target_roles: librarian
-
-# Available since version 2.8
-# ALTER DEFAULT PRIVILEGES FOR ROLE librarian IN SCHEMA library REVOKE SELECT ON TABLES FROM reader
-# REVOKE SELECT privileges for new TABLES objects created by librarian as
-# default from the role reader.
-# For specific
-- name: ALTER privs
- postgresql_privs:
- db: library
- state: absent
- schema: library
- objs: TABLES
- privs: SELECT
- type: default_privs
- role: reader
- target_roles: librarian
-
-# Available since version 2.10
-- name: Grant type privileges for pg_catalog.numeric type to alice
- postgresql_privs:
- type: type
- roles: alice
- privs: ALL
- objs: numeric
- schema: pg_catalog
- db: acme
-'''
-
-RETURN = r'''
-queries:
- description: List of executed queries.
- returned: always
- type: list
- sample: ['REVOKE GRANT OPTION FOR INSERT ON TABLE "books" FROM "reader";']
- version_added: '2.8'
-'''
-
-import traceback
-
-PSYCOPG2_IMP_ERR = None
-try:
- import psycopg2
- import psycopg2.extensions
-except ImportError:
- PSYCOPG2_IMP_ERR = traceback.format_exc()
- psycopg2 = None
-
-# import module snippets
-from ansible.module_utils.basic import AnsibleModule, missing_required_lib
-from ansible.module_utils.database import pg_quote_identifier
-from ansible.module_utils.postgres import postgres_common_argument_spec
-from ansible.module_utils._text import to_native
-
-VALID_PRIVS = frozenset(('SELECT', 'INSERT', 'UPDATE', 'DELETE', 'TRUNCATE',
- 'REFERENCES', 'TRIGGER', 'CREATE', 'CONNECT',
- 'TEMPORARY', 'TEMP', 'EXECUTE', 'USAGE', 'ALL', 'USAGE'))
-VALID_DEFAULT_OBJS = {'TABLES': ('ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE', 'TRUNCATE', 'REFERENCES', 'TRIGGER'),
- 'SEQUENCES': ('ALL', 'SELECT', 'UPDATE', 'USAGE'),
- 'FUNCTIONS': ('ALL', 'EXECUTE'),
- 'TYPES': ('ALL', 'USAGE')}
-
-executed_queries = []
-
-
-class Error(Exception):
- pass
-
-
-def role_exists(module, cursor, rolname):
- """Check user exists or not"""
- query = "SELECT 1 FROM pg_roles WHERE rolname = '%s'" % rolname
- try:
- cursor.execute(query)
- return cursor.rowcount > 0
-
- except Exception as e:
- module.fail_json(msg="Cannot execute SQL '%s': %s" % (query, to_native(e)))
-
- return False
-
-
-# We don't have functools.partial in Python < 2.5
-def partial(f, *args, **kwargs):
- """Partial function application"""
-
- def g(*g_args, **g_kwargs):
- new_kwargs = kwargs.copy()
- new_kwargs.update(g_kwargs)
- return f(*(args + g_args), **g_kwargs)
-
- g.f = f
- g.args = args
- g.kwargs = kwargs
- return g
-
-
-class Connection(object):
- """Wrapper around a psycopg2 connection with some convenience methods"""
-
- def __init__(self, params, module):
- self.database = params.database
- self.module = module
- # To use defaults values, keyword arguments must be absent, so
- # check which values are empty and don't include in the **kw
- # dictionary
- params_map = {
- "host": "host",
- "login": "user",
- "password": "password",
- "port": "port",
- "database": "database",
- "ssl_mode": "sslmode",
- "ca_cert": "sslrootcert"
- }
-
- kw = dict((params_map[k], getattr(params, k)) for k in params_map
- if getattr(params, k) != '' and getattr(params, k) is not None)
-
- # If a unix_socket is specified, incorporate it here.
- is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost"
- if is_localhost and params.unix_socket != "":
- kw["host"] = params.unix_socket
-
- sslrootcert = params.ca_cert
- if psycopg2.__version__ < '2.4.3' and sslrootcert is not None:
- raise ValueError('psycopg2 must be at least 2.4.3 in order to user the ca_cert parameter')
-
- self.connection = psycopg2.connect(**kw)
- self.cursor = self.connection.cursor()
-
- def commit(self):
- self.connection.commit()
-
- def rollback(self):
- self.connection.rollback()
-
- @property
- def encoding(self):
- """Connection encoding in Python-compatible form"""
- return psycopg2.extensions.encodings[self.connection.encoding]
-
- # Methods for querying database objects
-
- # PostgreSQL < 9.0 doesn't support "ALL TABLES IN SCHEMA schema"-like
- # phrases in GRANT or REVOKE statements, therefore alternative methods are
- # provided here.
-
- def schema_exists(self, schema):
- query = """SELECT count(*)
- FROM pg_catalog.pg_namespace WHERE nspname = %s"""
- self.cursor.execute(query, (schema,))
- return self.cursor.fetchone()[0] > 0
-
- def get_all_tables_in_schema(self, schema):
- if not self.schema_exists(schema):
- raise Error('Schema "%s" does not exist.' % schema)
- query = """SELECT relname
- FROM pg_catalog.pg_class c
- JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
- WHERE nspname = %s AND relkind in ('r', 'v', 'm', 'p')"""
- self.cursor.execute(query, (schema,))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_all_sequences_in_schema(self, schema):
- if not self.schema_exists(schema):
- raise Error('Schema "%s" does not exist.' % schema)
- query = """SELECT relname
- FROM pg_catalog.pg_class c
- JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
- WHERE nspname = %s AND relkind = 'S'"""
- self.cursor.execute(query, (schema,))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_all_functions_in_schema(self, schema):
- if not self.schema_exists(schema):
- raise Error('Schema "%s" does not exist.' % schema)
- query = """SELECT p.proname, oidvectortypes(p.proargtypes)
- FROM pg_catalog.pg_proc p
- JOIN pg_namespace n ON n.oid = p.pronamespace
- WHERE nspname = %s"""
- self.cursor.execute(query, (schema,))
- return ["%s(%s)" % (t[0], t[1]) for t in self.cursor.fetchall()]
-
- # Methods for getting access control lists and group membership info
-
- # To determine whether anything has changed after granting/revoking
- # privileges, we compare the access control lists of the specified database
- # objects before and afterwards. Python's list/string comparison should
- # suffice for change detection, we should not actually have to parse ACLs.
- # The same should apply to group membership information.
-
- def get_table_acls(self, schema, tables):
- query = """SELECT relacl
- FROM pg_catalog.pg_class c
- JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
- WHERE nspname = %s AND relkind in ('r','p','v','m') AND relname = ANY (%s)
- ORDER BY relname"""
- self.cursor.execute(query, (schema, tables))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_sequence_acls(self, schema, sequences):
- query = """SELECT relacl
- FROM pg_catalog.pg_class c
- JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
- WHERE nspname = %s AND relkind = 'S' AND relname = ANY (%s)
- ORDER BY relname"""
- self.cursor.execute(query, (schema, sequences))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_function_acls(self, schema, function_signatures):
- funcnames = [f.split('(', 1)[0] for f in function_signatures]
- query = """SELECT proacl
- FROM pg_catalog.pg_proc p
- JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
- WHERE nspname = %s AND proname = ANY (%s)
- ORDER BY proname, proargtypes"""
- self.cursor.execute(query, (schema, funcnames))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_schema_acls(self, schemas):
- query = """SELECT nspacl FROM pg_catalog.pg_namespace
- WHERE nspname = ANY (%s) ORDER BY nspname"""
- self.cursor.execute(query, (schemas,))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_language_acls(self, languages):
- query = """SELECT lanacl FROM pg_catalog.pg_language
- WHERE lanname = ANY (%s) ORDER BY lanname"""
- self.cursor.execute(query, (languages,))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_tablespace_acls(self, tablespaces):
- query = """SELECT spcacl FROM pg_catalog.pg_tablespace
- WHERE spcname = ANY (%s) ORDER BY spcname"""
- self.cursor.execute(query, (tablespaces,))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_database_acls(self, databases):
- query = """SELECT datacl FROM pg_catalog.pg_database
- WHERE datname = ANY (%s) ORDER BY datname"""
- self.cursor.execute(query, (databases,))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_group_memberships(self, groups):
- query = """SELECT roleid, grantor, member, admin_option
- FROM pg_catalog.pg_auth_members am
- JOIN pg_catalog.pg_roles r ON r.oid = am.roleid
- WHERE r.rolname = ANY(%s)
- ORDER BY roleid, grantor, member"""
- self.cursor.execute(query, (groups,))
- return self.cursor.fetchall()
-
- def get_default_privs(self, schema, *args):
- query = """SELECT defaclacl
- FROM pg_default_acl a
- JOIN pg_namespace b ON a.defaclnamespace=b.oid
- WHERE b.nspname = %s;"""
- self.cursor.execute(query, (schema,))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_foreign_data_wrapper_acls(self, fdws):
- query = """SELECT fdwacl FROM pg_catalog.pg_foreign_data_wrapper
- WHERE fdwname = ANY (%s) ORDER BY fdwname"""
- self.cursor.execute(query, (fdws,))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_foreign_server_acls(self, fs):
- query = """SELECT srvacl FROM pg_catalog.pg_foreign_server
- WHERE srvname = ANY (%s) ORDER BY srvname"""
- self.cursor.execute(query, (fs,))
- return [t[0] for t in self.cursor.fetchall()]
-
- def get_type_acls(self, schema, types):
- query = """SELECT t.typacl FROM pg_catalog.pg_type t
- JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
- WHERE n.nspname = %s AND t.typname = ANY (%s) ORDER BY typname"""
- self.cursor.execute(query, (schema, types))
- return [t[0] for t in self.cursor.fetchall()]
-
- # Manipulating privileges
-
- def manipulate_privs(self, obj_type, privs, objs, roles, target_roles,
- state, grant_option, schema_qualifier=None, fail_on_role=True):
- """Manipulate database object privileges.
-
- :param obj_type: Type of database object to grant/revoke
- privileges for.
- :param privs: Either a list of privileges to grant/revoke
- or None if type is "group".
- :param objs: List of database objects to grant/revoke
- privileges for.
- :param roles: Either a list of role names or "PUBLIC"
- for the implicitly defined "PUBLIC" group
- :param target_roles: List of role names to grant/revoke
- default privileges as.
- :param state: "present" to grant privileges, "absent" to revoke.
- :param grant_option: Only for state "present": If True, set
- grant/admin option. If False, revoke it.
- If None, don't change grant option.
- :param schema_qualifier: Some object types ("TABLE", "SEQUENCE",
- "FUNCTION") must be qualified by schema.
- Ignored for other Types.
- """
- # get_status: function to get current status
- if obj_type == 'table':
- get_status = partial(self.get_table_acls, schema_qualifier)
- elif obj_type == 'sequence':
- get_status = partial(self.get_sequence_acls, schema_qualifier)
- elif obj_type == 'function':
- get_status = partial(self.get_function_acls, schema_qualifier)
- elif obj_type == 'schema':
- get_status = self.get_schema_acls
- elif obj_type == 'language':
- get_status = self.get_language_acls
- elif obj_type == 'tablespace':
- get_status = self.get_tablespace_acls
- elif obj_type == 'database':
- get_status = self.get_database_acls
- elif obj_type == 'group':
- get_status = self.get_group_memberships
- elif obj_type == 'default_privs':
- get_status = partial(self.get_default_privs, schema_qualifier)
- elif obj_type == 'foreign_data_wrapper':
- get_status = self.get_foreign_data_wrapper_acls
- elif obj_type == 'foreign_server':
- get_status = self.get_foreign_server_acls
- elif obj_type == 'type':
- get_status = partial(self.get_type_acls, schema_qualifier)
- else:
- raise Error('Unsupported database object type "%s".' % obj_type)
-
- # Return False (nothing has changed) if there are no objs to work on.
- if not objs:
- return False
-
- # obj_ids: quoted db object identifiers (sometimes schema-qualified)
- if obj_type == 'function':
- obj_ids = []
- for obj in objs:
- try:
- f, args = obj.split('(', 1)
- except Exception:
- raise Error('Illegal function signature: "%s".' % obj)
- obj_ids.append('"%s"."%s"(%s' % (schema_qualifier, f, args))
- elif obj_type in ['table', 'sequence', 'type']:
- obj_ids = ['"%s"."%s"' % (schema_qualifier, o) for o in objs]
- else:
- obj_ids = ['"%s"' % o for o in objs]
-
- # set_what: SQL-fragment specifying what to set for the target roles:
- # Either group membership or privileges on objects of a certain type
- if obj_type == 'group':
- set_what = ','.join('"%s"' % i for i in obj_ids)
- elif obj_type == 'default_privs':
- # We don't want privs to be quoted here
- set_what = ','.join(privs)
- else:
- # function types are already quoted above
- if obj_type != 'function':
- obj_ids = [pg_quote_identifier(i, 'table') for i in obj_ids]
- # Note: obj_type has been checked against a set of string literals
- # and privs was escaped when it was parsed
- # Note: Underscores are replaced with spaces to support multi-word obj_type
- set_what = '%s ON %s %s' % (','.join(privs), obj_type.replace('_', ' '),
- ','.join(obj_ids))
-
- # for_whom: SQL-fragment specifying for whom to set the above
- if roles == 'PUBLIC':
- for_whom = 'PUBLIC'
- else:
- for_whom = []
- for r in roles:
- if not role_exists(self.module, self.cursor, r):
- if fail_on_role:
- self.module.fail_json(msg="Role '%s' does not exist" % r.strip())
-
- else:
- self.module.warn("Role '%s' does not exist, pass it" % r.strip())
- else:
- for_whom.append('"%s"' % r)
-
- if not for_whom:
- return False
-
- for_whom = ','.join(for_whom)
-
- # as_who:
- as_who = None
- if target_roles:
- as_who = ','.join('"%s"' % r for r in target_roles)
-
- status_before = get_status(objs)
-
- query = QueryBuilder(state) \
- .for_objtype(obj_type) \
- .with_grant_option(grant_option) \
- .for_whom(for_whom) \
- .as_who(as_who) \
- .for_schema(schema_qualifier) \
- .set_what(set_what) \
- .for_objs(objs) \
- .build()
-
- executed_queries.append(query)
- self.cursor.execute(query)
- status_after = get_status(objs)
-
- def nonesorted(e):
- # For python 3+ that can fail trying
- # to compare NoneType elements by sort method.
- if e is None:
- return ''
- return e
-
- status_before.sort(key=nonesorted)
- status_after.sort(key=nonesorted)
- return status_before != status_after
-
-
-class QueryBuilder(object):
- def __init__(self, state):
- self._grant_option = None
- self._for_whom = None
- self._as_who = None
- self._set_what = None
- self._obj_type = None
- self._state = state
- self._schema = None
- self._objs = None
- self.query = []
-
- def for_objs(self, objs):
- self._objs = objs
- return self
-
- def for_schema(self, schema):
- self._schema = schema
- return self
-
- def with_grant_option(self, option):
- self._grant_option = option
- return self
-
- def for_whom(self, who):
- self._for_whom = who
- return self
-
- def as_who(self, target_roles):
- self._as_who = target_roles
- return self
-
- def set_what(self, what):
- self._set_what = what
- return self
-
- def for_objtype(self, objtype):
- self._obj_type = objtype
- return self
-
- def build(self):
- if self._state == 'present':
- self.build_present()
- elif self._state == 'absent':
- self.build_absent()
- else:
- self.build_absent()
- return '\n'.join(self.query)
-
- def add_default_revoke(self):
- for obj in self._objs:
- if self._as_who:
- self.query.append(
- 'ALTER DEFAULT PRIVILEGES FOR ROLE {0} IN SCHEMA {1} REVOKE ALL ON {2} FROM {3};'.format(self._as_who,
- self._schema, obj,
- self._for_whom))
- else:
- self.query.append(
- 'ALTER DEFAULT PRIVILEGES IN SCHEMA {0} REVOKE ALL ON {1} FROM {2};'.format(self._schema, obj,
- self._for_whom))
-
- def add_grant_option(self):
- if self._grant_option:
- if self._obj_type == 'group':
- self.query[-1] += ' WITH ADMIN OPTION;'
- else:
- self.query[-1] += ' WITH GRANT OPTION;'
- else:
- self.query[-1] += ';'
- if self._obj_type == 'group':
- self.query.append('REVOKE ADMIN OPTION FOR {0} FROM {1};'.format(self._set_what, self._for_whom))
- elif not self._obj_type == 'default_privs':
- self.query.append('REVOKE GRANT OPTION FOR {0} FROM {1};'.format(self._set_what, self._for_whom))
-
- def add_default_priv(self):
- for obj in self._objs:
- if self._as_who:
- self.query.append(
- 'ALTER DEFAULT PRIVILEGES FOR ROLE {0} IN SCHEMA {1} GRANT {2} ON {3} TO {4}'.format(self._as_who,
- self._schema,
- self._set_what,
- obj,
- self._for_whom))
- else:
- self.query.append(
- 'ALTER DEFAULT PRIVILEGES IN SCHEMA {0} GRANT {1} ON {2} TO {3}'.format(self._schema,
- self._set_what,
- obj,
- self._for_whom))
- self.add_grant_option()
- if self._as_who:
- self.query.append(
- 'ALTER DEFAULT PRIVILEGES FOR ROLE {0} IN SCHEMA {1} GRANT USAGE ON TYPES TO {2}'.format(self._as_who,
- self._schema,
- self._for_whom))
- else:
- self.query.append(
- 'ALTER DEFAULT PRIVILEGES IN SCHEMA {0} GRANT USAGE ON TYPES TO {1}'.format(self._schema, self._for_whom))
- self.add_grant_option()
-
- def build_present(self):
- if self._obj_type == 'default_privs':
- self.add_default_revoke()
- self.add_default_priv()
- else:
- self.query.append('GRANT {0} TO {1}'.format(self._set_what, self._for_whom))
- self.add_grant_option()
-
- def build_absent(self):
- if self._obj_type == 'default_privs':
- self.query = []
- for obj in ['TABLES', 'SEQUENCES', 'TYPES']:
- if self._as_who:
- self.query.append(
- 'ALTER DEFAULT PRIVILEGES FOR ROLE {0} IN SCHEMA {1} REVOKE ALL ON {2} FROM {3};'.format(self._as_who,
- self._schema, obj,
- self._for_whom))
- else:
- self.query.append(
- 'ALTER DEFAULT PRIVILEGES IN SCHEMA {0} REVOKE ALL ON {1} FROM {2};'.format(self._schema, obj,
- self._for_whom))
- else:
- self.query.append('REVOKE {0} FROM {1};'.format(self._set_what, self._for_whom))
-
-
-def main():
- argument_spec = postgres_common_argument_spec()
- argument_spec.update(
- database=dict(required=True, aliases=['db', 'login_db']),
- state=dict(default='present', choices=['present', 'absent']),
- privs=dict(required=False, aliases=['priv']),
- type=dict(default='table',
- choices=['table',
- 'sequence',
- 'function',
- 'database',
- 'schema',
- 'language',
- 'tablespace',
- 'group',
- 'default_privs',
- 'foreign_data_wrapper',
- 'foreign_server',
- 'type', ]),
- objs=dict(required=False, aliases=['obj']),
- schema=dict(required=False),
- roles=dict(required=True, aliases=['role']),
- session_role=dict(required=False),
- target_roles=dict(required=False),
- grant_option=dict(required=False, type='bool',
- aliases=['admin_option']),
- host=dict(default='', aliases=['login_host']),
- unix_socket=dict(default='', aliases=['login_unix_socket']),
- login=dict(default='postgres', aliases=['login_user']),
- password=dict(default='', aliases=['login_password'], no_log=True),
- fail_on_role=dict(type='bool', default=True),
- )
-
- module = AnsibleModule(
- argument_spec=argument_spec,
- supports_check_mode=True,
- )
-
- fail_on_role = module.params['fail_on_role']
-
- # Create type object as namespace for module params
- p = type('Params', (), module.params)
- # param "schema": default, allowed depends on param "type"
- if p.type in ['table', 'sequence', 'function', 'type', 'default_privs']:
- p.schema = p.schema or 'public'
- elif p.schema:
- module.fail_json(msg='Argument "schema" is not allowed '
- 'for type "%s".' % p.type)
-
- # param "objs": default, required depends on param "type"
- if p.type == 'database':
- p.objs = p.objs or p.database
- elif not p.objs:
- module.fail_json(msg='Argument "objs" is required '
- 'for type "%s".' % p.type)
-
- # param "privs": allowed, required depends on param "type"
- if p.type == 'group':
- if p.privs:
- module.fail_json(msg='Argument "privs" is not allowed '
- 'for type "group".')
- elif not p.privs:
- module.fail_json(msg='Argument "privs" is required '
- 'for type "%s".' % p.type)
-
- # Connect to Database
- if not psycopg2:
- module.fail_json(msg=missing_required_lib('psycopg2'), exception=PSYCOPG2_IMP_ERR)
- try:
- conn = Connection(p, module)
- except psycopg2.Error as e:
- module.fail_json(msg='Could not connect to database: %s' % to_native(e), exception=traceback.format_exc())
- except TypeError as e:
- if 'sslrootcert' in e.args[0]:
- module.fail_json(msg='Postgresql server must be at least version 8.4 to support sslrootcert')
- module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
- except ValueError as e:
- # We raise this when the psycopg library is too old
- module.fail_json(msg=to_native(e))
-
- if p.session_role:
- try:
- conn.cursor.execute('SET ROLE "%s"' % p.session_role)
- except Exception as e:
- module.fail_json(msg="Could not switch to role %s: %s" % (p.session_role, to_native(e)), exception=traceback.format_exc())
-
- try:
- # privs
- if p.privs:
- privs = frozenset(pr.upper() for pr in p.privs.split(','))
- if not privs.issubset(VALID_PRIVS):
- module.fail_json(msg='Invalid privileges specified: %s' % privs.difference(VALID_PRIVS))
- else:
- privs = None
- # objs:
- if p.type == 'table' and p.objs == 'ALL_IN_SCHEMA':
- objs = conn.get_all_tables_in_schema(p.schema)
- elif p.type == 'sequence' and p.objs == 'ALL_IN_SCHEMA':
- objs = conn.get_all_sequences_in_schema(p.schema)
- elif p.type == 'function' and p.objs == 'ALL_IN_SCHEMA':
- objs = conn.get_all_functions_in_schema(p.schema)
- elif p.type == 'default_privs':
- if p.objs == 'ALL_DEFAULT':
- objs = frozenset(VALID_DEFAULT_OBJS.keys())
- else:
- objs = frozenset(obj.upper() for obj in p.objs.split(','))
- if not objs.issubset(VALID_DEFAULT_OBJS):
- module.fail_json(
- msg='Invalid Object set specified: %s' % objs.difference(VALID_DEFAULT_OBJS.keys()))
- # Again, do we have valid privs specified for object type:
- valid_objects_for_priv = frozenset(obj for obj in objs if privs.issubset(VALID_DEFAULT_OBJS[obj]))
- if not valid_objects_for_priv == objs:
- module.fail_json(
- msg='Invalid priv specified. Valid object for priv: {0}. Objects: {1}'.format(
- valid_objects_for_priv, objs))
- else:
- objs = p.objs.split(',')
-
- # function signatures are encoded using ':' to separate args
- if p.type == 'function':
- objs = [obj.replace(':', ',') for obj in objs]
-
- # roles
- if p.roles == 'PUBLIC':
- roles = 'PUBLIC'
- else:
- roles = p.roles.split(',')
-
- if len(roles) == 1 and not role_exists(module, conn.cursor, roles[0]):
- module.exit_json(changed=False)
-
- if fail_on_role:
- module.fail_json(msg="Role '%s' does not exist" % roles[0].strip())
-
- else:
- module.warn("Role '%s' does not exist, nothing to do" % roles[0].strip())
-
- # check if target_roles is set with type: default_privs
- if p.target_roles and not p.type == 'default_privs':
- module.warn('"target_roles" will be ignored '
- 'Argument "type: default_privs" is required for usage of "target_roles".')
-
- # target roles
- if p.target_roles:
- target_roles = p.target_roles.split(',')
- else:
- target_roles = None
-
- changed = conn.manipulate_privs(
- obj_type=p.type,
- privs=privs,
- objs=objs,
- roles=roles,
- target_roles=target_roles,
- state=p.state,
- grant_option=p.grant_option,
- schema_qualifier=p.schema,
- fail_on_role=fail_on_role,
- )
-
- except Error as e:
- conn.rollback()
- module.fail_json(msg=e.message, exception=traceback.format_exc())
-
- except psycopg2.Error as e:
- conn.rollback()
- module.fail_json(msg=to_native(e.message))
-
- if module.check_mode:
- conn.rollback()
- else:
- conn.commit()
- module.exit_json(changed=changed, queries=executed_queries)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/postgresql_query.py b/test/support/integration/plugins/modules/postgresql_query.py
deleted file mode 100644
index 18d63e33..00000000
--- a/test/support/integration/plugins/modules/postgresql_query.py
+++ /dev/null
@@ -1,364 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2017, Felix Archambault
-# Copyright: (c) 2019, Andrew Klychkov (@Andersson007) <aaklychkov@mail.ru>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-ANSIBLE_METADATA = {
- 'metadata_version': '1.1',
- 'supported_by': 'community',
- 'status': ['preview']
-}
-
-DOCUMENTATION = r'''
----
-module: postgresql_query
-short_description: Run PostgreSQL queries
-description:
-- Runs arbitrary PostgreSQL queries.
-- Can run queries from SQL script files.
-- Does not run against backup files. Use M(postgresql_db) with I(state=restore)
- to run queries on files made by pg_dump/pg_dumpall utilities.
-version_added: '2.8'
-options:
- query:
- description:
- - SQL query to run. Variables can be escaped with psycopg2 syntax
- U(http://initd.org/psycopg/docs/usage.html).
- type: str
- positional_args:
- description:
- - List of values to be passed as positional arguments to the query.
- When the value is a list, it will be converted to PostgreSQL array.
- - Mutually exclusive with I(named_args).
- type: list
- elements: raw
- named_args:
- description:
- - Dictionary of key-value arguments to pass to the query.
- When the value is a list, it will be converted to PostgreSQL array.
- - Mutually exclusive with I(positional_args).
- type: dict
- path_to_script:
- description:
- - Path to SQL script on the remote host.
- - Returns result of the last query in the script.
- - Mutually exclusive with I(query).
- type: path
- session_role:
- description:
- - Switch to session_role after connecting. The specified session_role must
- be a role that the current login_user is a member of.
- - Permissions checking for SQL commands is carried out as though
- the session_role were the one that had logged in originally.
- type: str
- db:
- description:
- - Name of database to connect to and run queries against.
- type: str
- aliases:
- - login_db
- autocommit:
- description:
- - Execute in autocommit mode when the query can't be run inside a transaction block
- (e.g., VACUUM).
- - Mutually exclusive with I(check_mode).
- type: bool
- default: no
- version_added: '2.9'
- encoding:
- description:
- - Set the client encoding for the current session (e.g. C(UTF-8)).
- - The default is the encoding defined by the database.
- type: str
- version_added: '2.10'
-seealso:
-- module: postgresql_db
-author:
-- Felix Archambault (@archf)
-- Andrew Klychkov (@Andersson007)
-- Will Rouesnel (@wrouesnel)
-extends_documentation_fragment: postgres
-'''
-
-EXAMPLES = r'''
-- name: Simple select query to acme db
- postgresql_query:
- db: acme
- query: SELECT version()
-
-- name: Select query to db acme with positional arguments and non-default credentials
- postgresql_query:
- db: acme
- login_user: django
- login_password: mysecretpass
- query: SELECT * FROM acme WHERE id = %s AND story = %s
- positional_args:
- - 1
- - test
-
-- name: Select query to test_db with named_args
- postgresql_query:
- db: test_db
- query: SELECT * FROM test WHERE id = %(id_val)s AND story = %(story_val)s
- named_args:
- id_val: 1
- story_val: test
-
-- name: Insert query to test_table in db test_db
- postgresql_query:
- db: test_db
- query: INSERT INTO test_table (id, story) VALUES (2, 'my_long_story')
-
-- name: Run queries from SQL script using UTF-8 client encoding for session
- postgresql_query:
- db: test_db
- path_to_script: /var/lib/pgsql/test.sql
- positional_args:
- - 1
- encoding: UTF-8
-
-- name: Example of using autocommit parameter
- postgresql_query:
- db: test_db
- query: VACUUM
- autocommit: yes
-
-- name: >
- Insert data to the column of array type using positional_args.
- Note that we use quotes here, the same as for passing JSON, etc.
- postgresql_query:
- query: INSERT INTO test_table (array_column) VALUES (%s)
- positional_args:
- - '{1,2,3}'
-
-# Pass list and string vars as positional_args
-- name: Set vars
- set_fact:
- my_list:
- - 1
- - 2
- - 3
- my_arr: '{1, 2, 3}'
-
-- name: Select from test table by passing positional_args as arrays
- postgresql_query:
- query: SELECT * FROM test_array_table WHERE arr_col1 = %s AND arr_col2 = %s
- positional_args:
- - '{{ my_list }}'
- - '{{ my_arr|string }}'
-'''
-
-RETURN = r'''
-query:
- description: Query that was tried to be executed.
- returned: always
- type: str
- sample: 'SELECT * FROM bar'
-statusmessage:
- description: Attribute containing the message returned by the command.
- returned: always
- type: str
- sample: 'INSERT 0 1'
-query_result:
- description:
- - List of dictionaries in column:value form representing returned rows.
- returned: changed
- type: list
- sample: [{"Column": "Value1"},{"Column": "Value2"}]
-rowcount:
- description: Number of affected rows.
- returned: changed
- type: int
- sample: 5
-'''
-
-try:
- from psycopg2 import ProgrammingError as Psycopg2ProgrammingError
- from psycopg2.extras import DictCursor
-except ImportError:
- # it is needed for checking 'no result to fetch' in main(),
- # psycopg2 availability will be checked by connect_to_db() into
- # ansible.module_utils.postgres
- pass
-
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.postgres import (
- connect_to_db,
- get_conn_params,
- postgres_common_argument_spec,
-)
-from ansible.module_utils._text import to_native
-from ansible.module_utils.six import iteritems
-
-
-# ===========================================
-# Module execution.
-#
-
-def list_to_pg_array(elem):
- """Convert the passed list to PostgreSQL array
- represented as a string.
-
- Args:
- elem (list): List that needs to be converted.
-
- Returns:
- elem (str): String representation of PostgreSQL array.
- """
- elem = str(elem).strip('[]')
- elem = '{' + elem + '}'
- return elem
-
-
-def convert_elements_to_pg_arrays(obj):
- """Convert list elements of the passed object
- to PostgreSQL arrays represented as strings.
-
- Args:
- obj (dict or list): Object whose elements need to be converted.
-
- Returns:
- obj (dict or list): Object with converted elements.
- """
- if isinstance(obj, dict):
- for (key, elem) in iteritems(obj):
- if isinstance(elem, list):
- obj[key] = list_to_pg_array(elem)
-
- elif isinstance(obj, list):
- for i, elem in enumerate(obj):
- if isinstance(elem, list):
- obj[i] = list_to_pg_array(elem)
-
- return obj
-
-
-def main():
- argument_spec = postgres_common_argument_spec()
- argument_spec.update(
- query=dict(type='str'),
- db=dict(type='str', aliases=['login_db']),
- positional_args=dict(type='list', elements='raw'),
- named_args=dict(type='dict'),
- session_role=dict(type='str'),
- path_to_script=dict(type='path'),
- autocommit=dict(type='bool', default=False),
- encoding=dict(type='str'),
- )
-
- module = AnsibleModule(
- argument_spec=argument_spec,
- mutually_exclusive=(('positional_args', 'named_args'),),
- supports_check_mode=True,
- )
-
- query = module.params["query"]
- positional_args = module.params["positional_args"]
- named_args = module.params["named_args"]
- path_to_script = module.params["path_to_script"]
- autocommit = module.params["autocommit"]
- encoding = module.params["encoding"]
-
- if autocommit and module.check_mode:
- module.fail_json(msg="Using autocommit is mutually exclusive with check_mode")
-
- if path_to_script and query:
- module.fail_json(msg="path_to_script is mutually exclusive with query")
-
- if positional_args:
- positional_args = convert_elements_to_pg_arrays(positional_args)
-
- elif named_args:
- named_args = convert_elements_to_pg_arrays(named_args)
-
- if path_to_script:
- try:
- with open(path_to_script, 'rb') as f:
- query = to_native(f.read())
- except Exception as e:
- module.fail_json(msg="Cannot read file '%s' : %s" % (path_to_script, to_native(e)))
-
- conn_params = get_conn_params(module, module.params)
- db_connection = connect_to_db(module, conn_params, autocommit=autocommit)
- if encoding is not None:
- db_connection.set_client_encoding(encoding)
- cursor = db_connection.cursor(cursor_factory=DictCursor)
-
- # Prepare args:
- if module.params.get("positional_args"):
- arguments = module.params["positional_args"]
- elif module.params.get("named_args"):
- arguments = module.params["named_args"]
- else:
- arguments = None
-
- # Set defaults:
- changed = False
-
- # Execute query:
- try:
- cursor.execute(query, arguments)
- except Exception as e:
- if not autocommit:
- db_connection.rollback()
-
- cursor.close()
- db_connection.close()
- module.fail_json(msg="Cannot execute SQL '%s' %s: %s" % (query, arguments, to_native(e)))
-
- statusmessage = cursor.statusmessage
- rowcount = cursor.rowcount
-
- try:
- query_result = [dict(row) for row in cursor.fetchall()]
- except Psycopg2ProgrammingError as e:
- if to_native(e) == 'no results to fetch':
- query_result = {}
-
- except Exception as e:
- module.fail_json(msg="Cannot fetch rows from cursor: %s" % to_native(e))
-
- if 'SELECT' not in statusmessage:
- if 'UPDATE' in statusmessage or 'INSERT' in statusmessage or 'DELETE' in statusmessage:
- s = statusmessage.split()
- if len(s) == 3:
- if statusmessage.split()[2] != '0':
- changed = True
-
- elif len(s) == 2:
- if statusmessage.split()[1] != '0':
- changed = True
-
- else:
- changed = True
-
- else:
- changed = True
-
- if module.check_mode:
- db_connection.rollback()
- else:
- if not autocommit:
- db_connection.commit()
-
- kw = dict(
- changed=changed,
- query=cursor.query,
- statusmessage=statusmessage,
- query_result=query_result,
- rowcount=rowcount if rowcount >= 0 else 0,
- )
-
- cursor.close()
- db_connection.close()
-
- module.exit_json(**kw)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/postgresql_set.py b/test/support/integration/plugins/modules/postgresql_set.py
deleted file mode 100644
index cfbdae64..00000000
--- a/test/support/integration/plugins/modules/postgresql_set.py
+++ /dev/null
@@ -1,434 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2018, Andrew Klychkov (@Andersson007) <aaklychkov@mail.ru>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {
- 'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'
-}
-
-DOCUMENTATION = r'''
----
-module: postgresql_set
-short_description: Change a PostgreSQL server configuration parameter
-description:
- - Allows to change a PostgreSQL server configuration parameter.
- - The module uses ALTER SYSTEM command and applies changes by reload server configuration.
- - ALTER SYSTEM is used for changing server configuration parameters across the entire database cluster.
- - It can be more convenient and safe than the traditional method of manually editing the postgresql.conf file.
- - ALTER SYSTEM writes the given parameter setting to the $PGDATA/postgresql.auto.conf file,
- which is read in addition to postgresql.conf.
- - The module allows to reset parameter to boot_val (cluster initial value) by I(reset=yes) or remove parameter
- string from postgresql.auto.conf and reload I(value=default) (for settings with postmaster context restart is required).
- - After change you can see in the ansible output the previous and
- the new parameter value and other information using returned values and M(debug) module.
-version_added: '2.8'
-options:
- name:
- description:
- - Name of PostgreSQL server parameter.
- type: str
- required: true
- value:
- description:
- - Parameter value to set.
- - To remove parameter string from postgresql.auto.conf and
- reload the server configuration you must pass I(value=default).
- With I(value=default) the playbook always returns changed is true.
- type: str
- reset:
- description:
- - Restore parameter to initial state (boot_val). Mutually exclusive with I(value).
- type: bool
- default: false
- session_role:
- description:
- - Switch to session_role after connecting. The specified session_role must
- be a role that the current login_user is a member of.
- - Permissions checking for SQL commands is carried out as though
- the session_role were the one that had logged in originally.
- type: str
- db:
- description:
- - Name of database to connect.
- type: str
- aliases:
- - login_db
-notes:
-- Supported version of PostgreSQL is 9.4 and later.
-- Pay attention, change setting with 'postmaster' context can return changed is true
- when actually nothing changes because the same value may be presented in
- several different form, for example, 1024MB, 1GB, etc. However in pg_settings
- system view it can be defined like 131072 number of 8kB pages.
- The final check of the parameter value cannot compare it because the server was
- not restarted and the value in pg_settings is not updated yet.
-- For some parameters restart of PostgreSQL server is required.
- See official documentation U(https://www.postgresql.org/docs/current/view-pg-settings.html).
-seealso:
-- module: postgresql_info
-- name: PostgreSQL server configuration
- description: General information about PostgreSQL server configuration.
- link: https://www.postgresql.org/docs/current/runtime-config.html
-- name: PostgreSQL view pg_settings reference
- description: Complete reference of the pg_settings view documentation.
- link: https://www.postgresql.org/docs/current/view-pg-settings.html
-- name: PostgreSQL ALTER SYSTEM command reference
- description: Complete reference of the ALTER SYSTEM command documentation.
- link: https://www.postgresql.org/docs/current/sql-altersystem.html
-author:
-- Andrew Klychkov (@Andersson007)
-extends_documentation_fragment: postgres
-'''
-
-EXAMPLES = r'''
-- name: Restore wal_keep_segments parameter to initial state
- postgresql_set:
- name: wal_keep_segments
- reset: yes
-
-# Set work_mem parameter to 32MB and show what's been changed and restart is required or not
-# (output example: "msg": "work_mem 4MB >> 64MB restart_req: False")
-- name: Set work mem parameter
- postgresql_set:
- name: work_mem
- value: 32mb
- register: set
-
-- debug:
- msg: "{{ set.name }} {{ set.prev_val_pretty }} >> {{ set.value_pretty }} restart_req: {{ set.restart_required }}"
- when: set.changed
-# Ensure that the restart of PostgreSQL server must be required for some parameters.
-# In this situation you see the same parameter in prev_val and value_prettyue, but 'changed=True'
-# (If you passed the value that was different from the current server setting).
-
-- name: Set log_min_duration_statement parameter to 1 second
- postgresql_set:
- name: log_min_duration_statement
- value: 1s
-
-- name: Set wal_log_hints parameter to default value (remove parameter from postgresql.auto.conf)
- postgresql_set:
- name: wal_log_hints
- value: default
-'''
-
-RETURN = r'''
-name:
- description: Name of PostgreSQL server parameter.
- returned: always
- type: str
- sample: 'shared_buffers'
-restart_required:
- description: Information about parameter current state.
- returned: always
- type: bool
- sample: true
-prev_val_pretty:
- description: Information about previous state of the parameter.
- returned: always
- type: str
- sample: '4MB'
-value_pretty:
- description: Information about current state of the parameter.
- returned: always
- type: str
- sample: '64MB'
-value:
- description:
- - Dictionary that contains the current parameter value (at the time of playbook finish).
- - Pay attention that for real change some parameters restart of PostgreSQL server is required.
- - Returns the current value in the check mode.
- returned: always
- type: dict
- sample: { "value": 67108864, "unit": "b" }
-context:
- description:
- - PostgreSQL setting context.
- returned: always
- type: str
- sample: user
-'''
-
-try:
- from psycopg2.extras import DictCursor
-except Exception:
- # psycopg2 is checked by connect_to_db()
- # from ansible.module_utils.postgres
- pass
-
-from copy import deepcopy
-
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.postgres import (
- connect_to_db,
- get_conn_params,
- postgres_common_argument_spec,
-)
-from ansible.module_utils._text import to_native
-
-PG_REQ_VER = 90400
-
-# To allow to set value like 1mb instead of 1MB, etc:
-POSSIBLE_SIZE_UNITS = ("mb", "gb", "tb")
-
-# ===========================================
-# PostgreSQL module specific support methods.
-#
-
-
-def param_get(cursor, module, name):
- query = ("SELECT name, setting, unit, context, boot_val "
- "FROM pg_settings WHERE name = %(name)s")
- try:
- cursor.execute(query, {'name': name})
- info = cursor.fetchall()
- cursor.execute("SHOW %s" % name)
- val = cursor.fetchone()
-
- except Exception as e:
- module.fail_json(msg="Unable to get %s value due to : %s" % (name, to_native(e)))
-
- raw_val = info[0][1]
- unit = info[0][2]
- context = info[0][3]
- boot_val = info[0][4]
-
- if val[0] == 'True':
- val[0] = 'on'
- elif val[0] == 'False':
- val[0] = 'off'
-
- if unit == 'kB':
- if int(raw_val) > 0:
- raw_val = int(raw_val) * 1024
- if int(boot_val) > 0:
- boot_val = int(boot_val) * 1024
-
- unit = 'b'
-
- elif unit == 'MB':
- if int(raw_val) > 0:
- raw_val = int(raw_val) * 1024 * 1024
- if int(boot_val) > 0:
- boot_val = int(boot_val) * 1024 * 1024
-
- unit = 'b'
-
- return (val[0], raw_val, unit, boot_val, context)
-
-
-def pretty_to_bytes(pretty_val):
- # The function returns a value in bytes
- # if the value contains 'B', 'kB', 'MB', 'GB', 'TB'.
- # Otherwise it returns the passed argument.
-
- val_in_bytes = None
-
- if 'kB' in pretty_val:
- num_part = int(''.join(d for d in pretty_val if d.isdigit()))
- val_in_bytes = num_part * 1024
-
- elif 'MB' in pretty_val.upper():
- num_part = int(''.join(d for d in pretty_val if d.isdigit()))
- val_in_bytes = num_part * 1024 * 1024
-
- elif 'GB' in pretty_val.upper():
- num_part = int(''.join(d for d in pretty_val if d.isdigit()))
- val_in_bytes = num_part * 1024 * 1024 * 1024
-
- elif 'TB' in pretty_val.upper():
- num_part = int(''.join(d for d in pretty_val if d.isdigit()))
- val_in_bytes = num_part * 1024 * 1024 * 1024 * 1024
-
- elif 'B' in pretty_val.upper():
- num_part = int(''.join(d for d in pretty_val if d.isdigit()))
- val_in_bytes = num_part
-
- else:
- return pretty_val
-
- return val_in_bytes
-
-
-def param_set(cursor, module, name, value, context):
- try:
- if str(value).lower() == 'default':
- query = "ALTER SYSTEM SET %s = DEFAULT" % name
- else:
- query = "ALTER SYSTEM SET %s = '%s'" % (name, value)
- cursor.execute(query)
-
- if context != 'postmaster':
- cursor.execute("SELECT pg_reload_conf()")
-
- except Exception as e:
- module.fail_json(msg="Unable to get %s value due to : %s" % (name, to_native(e)))
-
- return True
-
-
-# ===========================================
-# Module execution.
-#
-
-
-def main():
- argument_spec = postgres_common_argument_spec()
- argument_spec.update(
- name=dict(type='str', required=True),
- db=dict(type='str', aliases=['login_db']),
- value=dict(type='str'),
- reset=dict(type='bool'),
- session_role=dict(type='str'),
- )
- module = AnsibleModule(
- argument_spec=argument_spec,
- supports_check_mode=True,
- )
-
- name = module.params["name"]
- value = module.params["value"]
- reset = module.params["reset"]
-
- # Allow to pass values like 1mb instead of 1MB, etc:
- if value:
- for unit in POSSIBLE_SIZE_UNITS:
- if value[:-2].isdigit() and unit in value[-2:]:
- value = value.upper()
-
- if value and reset:
- module.fail_json(msg="%s: value and reset params are mutually exclusive" % name)
-
- if not value and not reset:
- module.fail_json(msg="%s: at least one of value or reset param must be specified" % name)
-
- conn_params = get_conn_params(module, module.params, warn_db_default=False)
- db_connection = connect_to_db(module, conn_params, autocommit=True)
- cursor = db_connection.cursor(cursor_factory=DictCursor)
-
- kw = {}
- # Check server version (needs 9.4 or later):
- ver = db_connection.server_version
- if ver < PG_REQ_VER:
- module.warn("PostgreSQL is %s version but %s or later is required" % (ver, PG_REQ_VER))
- kw = dict(
- changed=False,
- restart_required=False,
- value_pretty="",
- prev_val_pretty="",
- value={"value": "", "unit": ""},
- )
- kw['name'] = name
- db_connection.close()
- module.exit_json(**kw)
-
- # Set default returned values:
- restart_required = False
- changed = False
- kw['name'] = name
- kw['restart_required'] = False
-
- # Get info about param state:
- res = param_get(cursor, module, name)
- current_value = res[0]
- raw_val = res[1]
- unit = res[2]
- boot_val = res[3]
- context = res[4]
-
- if value == 'True':
- value = 'on'
- elif value == 'False':
- value = 'off'
-
- kw['prev_val_pretty'] = current_value
- kw['value_pretty'] = deepcopy(kw['prev_val_pretty'])
- kw['context'] = context
-
- # Do job
- if context == "internal":
- module.fail_json(msg="%s: cannot be changed (internal context). See "
- "https://www.postgresql.org/docs/current/runtime-config-preset.html" % name)
-
- if context == "postmaster":
- restart_required = True
-
- # If check_mode, just compare and exit:
- if module.check_mode:
- if pretty_to_bytes(value) == pretty_to_bytes(current_value):
- kw['changed'] = False
-
- else:
- kw['value_pretty'] = value
- kw['changed'] = True
-
- # Anyway returns current raw value in the check_mode:
- kw['value'] = dict(
- value=raw_val,
- unit=unit,
- )
- kw['restart_required'] = restart_required
- module.exit_json(**kw)
-
- # Set param:
- if value and value != current_value:
- changed = param_set(cursor, module, name, value, context)
-
- kw['value_pretty'] = value
-
- # Reset param:
- elif reset:
- if raw_val == boot_val:
- # nothing to change, exit:
- kw['value'] = dict(
- value=raw_val,
- unit=unit,
- )
- module.exit_json(**kw)
-
- changed = param_set(cursor, module, name, boot_val, context)
-
- if restart_required:
- module.warn("Restart of PostgreSQL is required for setting %s" % name)
-
- cursor.close()
- db_connection.close()
-
- # Reconnect and recheck current value:
- if context in ('sighup', 'superuser-backend', 'backend', 'superuser', 'user'):
- db_connection = connect_to_db(module, conn_params, autocommit=True)
- cursor = db_connection.cursor(cursor_factory=DictCursor)
-
- res = param_get(cursor, module, name)
- # f_ means 'final'
- f_value = res[0]
- f_raw_val = res[1]
-
- if raw_val == f_raw_val:
- changed = False
-
- else:
- changed = True
-
- kw['value_pretty'] = f_value
- kw['value'] = dict(
- value=f_raw_val,
- unit=unit,
- )
-
- cursor.close()
- db_connection.close()
-
- kw['changed'] = changed
- kw['restart_required'] = restart_required
- module.exit_json(**kw)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/postgresql_table.py b/test/support/integration/plugins/modules/postgresql_table.py
deleted file mode 100644
index 3bef03b0..00000000
--- a/test/support/integration/plugins/modules/postgresql_table.py
+++ /dev/null
@@ -1,601 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2019, Andrew Klychkov (@Andersson007) <aaklychkov@mail.ru>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {
- 'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'
-}
-
-DOCUMENTATION = r'''
----
-module: postgresql_table
-short_description: Create, drop, or modify a PostgreSQL table
-description:
-- Allows to create, drop, rename, truncate a table, or change some table attributes.
-version_added: '2.8'
-options:
- table:
- description:
- - Table name.
- required: true
- aliases:
- - name
- type: str
- state:
- description:
- - The table state. I(state=absent) is mutually exclusive with I(tablespace), I(owner), I(unlogged),
- I(like), I(including), I(columns), I(truncate), I(storage_params) and, I(rename).
- type: str
- default: present
- choices: [ absent, present ]
- tablespace:
- description:
- - Set a tablespace for the table.
- required: false
- type: str
- owner:
- description:
- - Set a table owner.
- type: str
- unlogged:
- description:
- - Create an unlogged table.
- type: bool
- default: no
- like:
- description:
- - Create a table like another table (with similar DDL).
- Mutually exclusive with I(columns), I(rename), and I(truncate).
- type: str
- including:
- description:
- - Keywords that are used with like parameter, may be DEFAULTS, CONSTRAINTS, INDEXES, STORAGE, COMMENTS or ALL.
- Needs I(like) specified. Mutually exclusive with I(columns), I(rename), and I(truncate).
- type: str
- columns:
- description:
- - Columns that are needed.
- type: list
- elements: str
- rename:
- description:
- - New table name. Mutually exclusive with I(tablespace), I(owner),
- I(unlogged), I(like), I(including), I(columns), I(truncate), and I(storage_params).
- type: str
- truncate:
- description:
- - Truncate a table. Mutually exclusive with I(tablespace), I(owner), I(unlogged),
- I(like), I(including), I(columns), I(rename), and I(storage_params).
- type: bool
- default: no
- storage_params:
- description:
- - Storage parameters like fillfactor, autovacuum_vacuum_treshold, etc.
- Mutually exclusive with I(rename) and I(truncate).
- type: list
- elements: str
- db:
- description:
- - Name of database to connect and where the table will be created.
- type: str
- aliases:
- - login_db
- session_role:
- description:
- - Switch to session_role after connecting.
- The specified session_role must be a role that the current login_user is a member of.
- - Permissions checking for SQL commands is carried out as though
- the session_role were the one that had logged in originally.
- type: str
- cascade:
- description:
- - Automatically drop objects that depend on the table (such as views).
- Used with I(state=absent) only.
- type: bool
- default: no
- version_added: '2.9'
-notes:
-- If you do not pass db parameter, tables will be created in the database
- named postgres.
-- PostgreSQL allows to create columnless table, so columns param is optional.
-- Unlogged tables are available from PostgreSQL server version 9.1.
-seealso:
-- module: postgresql_sequence
-- module: postgresql_idx
-- module: postgresql_info
-- module: postgresql_tablespace
-- module: postgresql_owner
-- module: postgresql_privs
-- module: postgresql_copy
-- name: CREATE TABLE reference
- description: Complete reference of the CREATE TABLE command documentation.
- link: https://www.postgresql.org/docs/current/sql-createtable.html
-- name: ALTER TABLE reference
- description: Complete reference of the ALTER TABLE command documentation.
- link: https://www.postgresql.org/docs/current/sql-altertable.html
-- name: DROP TABLE reference
- description: Complete reference of the DROP TABLE command documentation.
- link: https://www.postgresql.org/docs/current/sql-droptable.html
-- name: PostgreSQL data types
- description: Complete reference of the PostgreSQL data types documentation.
- link: https://www.postgresql.org/docs/current/datatype.html
-author:
-- Andrei Klychkov (@Andersson007)
-extends_documentation_fragment: postgres
-'''
-
-EXAMPLES = r'''
-- name: Create tbl2 in the acme database with the DDL like tbl1 with testuser as an owner
- postgresql_table:
- db: acme
- name: tbl2
- like: tbl1
- owner: testuser
-
-- name: Create tbl2 in the acme database and tablespace ssd with the DDL like tbl1 including comments and indexes
- postgresql_table:
- db: acme
- table: tbl2
- like: tbl1
- including: comments, indexes
- tablespace: ssd
-
-- name: Create test_table with several columns in ssd tablespace with fillfactor=10 and autovacuum_analyze_threshold=1
- postgresql_table:
- name: test_table
- columns:
- - id bigserial primary key
- - num bigint
- - stories text
- tablespace: ssd
- storage_params:
- - fillfactor=10
- - autovacuum_analyze_threshold=1
-
-- name: Create an unlogged table in schema acme
- postgresql_table:
- name: acme.useless_data
- columns: waste_id int
- unlogged: true
-
-- name: Rename table foo to bar
- postgresql_table:
- table: foo
- rename: bar
-
-- name: Rename table foo from schema acme to bar
- postgresql_table:
- name: acme.foo
- rename: bar
-
-- name: Set owner to someuser
- postgresql_table:
- name: foo
- owner: someuser
-
-- name: Change tablespace of foo table to new_tablespace and set owner to new_user
- postgresql_table:
- name: foo
- tablespace: new_tablespace
- owner: new_user
-
-- name: Truncate table foo
- postgresql_table:
- name: foo
- truncate: yes
-
-- name: Drop table foo from schema acme
- postgresql_table:
- name: acme.foo
- state: absent
-
-- name: Drop table bar cascade
- postgresql_table:
- name: bar
- state: absent
- cascade: yes
-'''
-
-RETURN = r'''
-table:
- description: Name of a table.
- returned: always
- type: str
- sample: 'foo'
-state:
- description: Table state.
- returned: always
- type: str
- sample: 'present'
-owner:
- description: Table owner.
- returned: always
- type: str
- sample: 'postgres'
-tablespace:
- description: Tablespace.
- returned: always
- type: str
- sample: 'ssd_tablespace'
-queries:
- description: List of executed queries.
- returned: always
- type: str
- sample: [ 'CREATE TABLE "test_table" (id bigint)' ]
-storage_params:
- description: Storage parameters.
- returned: always
- type: list
- sample: [ "fillfactor=100", "autovacuum_analyze_threshold=1" ]
-'''
-
-try:
- from psycopg2.extras import DictCursor
-except ImportError:
- # psycopg2 is checked by connect_to_db()
- # from ansible.module_utils.postgres
- pass
-
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.database import pg_quote_identifier
-from ansible.module_utils.postgres import (
- connect_to_db,
- exec_sql,
- get_conn_params,
- postgres_common_argument_spec,
-)
-
-
-# ===========================================
-# PostgreSQL module specific support methods.
-#
-
-class Table(object):
- def __init__(self, name, module, cursor):
- self.name = name
- self.module = module
- self.cursor = cursor
- self.info = {
- 'owner': '',
- 'tblspace': '',
- 'storage_params': [],
- }
- self.exists = False
- self.__exists_in_db()
- self.executed_queries = []
-
- def get_info(self):
- """Getter to refresh and get table info"""
- self.__exists_in_db()
-
- def __exists_in_db(self):
- """Check table exists and refresh info"""
- if "." in self.name:
- schema = self.name.split('.')[-2]
- tblname = self.name.split('.')[-1]
- else:
- schema = 'public'
- tblname = self.name
-
- query = ("SELECT t.tableowner, t.tablespace, c.reloptions "
- "FROM pg_tables AS t "
- "INNER JOIN pg_class AS c ON c.relname = t.tablename "
- "INNER JOIN pg_namespace AS n ON c.relnamespace = n.oid "
- "WHERE t.tablename = %(tblname)s "
- "AND n.nspname = %(schema)s")
- res = exec_sql(self, query, query_params={'tblname': tblname, 'schema': schema},
- add_to_executed=False)
- if res:
- self.exists = True
- self.info = dict(
- owner=res[0][0],
- tblspace=res[0][1] if res[0][1] else '',
- storage_params=res[0][2] if res[0][2] else [],
- )
-
- return True
- else:
- self.exists = False
- return False
-
- def create(self, columns='', params='', tblspace='',
- unlogged=False, owner=''):
- """
- Create table.
- If table exists, check passed args (params, tblspace, owner) and,
- if they're different from current, change them.
- Arguments:
- params - storage params (passed by "WITH (...)" in SQL),
- comma separated.
- tblspace - tablespace.
- owner - table owner.
- unlogged - create unlogged table.
- columns - column string (comma separated).
- """
- name = pg_quote_identifier(self.name, 'table')
-
- changed = False
-
- if self.exists:
- if tblspace == 'pg_default' and self.info['tblspace'] is None:
- pass # Because they have the same meaning
- elif tblspace and self.info['tblspace'] != tblspace:
- self.set_tblspace(tblspace)
- changed = True
-
- if owner and self.info['owner'] != owner:
- self.set_owner(owner)
- changed = True
-
- if params:
- param_list = [p.strip(' ') for p in params.split(',')]
-
- new_param = False
- for p in param_list:
- if p not in self.info['storage_params']:
- new_param = True
-
- if new_param:
- self.set_stor_params(params)
- changed = True
-
- if changed:
- return True
- return False
-
- query = "CREATE"
- if unlogged:
- query += " UNLOGGED TABLE %s" % name
- else:
- query += " TABLE %s" % name
-
- if columns:
- query += " (%s)" % columns
- else:
- query += " ()"
-
- if params:
- query += " WITH (%s)" % params
-
- if tblspace:
- query += " TABLESPACE %s" % pg_quote_identifier(tblspace, 'database')
-
- if exec_sql(self, query, ddl=True):
- changed = True
-
- if owner:
- changed = self.set_owner(owner)
-
- return changed
-
- def create_like(self, src_table, including='', tblspace='',
- unlogged=False, params='', owner=''):
- """
- Create table like another table (with similar DDL).
- Arguments:
- src_table - source table.
- including - corresponds to optional INCLUDING expression
- in CREATE TABLE ... LIKE statement.
- params - storage params (passed by "WITH (...)" in SQL),
- comma separated.
- tblspace - tablespace.
- owner - table owner.
- unlogged - create unlogged table.
- """
- changed = False
-
- name = pg_quote_identifier(self.name, 'table')
-
- query = "CREATE"
- if unlogged:
- query += " UNLOGGED TABLE %s" % name
- else:
- query += " TABLE %s" % name
-
- query += " (LIKE %s" % pg_quote_identifier(src_table, 'table')
-
- if including:
- including = including.split(',')
- for i in including:
- query += " INCLUDING %s" % i
-
- query += ')'
-
- if params:
- query += " WITH (%s)" % params
-
- if tblspace:
- query += " TABLESPACE %s" % pg_quote_identifier(tblspace, 'database')
-
- if exec_sql(self, query, ddl=True):
- changed = True
-
- if owner:
- changed = self.set_owner(owner)
-
- return changed
-
- def truncate(self):
- query = "TRUNCATE TABLE %s" % pg_quote_identifier(self.name, 'table')
- return exec_sql(self, query, ddl=True)
-
- def rename(self, newname):
- query = "ALTER TABLE %s RENAME TO %s" % (pg_quote_identifier(self.name, 'table'),
- pg_quote_identifier(newname, 'table'))
- return exec_sql(self, query, ddl=True)
-
- def set_owner(self, username):
- query = "ALTER TABLE %s OWNER TO %s" % (pg_quote_identifier(self.name, 'table'),
- pg_quote_identifier(username, 'role'))
- return exec_sql(self, query, ddl=True)
-
- def drop(self, cascade=False):
- if not self.exists:
- return False
-
- query = "DROP TABLE %s" % pg_quote_identifier(self.name, 'table')
- if cascade:
- query += " CASCADE"
- return exec_sql(self, query, ddl=True)
-
- def set_tblspace(self, tblspace):
- query = "ALTER TABLE %s SET TABLESPACE %s" % (pg_quote_identifier(self.name, 'table'),
- pg_quote_identifier(tblspace, 'database'))
- return exec_sql(self, query, ddl=True)
-
- def set_stor_params(self, params):
- query = "ALTER TABLE %s SET (%s)" % (pg_quote_identifier(self.name, 'table'), params)
- return exec_sql(self, query, ddl=True)
-
-
-# ===========================================
-# Module execution.
-#
-
-
-def main():
- argument_spec = postgres_common_argument_spec()
- argument_spec.update(
- table=dict(type='str', required=True, aliases=['name']),
- state=dict(type='str', default="present", choices=["absent", "present"]),
- db=dict(type='str', default='', aliases=['login_db']),
- tablespace=dict(type='str'),
- owner=dict(type='str'),
- unlogged=dict(type='bool', default=False),
- like=dict(type='str'),
- including=dict(type='str'),
- rename=dict(type='str'),
- truncate=dict(type='bool', default=False),
- columns=dict(type='list', elements='str'),
- storage_params=dict(type='list', elements='str'),
- session_role=dict(type='str'),
- cascade=dict(type='bool', default=False),
- )
- module = AnsibleModule(
- argument_spec=argument_spec,
- supports_check_mode=True,
- )
-
- table = module.params["table"]
- state = module.params["state"]
- tablespace = module.params["tablespace"]
- owner = module.params["owner"]
- unlogged = module.params["unlogged"]
- like = module.params["like"]
- including = module.params["including"]
- newname = module.params["rename"]
- storage_params = module.params["storage_params"]
- truncate = module.params["truncate"]
- columns = module.params["columns"]
- cascade = module.params["cascade"]
-
- if state == 'present' and cascade:
- module.warn("cascade=true is ignored when state=present")
-
- # Check mutual exclusive parameters:
- if state == 'absent' and (truncate or newname or columns or tablespace or like or storage_params or unlogged or owner or including):
- module.fail_json(msg="%s: state=absent is mutually exclusive with: "
- "truncate, rename, columns, tablespace, "
- "including, like, storage_params, unlogged, owner" % table)
-
- if truncate and (newname or columns or like or unlogged or storage_params or owner or tablespace or including):
- module.fail_json(msg="%s: truncate is mutually exclusive with: "
- "rename, columns, like, unlogged, including, "
- "storage_params, owner, tablespace" % table)
-
- if newname and (columns or like or unlogged or storage_params or owner or tablespace or including):
- module.fail_json(msg="%s: rename is mutually exclusive with: "
- "columns, like, unlogged, including, "
- "storage_params, owner, tablespace" % table)
-
- if like and columns:
- module.fail_json(msg="%s: like and columns params are mutually exclusive" % table)
- if including and not like:
- module.fail_json(msg="%s: including param needs like param specified" % table)
-
- conn_params = get_conn_params(module, module.params)
- db_connection = connect_to_db(module, conn_params, autocommit=False)
- cursor = db_connection.cursor(cursor_factory=DictCursor)
-
- if storage_params:
- storage_params = ','.join(storage_params)
-
- if columns:
- columns = ','.join(columns)
-
- ##############
- # Do main job:
- table_obj = Table(table, module, cursor)
-
- # Set default returned values:
- changed = False
- kw = {}
- kw['table'] = table
- kw['state'] = ''
- if table_obj.exists:
- kw = dict(
- table=table,
- state='present',
- owner=table_obj.info['owner'],
- tablespace=table_obj.info['tblspace'],
- storage_params=table_obj.info['storage_params'],
- )
-
- if state == 'absent':
- changed = table_obj.drop(cascade=cascade)
-
- elif truncate:
- changed = table_obj.truncate()
-
- elif newname:
- changed = table_obj.rename(newname)
- q = table_obj.executed_queries
- table_obj = Table(newname, module, cursor)
- table_obj.executed_queries = q
-
- elif state == 'present' and not like:
- changed = table_obj.create(columns, storage_params,
- tablespace, unlogged, owner)
-
- elif state == 'present' and like:
- changed = table_obj.create_like(like, including, tablespace,
- unlogged, storage_params)
-
- if changed:
- if module.check_mode:
- db_connection.rollback()
- else:
- db_connection.commit()
-
- # Refresh table info for RETURN.
- # Note, if table has been renamed, it gets info by newname:
- table_obj.get_info()
- db_connection.commit()
- if table_obj.exists:
- kw = dict(
- table=table,
- state='present',
- owner=table_obj.info['owner'],
- tablespace=table_obj.info['tblspace'],
- storage_params=table_obj.info['storage_params'],
- )
- else:
- # We just change the table state here
- # to keep other information about the dropped table:
- kw['state'] = 'absent'
-
- kw['queries'] = table_obj.executed_queries
- kw['changed'] = changed
- db_connection.close()
- module.exit_json(**kw)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/postgresql_user.py b/test/support/integration/plugins/modules/postgresql_user.py
deleted file mode 100644
index 10afd0a0..00000000
--- a/test/support/integration/plugins/modules/postgresql_user.py
+++ /dev/null
@@ -1,927 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {
- 'metadata_version': '1.1',
- 'status': ['stableinterface'],
- 'supported_by': 'community'
-}
-
-DOCUMENTATION = r'''
----
-module: postgresql_user
-short_description: Add or remove a user (role) from a PostgreSQL server instance
-description:
-- Adds or removes a user (role) from a PostgreSQL server instance
- ("cluster" in PostgreSQL terminology) and, optionally,
- grants the user access to an existing database or tables.
-- A user is a role with login privilege.
-- The fundamental function of the module is to create, or delete, users from
- a PostgreSQL instances. Privilege assignment, or removal, is an optional
- step, which works on one database at a time. This allows for the module to
- be called several times in the same module to modify the permissions on
- different databases, or to grant permissions to already existing users.
-- A user cannot be removed until all the privileges have been stripped from
- the user. In such situation, if the module tries to remove the user it
- will fail. To avoid this from happening the fail_on_user option signals
- the module to try to remove the user, but if not possible keep going; the
- module will report if changes happened and separately if the user was
- removed or not.
-version_added: '0.6'
-options:
- name:
- description:
- - Name of the user (role) to add or remove.
- type: str
- required: true
- aliases:
- - user
- password:
- description:
- - Set the user's password, before 1.4 this was required.
- - Password can be passed unhashed or hashed (MD5-hashed).
- - Unhashed password will automatically be hashed when saved into the
- database if C(encrypted) parameter is set, otherwise it will be save in
- plain text format.
- - When passing a hashed password it must be generated with the format
- C('str["md5"] + md5[ password + username ]'), resulting in a total of
- 35 characters. An easy way to do this is C(echo "md5$(echo -n
- 'verysecretpasswordJOE' | md5sum | awk '{print $1}')").
- - Note that if the provided password string is already in MD5-hashed
- format, then it is used as-is, regardless of C(encrypted) parameter.
- type: str
- db:
- description:
- - Name of database to connect to and where user's permissions will be granted.
- type: str
- aliases:
- - login_db
- fail_on_user:
- description:
- - If C(yes), fail when user (role) can't be removed. Otherwise just log and continue.
- default: 'yes'
- type: bool
- aliases:
- - fail_on_role
- priv:
- description:
- - "Slash-separated PostgreSQL privileges string: C(priv1/priv2), where
- privileges can be defined for database ( allowed options - 'CREATE',
- 'CONNECT', 'TEMPORARY', 'TEMP', 'ALL'. For example C(CONNECT) ) or
- for table ( allowed options - 'SELECT', 'INSERT', 'UPDATE', 'DELETE',
- 'TRUNCATE', 'REFERENCES', 'TRIGGER', 'ALL'. For example
- C(table:SELECT) ). Mixed example of this string:
- C(CONNECT/CREATE/table1:SELECT/table2:INSERT)."
- type: str
- role_attr_flags:
- description:
- - "PostgreSQL user attributes string in the format: CREATEDB,CREATEROLE,SUPERUSER."
- - Note that '[NO]CREATEUSER' is deprecated.
- - To create a simple role for using it like a group, use C(NOLOGIN) flag.
- type: str
- choices: [ '[NO]SUPERUSER', '[NO]CREATEROLE', '[NO]CREATEDB',
- '[NO]INHERIT', '[NO]LOGIN', '[NO]REPLICATION', '[NO]BYPASSRLS' ]
- session_role:
- version_added: '2.8'
- description:
- - Switch to session_role after connecting.
- - The specified session_role must be a role that the current login_user is a member of.
- - Permissions checking for SQL commands is carried out as though the session_role were the one that had logged in originally.
- type: str
- state:
- description:
- - The user (role) state.
- type: str
- default: present
- choices: [ absent, present ]
- encrypted:
- description:
- - Whether the password is stored hashed in the database.
- - Passwords can be passed already hashed or unhashed, and postgresql
- ensures the stored password is hashed when C(encrypted) is set.
- - "Note: Postgresql 10 and newer doesn't support unhashed passwords."
- - Previous to Ansible 2.6, this was C(no) by default.
- default: 'yes'
- type: bool
- version_added: '1.4'
- expires:
- description:
- - The date at which the user's password is to expire.
- - If set to C('infinity'), user's password never expire.
- - Note that this value should be a valid SQL date and time type.
- type: str
- version_added: '1.4'
- no_password_changes:
- description:
- - If C(yes), don't inspect database for password changes. Effective when
- C(pg_authid) is not accessible (such as AWS RDS). Otherwise, make
- password changes as necessary.
- default: 'no'
- type: bool
- version_added: '2.0'
- conn_limit:
- description:
- - Specifies the user (role) connection limit.
- type: int
- version_added: '2.4'
- ssl_mode:
- description:
- - Determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
- - See https://www.postgresql.org/docs/current/static/libpq-ssl.html for more information on the modes.
- - Default of C(prefer) matches libpq default.
- type: str
- default: prefer
- choices: [ allow, disable, prefer, require, verify-ca, verify-full ]
- version_added: '2.3'
- ca_cert:
- description:
- - Specifies the name of a file containing SSL certificate authority (CA) certificate(s).
- - If the file exists, the server's certificate will be verified to be signed by one of these authorities.
- type: str
- aliases: [ ssl_rootcert ]
- version_added: '2.3'
- groups:
- description:
- - The list of groups (roles) that need to be granted to the user.
- type: list
- elements: str
- version_added: '2.9'
- comment:
- description:
- - Add a comment on the user (equal to the COMMENT ON ROLE statement result).
- type: str
- version_added: '2.10'
-notes:
-- The module creates a user (role) with login privilege by default.
- Use NOLOGIN role_attr_flags to change this behaviour.
-- If you specify PUBLIC as the user (role), then the privilege changes will apply to all users (roles).
- You may not specify password or role_attr_flags when the PUBLIC user is specified.
-seealso:
-- module: postgresql_privs
-- module: postgresql_membership
-- module: postgresql_owner
-- name: PostgreSQL database roles
- description: Complete reference of the PostgreSQL database roles documentation.
- link: https://www.postgresql.org/docs/current/user-manag.html
-author:
-- Ansible Core Team
-extends_documentation_fragment: postgres
-'''
-
-EXAMPLES = r'''
-- name: Connect to acme database, create django user, and grant access to database and products table
- postgresql_user:
- db: acme
- name: django
- password: ceec4eif7ya
- priv: "CONNECT/products:ALL"
- expires: "Jan 31 2020"
-
-- name: Add a comment on django user
- postgresql_user:
- db: acme
- name: django
- comment: This is a test user
-
-# Connect to default database, create rails user, set its password (MD5-hashed),
-# and grant privilege to create other databases and demote rails from super user status if user exists
-- name: Create rails user, set MD5-hashed password, grant privs
- postgresql_user:
- name: rails
- password: md59543f1d82624df2b31672ec0f7050460
- role_attr_flags: CREATEDB,NOSUPERUSER
-
-- name: Connect to acme database and remove test user privileges from there
- postgresql_user:
- db: acme
- name: test
- priv: "ALL/products:ALL"
- state: absent
- fail_on_user: no
-
-- name: Connect to test database, remove test user from cluster
- postgresql_user:
- db: test
- name: test
- priv: ALL
- state: absent
-
-- name: Connect to acme database and set user's password with no expire date
- postgresql_user:
- db: acme
- name: django
- password: mysupersecretword
- priv: "CONNECT/products:ALL"
- expires: infinity
-
-# Example privileges string format
-# INSERT,UPDATE/table:SELECT/anothertable:ALL
-
-- name: Connect to test database and remove an existing user's password
- postgresql_user:
- db: test
- user: test
- password: ""
-
-- name: Create user test and grant group user_ro and user_rw to it
- postgresql_user:
- name: test
- groups:
- - user_ro
- - user_rw
-'''
-
-RETURN = r'''
-queries:
- description: List of executed queries.
- returned: always
- type: list
- sample: ['CREATE USER "alice"', 'GRANT CONNECT ON DATABASE "acme" TO "alice"']
- version_added: '2.8'
-'''
-
-import itertools
-import re
-import traceback
-from hashlib import md5
-
-try:
- import psycopg2
- from psycopg2.extras import DictCursor
-except ImportError:
- # psycopg2 is checked by connect_to_db()
- # from ansible.module_utils.postgres
- pass
-
-from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.database import pg_quote_identifier, SQLParseError
-from ansible.module_utils.postgres import (
- connect_to_db,
- get_conn_params,
- PgMembership,
- postgres_common_argument_spec,
-)
-from ansible.module_utils._text import to_bytes, to_native
-from ansible.module_utils.six import iteritems
-
-
-FLAGS = ('SUPERUSER', 'CREATEROLE', 'CREATEDB', 'INHERIT', 'LOGIN', 'REPLICATION')
-FLAGS_BY_VERSION = {'BYPASSRLS': 90500}
-
-VALID_PRIVS = dict(table=frozenset(('SELECT', 'INSERT', 'UPDATE', 'DELETE', 'TRUNCATE', 'REFERENCES', 'TRIGGER', 'ALL')),
- database=frozenset(
- ('CREATE', 'CONNECT', 'TEMPORARY', 'TEMP', 'ALL')),
- )
-
-# map to cope with idiosyncracies of SUPERUSER and LOGIN
-PRIV_TO_AUTHID_COLUMN = dict(SUPERUSER='rolsuper', CREATEROLE='rolcreaterole',
- CREATEDB='rolcreatedb', INHERIT='rolinherit', LOGIN='rolcanlogin',
- REPLICATION='rolreplication', BYPASSRLS='rolbypassrls')
-
-executed_queries = []
-
-
-class InvalidFlagsError(Exception):
- pass
-
-
-class InvalidPrivsError(Exception):
- pass
-
-# ===========================================
-# PostgreSQL module specific support methods.
-#
-
-
-def user_exists(cursor, user):
- # The PUBLIC user is a special case that is always there
- if user == 'PUBLIC':
- return True
- query = "SELECT rolname FROM pg_roles WHERE rolname=%(user)s"
- cursor.execute(query, {'user': user})
- return cursor.rowcount > 0
-
-
-def user_add(cursor, user, password, role_attr_flags, encrypted, expires, conn_limit):
- """Create a new database user (role)."""
- # Note: role_attr_flags escaped by parse_role_attrs and encrypted is a
- # literal
- query_password_data = dict(password=password, expires=expires)
- query = ['CREATE USER "%(user)s"' %
- {"user": user}]
- if password is not None and password != '':
- query.append("WITH %(crypt)s" % {"crypt": encrypted})
- query.append("PASSWORD %(password)s")
- if expires is not None:
- query.append("VALID UNTIL %(expires)s")
- if conn_limit is not None:
- query.append("CONNECTION LIMIT %(conn_limit)s" % {"conn_limit": conn_limit})
- query.append(role_attr_flags)
- query = ' '.join(query)
- executed_queries.append(query)
- cursor.execute(query, query_password_data)
- return True
-
-
-def user_should_we_change_password(current_role_attrs, user, password, encrypted):
- """Check if we should change the user's password.
-
- Compare the proposed password with the existing one, comparing
- hashes if encrypted. If we can't access it assume yes.
- """
-
- if current_role_attrs is None:
- # on some databases, E.g. AWS RDS instances, there is no access to
- # the pg_authid relation to check the pre-existing password, so we
- # just assume password is different
- return True
-
- # Do we actually need to do anything?
- pwchanging = False
- if password is not None:
- # Empty password means that the role shouldn't have a password, which
- # means we need to check if the current password is None.
- if password == '':
- if current_role_attrs['rolpassword'] is not None:
- pwchanging = True
- # 32: MD5 hashes are represented as a sequence of 32 hexadecimal digits
- # 3: The size of the 'md5' prefix
- # When the provided password looks like a MD5-hash, value of
- # 'encrypted' is ignored.
- elif (password.startswith('md5') and len(password) == 32 + 3) or encrypted == 'UNENCRYPTED':
- if password != current_role_attrs['rolpassword']:
- pwchanging = True
- elif encrypted == 'ENCRYPTED':
- hashed_password = 'md5{0}'.format(md5(to_bytes(password) + to_bytes(user)).hexdigest())
- if hashed_password != current_role_attrs['rolpassword']:
- pwchanging = True
-
- return pwchanging
-
-
-def user_alter(db_connection, module, user, password, role_attr_flags, encrypted, expires, no_password_changes, conn_limit):
- """Change user password and/or attributes. Return True if changed, False otherwise."""
- changed = False
-
- cursor = db_connection.cursor(cursor_factory=DictCursor)
- # Note: role_attr_flags escaped by parse_role_attrs and encrypted is a
- # literal
- if user == 'PUBLIC':
- if password is not None:
- module.fail_json(msg="cannot change the password for PUBLIC user")
- elif role_attr_flags != '':
- module.fail_json(msg="cannot change the role_attr_flags for PUBLIC user")
- else:
- return False
-
- # Handle passwords.
- if not no_password_changes and (password is not None or role_attr_flags != '' or expires is not None or conn_limit is not None):
- # Select password and all flag-like columns in order to verify changes.
- try:
- select = "SELECT * FROM pg_authid where rolname=%(user)s"
- cursor.execute(select, {"user": user})
- # Grab current role attributes.
- current_role_attrs = cursor.fetchone()
- except psycopg2.ProgrammingError:
- current_role_attrs = None
- db_connection.rollback()
-
- pwchanging = user_should_we_change_password(current_role_attrs, user, password, encrypted)
-
- if current_role_attrs is None:
- try:
- # AWS RDS instances does not allow user to access pg_authid
- # so try to get current_role_attrs from pg_roles tables
- select = "SELECT * FROM pg_roles where rolname=%(user)s"
- cursor.execute(select, {"user": user})
- # Grab current role attributes from pg_roles
- current_role_attrs = cursor.fetchone()
- except psycopg2.ProgrammingError as e:
- db_connection.rollback()
- module.fail_json(msg="Failed to get role details for current user %s: %s" % (user, e))
-
- role_attr_flags_changing = False
- if role_attr_flags:
- role_attr_flags_dict = {}
- for r in role_attr_flags.split(' '):
- if r.startswith('NO'):
- role_attr_flags_dict[r.replace('NO', '', 1)] = False
- else:
- role_attr_flags_dict[r] = True
-
- for role_attr_name, role_attr_value in role_attr_flags_dict.items():
- if current_role_attrs[PRIV_TO_AUTHID_COLUMN[role_attr_name]] != role_attr_value:
- role_attr_flags_changing = True
-
- if expires is not None:
- cursor.execute("SELECT %s::timestamptz;", (expires,))
- expires_with_tz = cursor.fetchone()[0]
- expires_changing = expires_with_tz != current_role_attrs.get('rolvaliduntil')
- else:
- expires_changing = False
-
- conn_limit_changing = (conn_limit is not None and conn_limit != current_role_attrs['rolconnlimit'])
-
- if not pwchanging and not role_attr_flags_changing and not expires_changing and not conn_limit_changing:
- return False
-
- alter = ['ALTER USER "%(user)s"' % {"user": user}]
- if pwchanging:
- if password != '':
- alter.append("WITH %(crypt)s" % {"crypt": encrypted})
- alter.append("PASSWORD %(password)s")
- else:
- alter.append("WITH PASSWORD NULL")
- alter.append(role_attr_flags)
- elif role_attr_flags:
- alter.append('WITH %s' % role_attr_flags)
- if expires is not None:
- alter.append("VALID UNTIL %(expires)s")
- if conn_limit is not None:
- alter.append("CONNECTION LIMIT %(conn_limit)s" % {"conn_limit": conn_limit})
-
- query_password_data = dict(password=password, expires=expires)
- try:
- cursor.execute(' '.join(alter), query_password_data)
- changed = True
- except psycopg2.InternalError as e:
- if e.pgcode == '25006':
- # Handle errors due to read-only transactions indicated by pgcode 25006
- # ERROR: cannot execute ALTER ROLE in a read-only transaction
- changed = False
- module.fail_json(msg=e.pgerror, exception=traceback.format_exc())
- return changed
- else:
- raise psycopg2.InternalError(e)
- except psycopg2.NotSupportedError as e:
- module.fail_json(msg=e.pgerror, exception=traceback.format_exc())
-
- elif no_password_changes and role_attr_flags != '':
- # Grab role information from pg_roles instead of pg_authid
- select = "SELECT * FROM pg_roles where rolname=%(user)s"
- cursor.execute(select, {"user": user})
- # Grab current role attributes.
- current_role_attrs = cursor.fetchone()
-
- role_attr_flags_changing = False
-
- if role_attr_flags:
- role_attr_flags_dict = {}
- for r in role_attr_flags.split(' '):
- if r.startswith('NO'):
- role_attr_flags_dict[r.replace('NO', '', 1)] = False
- else:
- role_attr_flags_dict[r] = True
-
- for role_attr_name, role_attr_value in role_attr_flags_dict.items():
- if current_role_attrs[PRIV_TO_AUTHID_COLUMN[role_attr_name]] != role_attr_value:
- role_attr_flags_changing = True
-
- if not role_attr_flags_changing:
- return False
-
- alter = ['ALTER USER "%(user)s"' %
- {"user": user}]
- if role_attr_flags:
- alter.append('WITH %s' % role_attr_flags)
-
- try:
- cursor.execute(' '.join(alter))
- except psycopg2.InternalError as e:
- if e.pgcode == '25006':
- # Handle errors due to read-only transactions indicated by pgcode 25006
- # ERROR: cannot execute ALTER ROLE in a read-only transaction
- changed = False
- module.fail_json(msg=e.pgerror, exception=traceback.format_exc())
- return changed
- else:
- raise psycopg2.InternalError(e)
-
- # Grab new role attributes.
- cursor.execute(select, {"user": user})
- new_role_attrs = cursor.fetchone()
-
- # Detect any differences between current_ and new_role_attrs.
- changed = current_role_attrs != new_role_attrs
-
- return changed
-
-
-def user_delete(cursor, user):
- """Try to remove a user. Returns True if successful otherwise False"""
- cursor.execute("SAVEPOINT ansible_pgsql_user_delete")
- try:
- query = 'DROP USER "%s"' % user
- executed_queries.append(query)
- cursor.execute(query)
- except Exception:
- cursor.execute("ROLLBACK TO SAVEPOINT ansible_pgsql_user_delete")
- cursor.execute("RELEASE SAVEPOINT ansible_pgsql_user_delete")
- return False
-
- cursor.execute("RELEASE SAVEPOINT ansible_pgsql_user_delete")
- return True
-
-
-def has_table_privileges(cursor, user, table, privs):
- """
- Return the difference between the privileges that a user already has and
- the privileges that they desire to have.
-
- :returns: tuple of:
- * privileges that they have and were requested
- * privileges they currently hold but were not requested
- * privileges requested that they do not hold
- """
- cur_privs = get_table_privileges(cursor, user, table)
- have_currently = cur_privs.intersection(privs)
- other_current = cur_privs.difference(privs)
- desired = privs.difference(cur_privs)
- return (have_currently, other_current, desired)
-
-
-def get_table_privileges(cursor, user, table):
- if '.' in table:
- schema, table = table.split('.', 1)
- else:
- schema = 'public'
- query = ("SELECT privilege_type FROM information_schema.role_table_grants "
- "WHERE grantee=%(user)s AND table_name=%(table)s AND table_schema=%(schema)s")
- cursor.execute(query, {'user': user, 'table': table, 'schema': schema})
- return frozenset([x[0] for x in cursor.fetchall()])
-
-
-def grant_table_privileges(cursor, user, table, privs):
- # Note: priv escaped by parse_privs
- privs = ', '.join(privs)
- query = 'GRANT %s ON TABLE %s TO "%s"' % (
- privs, pg_quote_identifier(table, 'table'), user)
- executed_queries.append(query)
- cursor.execute(query)
-
-
-def revoke_table_privileges(cursor, user, table, privs):
- # Note: priv escaped by parse_privs
- privs = ', '.join(privs)
- query = 'REVOKE %s ON TABLE %s FROM "%s"' % (
- privs, pg_quote_identifier(table, 'table'), user)
- executed_queries.append(query)
- cursor.execute(query)
-
-
-def get_database_privileges(cursor, user, db):
- priv_map = {
- 'C': 'CREATE',
- 'T': 'TEMPORARY',
- 'c': 'CONNECT',
- }
- query = 'SELECT datacl FROM pg_database WHERE datname = %s'
- cursor.execute(query, (db,))
- datacl = cursor.fetchone()[0]
- if datacl is None:
- return set()
- r = re.search(r'%s\\?"?=(C?T?c?)/[^,]+,?' % user, datacl)
- if r is None:
- return set()
- o = set()
- for v in r.group(1):
- o.add(priv_map[v])
- return normalize_privileges(o, 'database')
-
-
-def has_database_privileges(cursor, user, db, privs):
- """
- Return the difference between the privileges that a user already has and
- the privileges that they desire to have.
-
- :returns: tuple of:
- * privileges that they have and were requested
- * privileges they currently hold but were not requested
- * privileges requested that they do not hold
- """
- cur_privs = get_database_privileges(cursor, user, db)
- have_currently = cur_privs.intersection(privs)
- other_current = cur_privs.difference(privs)
- desired = privs.difference(cur_privs)
- return (have_currently, other_current, desired)
-
-
-def grant_database_privileges(cursor, user, db, privs):
- # Note: priv escaped by parse_privs
- privs = ', '.join(privs)
- if user == "PUBLIC":
- query = 'GRANT %s ON DATABASE %s TO PUBLIC' % (
- privs, pg_quote_identifier(db, 'database'))
- else:
- query = 'GRANT %s ON DATABASE %s TO "%s"' % (
- privs, pg_quote_identifier(db, 'database'), user)
-
- executed_queries.append(query)
- cursor.execute(query)
-
-
-def revoke_database_privileges(cursor, user, db, privs):
- # Note: priv escaped by parse_privs
- privs = ', '.join(privs)
- if user == "PUBLIC":
- query = 'REVOKE %s ON DATABASE %s FROM PUBLIC' % (
- privs, pg_quote_identifier(db, 'database'))
- else:
- query = 'REVOKE %s ON DATABASE %s FROM "%s"' % (
- privs, pg_quote_identifier(db, 'database'), user)
-
- executed_queries.append(query)
- cursor.execute(query)
-
-
-def revoke_privileges(cursor, user, privs):
- if privs is None:
- return False
-
- revoke_funcs = dict(table=revoke_table_privileges,
- database=revoke_database_privileges)
- check_funcs = dict(table=has_table_privileges,
- database=has_database_privileges)
-
- changed = False
- for type_ in privs:
- for name, privileges in iteritems(privs[type_]):
- # Check that any of the privileges requested to be removed are
- # currently granted to the user
- differences = check_funcs[type_](cursor, user, name, privileges)
- if differences[0]:
- revoke_funcs[type_](cursor, user, name, privileges)
- changed = True
- return changed
-
-
-def grant_privileges(cursor, user, privs):
- if privs is None:
- return False
-
- grant_funcs = dict(table=grant_table_privileges,
- database=grant_database_privileges)
- check_funcs = dict(table=has_table_privileges,
- database=has_database_privileges)
-
- changed = False
- for type_ in privs:
- for name, privileges in iteritems(privs[type_]):
- # Check that any of the privileges requested for the user are
- # currently missing
- differences = check_funcs[type_](cursor, user, name, privileges)
- if differences[2]:
- grant_funcs[type_](cursor, user, name, privileges)
- changed = True
- return changed
-
-
-def parse_role_attrs(cursor, role_attr_flags):
- """
- Parse role attributes string for user creation.
- Format:
-
- attributes[,attributes,...]
-
- Where:
-
- attributes := CREATEDB,CREATEROLE,NOSUPERUSER,...
- [ "[NO]SUPERUSER","[NO]CREATEROLE", "[NO]CREATEDB",
- "[NO]INHERIT", "[NO]LOGIN", "[NO]REPLICATION",
- "[NO]BYPASSRLS" ]
-
- Note: "[NO]BYPASSRLS" role attribute introduced in 9.5
- Note: "[NO]CREATEUSER" role attribute is deprecated.
-
- """
- flags = frozenset(role.upper() for role in role_attr_flags.split(',') if role)
-
- valid_flags = frozenset(itertools.chain(FLAGS, get_valid_flags_by_version(cursor)))
- valid_flags = frozenset(itertools.chain(valid_flags, ('NO%s' % flag for flag in valid_flags)))
-
- if not flags.issubset(valid_flags):
- raise InvalidFlagsError('Invalid role_attr_flags specified: %s' %
- ' '.join(flags.difference(valid_flags)))
-
- return ' '.join(flags)
-
-
-def normalize_privileges(privs, type_):
- new_privs = set(privs)
- if 'ALL' in new_privs:
- new_privs.update(VALID_PRIVS[type_])
- new_privs.remove('ALL')
- if 'TEMP' in new_privs:
- new_privs.add('TEMPORARY')
- new_privs.remove('TEMP')
-
- return new_privs
-
-
-def parse_privs(privs, db):
- """
- Parse privilege string to determine permissions for database db.
- Format:
-
- privileges[/privileges/...]
-
- Where:
-
- privileges := DATABASE_PRIVILEGES[,DATABASE_PRIVILEGES,...] |
- TABLE_NAME:TABLE_PRIVILEGES[,TABLE_PRIVILEGES,...]
- """
- if privs is None:
- return privs
-
- o_privs = {
- 'database': {},
- 'table': {}
- }
- for token in privs.split('/'):
- if ':' not in token:
- type_ = 'database'
- name = db
- priv_set = frozenset(x.strip().upper()
- for x in token.split(',') if x.strip())
- else:
- type_ = 'table'
- name, privileges = token.split(':', 1)
- priv_set = frozenset(x.strip().upper()
- for x in privileges.split(',') if x.strip())
-
- if not priv_set.issubset(VALID_PRIVS[type_]):
- raise InvalidPrivsError('Invalid privs specified for %s: %s' %
- (type_, ' '.join(priv_set.difference(VALID_PRIVS[type_]))))
-
- priv_set = normalize_privileges(priv_set, type_)
- o_privs[type_][name] = priv_set
-
- return o_privs
-
-
-def get_valid_flags_by_version(cursor):
- """
- Some role attributes were introduced after certain versions. We want to
- compile a list of valid flags against the current Postgres version.
- """
- current_version = cursor.connection.server_version
-
- return [
- flag
- for flag, version_introduced in FLAGS_BY_VERSION.items()
- if current_version >= version_introduced
- ]
-
-
-def get_comment(cursor, user):
- """Get user's comment."""
- query = ("SELECT pg_catalog.shobj_description(r.oid, 'pg_authid') "
- "FROM pg_catalog.pg_roles r "
- "WHERE r.rolname = %(user)s")
- cursor.execute(query, {'user': user})
- return cursor.fetchone()[0]
-
-
-def add_comment(cursor, user, comment):
- """Add comment on user."""
- if comment != get_comment(cursor, user):
- query = 'COMMENT ON ROLE "%s" IS ' % user
- cursor.execute(query + '%(comment)s', {'comment': comment})
- executed_queries.append(cursor.mogrify(query + '%(comment)s', {'comment': comment}))
- return True
- else:
- return False
-
-
-# ===========================================
-# Module execution.
-#
-
-def main():
- argument_spec = postgres_common_argument_spec()
- argument_spec.update(
- user=dict(type='str', required=True, aliases=['name']),
- password=dict(type='str', default=None, no_log=True),
- state=dict(type='str', default='present', choices=['absent', 'present']),
- priv=dict(type='str', default=None),
- db=dict(type='str', default='', aliases=['login_db']),
- fail_on_user=dict(type='bool', default='yes', aliases=['fail_on_role']),
- role_attr_flags=dict(type='str', default=''),
- encrypted=dict(type='bool', default='yes'),
- no_password_changes=dict(type='bool', default='no'),
- expires=dict(type='str', default=None),
- conn_limit=dict(type='int', default=None),
- session_role=dict(type='str'),
- groups=dict(type='list', elements='str'),
- comment=dict(type='str', default=None),
- )
- module = AnsibleModule(
- argument_spec=argument_spec,
- supports_check_mode=True
- )
-
- user = module.params["user"]
- password = module.params["password"]
- state = module.params["state"]
- fail_on_user = module.params["fail_on_user"]
- if module.params['db'] == '' and module.params["priv"] is not None:
- module.fail_json(msg="privileges require a database to be specified")
- privs = parse_privs(module.params["priv"], module.params["db"])
- no_password_changes = module.params["no_password_changes"]
- if module.params["encrypted"]:
- encrypted = "ENCRYPTED"
- else:
- encrypted = "UNENCRYPTED"
- expires = module.params["expires"]
- conn_limit = module.params["conn_limit"]
- role_attr_flags = module.params["role_attr_flags"]
- groups = module.params["groups"]
- if groups:
- groups = [e.strip() for e in groups]
- comment = module.params["comment"]
-
- conn_params = get_conn_params(module, module.params, warn_db_default=False)
- db_connection = connect_to_db(module, conn_params)
- cursor = db_connection.cursor(cursor_factory=DictCursor)
-
- try:
- role_attr_flags = parse_role_attrs(cursor, role_attr_flags)
- except InvalidFlagsError as e:
- module.fail_json(msg=to_native(e), exception=traceback.format_exc())
-
- kw = dict(user=user)
- changed = False
- user_removed = False
-
- if state == "present":
- if user_exists(cursor, user):
- try:
- changed = user_alter(db_connection, module, user, password,
- role_attr_flags, encrypted, expires, no_password_changes, conn_limit)
- except SQLParseError as e:
- module.fail_json(msg=to_native(e), exception=traceback.format_exc())
- else:
- try:
- changed = user_add(cursor, user, password,
- role_attr_flags, encrypted, expires, conn_limit)
- except psycopg2.ProgrammingError as e:
- module.fail_json(msg="Unable to add user with given requirement "
- "due to : %s" % to_native(e),
- exception=traceback.format_exc())
- except SQLParseError as e:
- module.fail_json(msg=to_native(e), exception=traceback.format_exc())
- try:
- changed = grant_privileges(cursor, user, privs) or changed
- except SQLParseError as e:
- module.fail_json(msg=to_native(e), exception=traceback.format_exc())
-
- if groups:
- target_roles = []
- target_roles.append(user)
- pg_membership = PgMembership(module, cursor, groups, target_roles)
- changed = pg_membership.grant() or changed
- executed_queries.extend(pg_membership.executed_queries)
-
- if comment is not None:
- try:
- changed = add_comment(cursor, user, comment) or changed
- except Exception as e:
- module.fail_json(msg='Unable to add comment on role: %s' % to_native(e),
- exception=traceback.format_exc())
-
- else:
- if user_exists(cursor, user):
- if module.check_mode:
- changed = True
- kw['user_removed'] = True
- else:
- try:
- changed = revoke_privileges(cursor, user, privs)
- user_removed = user_delete(cursor, user)
- except SQLParseError as e:
- module.fail_json(msg=to_native(e), exception=traceback.format_exc())
- changed = changed or user_removed
- if fail_on_user and not user_removed:
- msg = "Unable to remove user"
- module.fail_json(msg=msg)
- kw['user_removed'] = user_removed
-
- if changed:
- if module.check_mode:
- db_connection.rollback()
- else:
- db_connection.commit()
-
- kw['changed'] = changed
- kw['queries'] = executed_queries
- module.exit_json(**kw)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/rabbitmq_plugin.py b/test/support/integration/plugins/modules/rabbitmq_plugin.py
deleted file mode 100644
index 301bbfe2..00000000
--- a/test/support/integration/plugins/modules/rabbitmq_plugin.py
+++ /dev/null
@@ -1,180 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2013, Chatham Financial <oss@chathamfinancial.com>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-ANSIBLE_METADATA = {
- 'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'
-}
-
-
-DOCUMENTATION = '''
----
-module: rabbitmq_plugin
-short_description: Manage RabbitMQ plugins
-description:
- - This module can be used to enable or disable RabbitMQ plugins.
-version_added: "1.1"
-author:
- - Chris Hoffman (@chrishoffman)
-options:
- names:
- description:
- - Comma-separated list of plugin names. Also, accepts plugin name.
- required: true
- aliases: [name]
- new_only:
- description:
- - Only enable missing plugins.
- - Does not disable plugins that are not in the names list.
- type: bool
- default: "no"
- state:
- description:
- - Specify if plugins are to be enabled or disabled.
- default: enabled
- choices: [enabled, disabled]
- prefix:
- description:
- - Specify a custom install prefix to a Rabbit.
- version_added: "1.3"
-'''
-
-EXAMPLES = '''
-- name: Enables the rabbitmq_management plugin
- rabbitmq_plugin:
- names: rabbitmq_management
- state: enabled
-
-- name: Enable multiple rabbitmq plugins
- rabbitmq_plugin:
- names: rabbitmq_management,rabbitmq_management_visualiser
- state: enabled
-
-- name: Disable plugin
- rabbitmq_plugin:
- names: rabbitmq_management
- state: disabled
-
-- name: Enable every plugin in list with existing plugins
- rabbitmq_plugin:
- names: rabbitmq_management,rabbitmq_management_visualiser,rabbitmq_shovel,rabbitmq_shovel_management
- state: enabled
- new_only: 'yes'
-'''
-
-RETURN = '''
-enabled:
- description: list of plugins enabled during task run
- returned: always
- type: list
- sample: ["rabbitmq_management"]
-disabled:
- description: list of plugins disabled during task run
- returned: always
- type: list
- sample: ["rabbitmq_management"]
-'''
-
-import os
-from ansible.module_utils.basic import AnsibleModule
-
-
-class RabbitMqPlugins(object):
-
- def __init__(self, module):
- self.module = module
- bin_path = ''
- if module.params['prefix']:
- if os.path.isdir(os.path.join(module.params['prefix'], 'bin')):
- bin_path = os.path.join(module.params['prefix'], 'bin')
- elif os.path.isdir(os.path.join(module.params['prefix'], 'sbin')):
- bin_path = os.path.join(module.params['prefix'], 'sbin')
- else:
- # No such path exists.
- module.fail_json(msg="No binary folder in prefix %s" % module.params['prefix'])
-
- self._rabbitmq_plugins = os.path.join(bin_path, "rabbitmq-plugins")
- else:
- self._rabbitmq_plugins = module.get_bin_path('rabbitmq-plugins', True)
-
- def _exec(self, args, run_in_check_mode=False):
- if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
- cmd = [self._rabbitmq_plugins]
- rc, out, err = self.module.run_command(cmd + args, check_rc=True)
- return out.splitlines()
- return list()
-
- def get_all(self):
- list_output = self._exec(['list', '-E', '-m'], True)
- plugins = []
- for plugin in list_output:
- if not plugin:
- break
- plugins.append(plugin)
-
- return plugins
-
- def enable(self, name):
- self._exec(['enable', name])
-
- def disable(self, name):
- self._exec(['disable', name])
-
-
-def main():
- arg_spec = dict(
- names=dict(required=True, aliases=['name']),
- new_only=dict(default='no', type='bool'),
- state=dict(default='enabled', choices=['enabled', 'disabled']),
- prefix=dict(required=False, default=None)
- )
- module = AnsibleModule(
- argument_spec=arg_spec,
- supports_check_mode=True
- )
-
- result = dict()
- names = module.params['names'].split(',')
- new_only = module.params['new_only']
- state = module.params['state']
-
- rabbitmq_plugins = RabbitMqPlugins(module)
- enabled_plugins = rabbitmq_plugins.get_all()
-
- enabled = []
- disabled = []
- if state == 'enabled':
- if not new_only:
- for plugin in enabled_plugins:
- if " " in plugin:
- continue
- if plugin not in names:
- rabbitmq_plugins.disable(plugin)
- disabled.append(plugin)
-
- for name in names:
- if name not in enabled_plugins:
- rabbitmq_plugins.enable(name)
- enabled.append(name)
- else:
- for plugin in enabled_plugins:
- if plugin in names:
- rabbitmq_plugins.disable(plugin)
- disabled.append(plugin)
-
- result['changed'] = len(enabled) > 0 or len(disabled) > 0
- result['enabled'] = enabled
- result['disabled'] = disabled
- module.exit_json(**result)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/rabbitmq_queue.py b/test/support/integration/plugins/modules/rabbitmq_queue.py
deleted file mode 100644
index 567ec813..00000000
--- a/test/support/integration/plugins/modules/rabbitmq_queue.py
+++ /dev/null
@@ -1,257 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2015, Manuel Sousa <manuel.sousa@gmail.com>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-
-DOCUMENTATION = '''
----
-module: rabbitmq_queue
-author: Manuel Sousa (@manuel-sousa)
-version_added: "2.0"
-
-short_description: Manage rabbitMQ queues
-description:
- - This module uses rabbitMQ Rest API to create/delete queues
-requirements: [ "requests >= 1.0.0" ]
-options:
- name:
- description:
- - Name of the queue
- required: true
- state:
- description:
- - Whether the queue should be present or absent
- choices: [ "present", "absent" ]
- default: present
- durable:
- description:
- - whether queue is durable or not
- type: bool
- default: 'yes'
- auto_delete:
- description:
- - if the queue should delete itself after all queues/queues unbound from it
- type: bool
- default: 'no'
- message_ttl:
- description:
- - How long a message can live in queue before it is discarded (milliseconds)
- default: forever
- auto_expires:
- description:
- - How long a queue can be unused before it is automatically deleted (milliseconds)
- default: forever
- max_length:
- description:
- - How many messages can the queue contain before it starts rejecting
- default: no limit
- dead_letter_exchange:
- description:
- - Optional name of an exchange to which messages will be republished if they
- - are rejected or expire
- dead_letter_routing_key:
- description:
- - Optional replacement routing key to use when a message is dead-lettered.
- - Original routing key will be used if unset
- max_priority:
- description:
- - Maximum number of priority levels for the queue to support.
- - If not set, the queue will not support message priorities.
- - Larger numbers indicate higher priority.
- version_added: "2.4"
- arguments:
- description:
- - extra arguments for queue. If defined this argument is a key/value dictionary
- default: {}
-extends_documentation_fragment:
- - rabbitmq
-'''
-
-EXAMPLES = '''
-# Create a queue
-- rabbitmq_queue:
- name: myQueue
-
-# Create a queue on remote host
-- rabbitmq_queue:
- name: myRemoteQueue
- login_user: user
- login_password: secret
- login_host: remote.example.org
-'''
-
-import json
-import traceback
-
-REQUESTS_IMP_ERR = None
-try:
- import requests
- HAS_REQUESTS = True
-except ImportError:
- REQUESTS_IMP_ERR = traceback.format_exc()
- HAS_REQUESTS = False
-
-from ansible.module_utils.basic import AnsibleModule, missing_required_lib
-from ansible.module_utils.six.moves.urllib import parse as urllib_parse
-from ansible.module_utils.rabbitmq import rabbitmq_argument_spec
-
-
-def main():
-
- argument_spec = rabbitmq_argument_spec()
- argument_spec.update(
- dict(
- state=dict(default='present', choices=['present', 'absent'], type='str'),
- name=dict(required=True, type='str'),
- durable=dict(default=True, type='bool'),
- auto_delete=dict(default=False, type='bool'),
- message_ttl=dict(default=None, type='int'),
- auto_expires=dict(default=None, type='int'),
- max_length=dict(default=None, type='int'),
- dead_letter_exchange=dict(default=None, type='str'),
- dead_letter_routing_key=dict(default=None, type='str'),
- arguments=dict(default=dict(), type='dict'),
- max_priority=dict(default=None, type='int')
- )
- )
- module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
-
- url = "%s://%s:%s/api/queues/%s/%s" % (
- module.params['login_protocol'],
- module.params['login_host'],
- module.params['login_port'],
- urllib_parse.quote(module.params['vhost'], ''),
- module.params['name']
- )
-
- if not HAS_REQUESTS:
- module.fail_json(msg=missing_required_lib("requests"), exception=REQUESTS_IMP_ERR)
-
- result = dict(changed=False, name=module.params['name'])
-
- # Check if queue already exists
- r = requests.get(url, auth=(module.params['login_user'], module.params['login_password']),
- verify=module.params['ca_cert'], cert=(module.params['client_cert'], module.params['client_key']))
-
- if r.status_code == 200:
- queue_exists = True
- response = r.json()
- elif r.status_code == 404:
- queue_exists = False
- response = r.text
- else:
- module.fail_json(
- msg="Invalid response from RESTAPI when trying to check if queue exists",
- details=r.text
- )
-
- if module.params['state'] == 'present':
- change_required = not queue_exists
- else:
- change_required = queue_exists
-
- # Check if attributes change on existing queue
- if not change_required and r.status_code == 200 and module.params['state'] == 'present':
- if not (
- response['durable'] == module.params['durable'] and
- response['auto_delete'] == module.params['auto_delete'] and
- (
- ('x-message-ttl' in response['arguments'] and response['arguments']['x-message-ttl'] == module.params['message_ttl']) or
- ('x-message-ttl' not in response['arguments'] and module.params['message_ttl'] is None)
- ) and
- (
- ('x-expires' in response['arguments'] and response['arguments']['x-expires'] == module.params['auto_expires']) or
- ('x-expires' not in response['arguments'] and module.params['auto_expires'] is None)
- ) and
- (
- ('x-max-length' in response['arguments'] and response['arguments']['x-max-length'] == module.params['max_length']) or
- ('x-max-length' not in response['arguments'] and module.params['max_length'] is None)
- ) and
- (
- ('x-dead-letter-exchange' in response['arguments'] and
- response['arguments']['x-dead-letter-exchange'] == module.params['dead_letter_exchange']) or
- ('x-dead-letter-exchange' not in response['arguments'] and module.params['dead_letter_exchange'] is None)
- ) and
- (
- ('x-dead-letter-routing-key' in response['arguments'] and
- response['arguments']['x-dead-letter-routing-key'] == module.params['dead_letter_routing_key']) or
- ('x-dead-letter-routing-key' not in response['arguments'] and module.params['dead_letter_routing_key'] is None)
- ) and
- (
- ('x-max-priority' in response['arguments'] and
- response['arguments']['x-max-priority'] == module.params['max_priority']) or
- ('x-max-priority' not in response['arguments'] and module.params['max_priority'] is None)
- )
- ):
- module.fail_json(
- msg="RabbitMQ RESTAPI doesn't support attribute changes for existing queues",
- )
-
- # Copy parameters to arguments as used by RabbitMQ
- for k, v in {
- 'message_ttl': 'x-message-ttl',
- 'auto_expires': 'x-expires',
- 'max_length': 'x-max-length',
- 'dead_letter_exchange': 'x-dead-letter-exchange',
- 'dead_letter_routing_key': 'x-dead-letter-routing-key',
- 'max_priority': 'x-max-priority'
- }.items():
- if module.params[k] is not None:
- module.params['arguments'][v] = module.params[k]
-
- # Exit if check_mode
- if module.check_mode:
- result['changed'] = change_required
- result['details'] = response
- result['arguments'] = module.params['arguments']
- module.exit_json(**result)
-
- # Do changes
- if change_required:
- if module.params['state'] == 'present':
- r = requests.put(
- url,
- auth=(module.params['login_user'], module.params['login_password']),
- headers={"content-type": "application/json"},
- data=json.dumps({
- "durable": module.params['durable'],
- "auto_delete": module.params['auto_delete'],
- "arguments": module.params['arguments']
- }),
- verify=module.params['ca_cert'],
- cert=(module.params['client_cert'], module.params['client_key'])
- )
- elif module.params['state'] == 'absent':
- r = requests.delete(url, auth=(module.params['login_user'], module.params['login_password']),
- verify=module.params['ca_cert'], cert=(module.params['client_cert'], module.params['client_key']))
-
- # RabbitMQ 3.6.7 changed this response code from 204 to 201
- if r.status_code == 204 or r.status_code == 201:
- result['changed'] = True
- module.exit_json(**result)
- else:
- module.fail_json(
- msg="Error creating queue",
- status=r.status_code,
- details=r.text
- )
-
- else:
- module.exit_json(
- changed=False,
- name=module.params['name']
- )
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/s3_bucket.py b/test/support/integration/plugins/modules/s3_bucket.py
deleted file mode 100644
index f35cf53b..00000000
--- a/test/support/integration/plugins/modules/s3_bucket.py
+++ /dev/null
@@ -1,740 +0,0 @@
-#!/usr/bin/python
-#
-# This is a free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This Ansible library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this library. If not, see <http://www.gnu.org/licenses/>.
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['stableinterface'],
- 'supported_by': 'core'}
-
-
-DOCUMENTATION = '''
----
-module: s3_bucket
-short_description: Manage S3 buckets in AWS, DigitalOcean, Ceph, Walrus, FakeS3 and StorageGRID
-description:
- - Manage S3 buckets in AWS, DigitalOcean, Ceph, Walrus, FakeS3 and StorageGRID
-version_added: "2.0"
-requirements: [ boto3 ]
-author: "Rob White (@wimnat)"
-options:
- force:
- description:
- - When trying to delete a bucket, delete all keys (including versions and delete markers)
- in the bucket first (an s3 bucket must be empty for a successful deletion)
- type: bool
- default: 'no'
- name:
- description:
- - Name of the s3 bucket
- required: true
- type: str
- policy:
- description:
- - The JSON policy as a string.
- type: json
- s3_url:
- description:
- - S3 URL endpoint for usage with DigitalOcean, Ceph, Eucalyptus and fakes3 etc.
- - Assumes AWS if not specified.
- - For Walrus, use FQDN of the endpoint without scheme nor path.
- aliases: [ S3_URL ]
- type: str
- ceph:
- description:
- - Enable API compatibility with Ceph. It takes into account the S3 API subset working
- with Ceph in order to provide the same module behaviour where possible.
- type: bool
- version_added: "2.2"
- requester_pays:
- description:
- - With Requester Pays buckets, the requester instead of the bucket owner pays the cost
- of the request and the data download from the bucket.
- type: bool
- default: False
- state:
- description:
- - Create or remove the s3 bucket
- required: false
- default: present
- choices: [ 'present', 'absent' ]
- type: str
- tags:
- description:
- - tags dict to apply to bucket
- type: dict
- purge_tags:
- description:
- - whether to remove tags that aren't present in the C(tags) parameter
- type: bool
- default: True
- version_added: "2.9"
- versioning:
- description:
- - Whether versioning is enabled or disabled (note that once versioning is enabled, it can only be suspended)
- type: bool
- encryption:
- description:
- - Describes the default server-side encryption to apply to new objects in the bucket.
- In order to remove the server-side encryption, the encryption needs to be set to 'none' explicitly.
- choices: [ 'none', 'AES256', 'aws:kms' ]
- version_added: "2.9"
- type: str
- encryption_key_id:
- description: KMS master key ID to use for the default encryption. This parameter is allowed if encryption is aws:kms. If
- not specified then it will default to the AWS provided KMS key.
- version_added: "2.9"
- type: str
-extends_documentation_fragment:
- - aws
- - ec2
-notes:
- - If C(requestPayment), C(policy), C(tagging) or C(versioning)
- operations/API aren't implemented by the endpoint, module doesn't fail
- if each parameter satisfies the following condition.
- I(requester_pays) is C(False), I(policy), I(tags), and I(versioning) are C(None).
-'''
-
-EXAMPLES = '''
-# Note: These examples do not set authentication details, see the AWS Guide for details.
-
-# Create a simple s3 bucket
-- s3_bucket:
- name: mys3bucket
- state: present
-
-# Create a simple s3 bucket on Ceph Rados Gateway
-- s3_bucket:
- name: mys3bucket
- s3_url: http://your-ceph-rados-gateway-server.xxx
- ceph: true
-
-# Remove an s3 bucket and any keys it contains
-- s3_bucket:
- name: mys3bucket
- state: absent
- force: yes
-
-# Create a bucket, add a policy from a file, enable requester pays, enable versioning and tag
-- s3_bucket:
- name: mys3bucket
- policy: "{{ lookup('file','policy.json') }}"
- requester_pays: yes
- versioning: yes
- tags:
- example: tag1
- another: tag2
-
-# Create a simple DigitalOcean Spaces bucket using their provided regional endpoint
-- s3_bucket:
- name: mydobucket
- s3_url: 'https://nyc3.digitaloceanspaces.com'
-
-# Create a bucket with AES256 encryption
-- s3_bucket:
- name: mys3bucket
- state: present
- encryption: "AES256"
-
-# Create a bucket with aws:kms encryption, KMS key
-- s3_bucket:
- name: mys3bucket
- state: present
- encryption: "aws:kms"
- encryption_key_id: "arn:aws:kms:us-east-1:1234/5678example"
-
-# Create a bucket with aws:kms encryption, default key
-- s3_bucket:
- name: mys3bucket
- state: present
- encryption: "aws:kms"
-'''
-
-import json
-import os
-import time
-
-from ansible.module_utils.six.moves.urllib.parse import urlparse
-from ansible.module_utils.six import string_types
-from ansible.module_utils.basic import to_text
-from ansible.module_utils.aws.core import AnsibleAWSModule, is_boto3_error_code
-from ansible.module_utils.ec2 import compare_policies, ec2_argument_spec, boto3_tag_list_to_ansible_dict, ansible_dict_to_boto3_tag_list
-from ansible.module_utils.ec2 import get_aws_connection_info, boto3_conn, AWSRetry
-
-try:
- from botocore.exceptions import BotoCoreError, ClientError, EndpointConnectionError, WaiterError
-except ImportError:
- pass # handled by AnsibleAWSModule
-
-
-def create_or_update_bucket(s3_client, module, location):
-
- policy = module.params.get("policy")
- name = module.params.get("name")
- requester_pays = module.params.get("requester_pays")
- tags = module.params.get("tags")
- purge_tags = module.params.get("purge_tags")
- versioning = module.params.get("versioning")
- encryption = module.params.get("encryption")
- encryption_key_id = module.params.get("encryption_key_id")
- changed = False
- result = {}
-
- try:
- bucket_is_present = bucket_exists(s3_client, name)
- except EndpointConnectionError as e:
- module.fail_json_aws(e, msg="Invalid endpoint provided: %s" % to_text(e))
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to check bucket presence")
-
- if not bucket_is_present:
- try:
- bucket_changed = create_bucket(s3_client, name, location)
- s3_client.get_waiter('bucket_exists').wait(Bucket=name)
- changed = changed or bucket_changed
- except WaiterError as e:
- module.fail_json_aws(e, msg='An error occurred waiting for the bucket to become available')
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed while creating bucket")
-
- # Versioning
- try:
- versioning_status = get_bucket_versioning(s3_client, name)
- except BotoCoreError as exp:
- module.fail_json_aws(exp, msg="Failed to get bucket versioning")
- except ClientError as exp:
- if exp.response['Error']['Code'] != 'NotImplemented' or versioning is not None:
- module.fail_json_aws(exp, msg="Failed to get bucket versioning")
- else:
- if versioning is not None:
- required_versioning = None
- if versioning and versioning_status.get('Status') != "Enabled":
- required_versioning = 'Enabled'
- elif not versioning and versioning_status.get('Status') == "Enabled":
- required_versioning = 'Suspended'
-
- if required_versioning:
- try:
- put_bucket_versioning(s3_client, name, required_versioning)
- changed = True
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to update bucket versioning")
-
- versioning_status = wait_versioning_is_applied(module, s3_client, name, required_versioning)
-
- # This output format is there to ensure compatibility with previous versions of the module
- result['versioning'] = {
- 'Versioning': versioning_status.get('Status', 'Disabled'),
- 'MfaDelete': versioning_status.get('MFADelete', 'Disabled'),
- }
-
- # Requester pays
- try:
- requester_pays_status = get_bucket_request_payment(s3_client, name)
- except BotoCoreError as exp:
- module.fail_json_aws(exp, msg="Failed to get bucket request payment")
- except ClientError as exp:
- if exp.response['Error']['Code'] not in ('NotImplemented', 'XNotImplemented') or requester_pays:
- module.fail_json_aws(exp, msg="Failed to get bucket request payment")
- else:
- if requester_pays:
- payer = 'Requester' if requester_pays else 'BucketOwner'
- if requester_pays_status != payer:
- put_bucket_request_payment(s3_client, name, payer)
- requester_pays_status = wait_payer_is_applied(module, s3_client, name, payer, should_fail=False)
- if requester_pays_status is None:
- # We have seen that it happens quite a lot of times that the put request was not taken into
- # account, so we retry one more time
- put_bucket_request_payment(s3_client, name, payer)
- requester_pays_status = wait_payer_is_applied(module, s3_client, name, payer, should_fail=True)
- changed = True
-
- result['requester_pays'] = requester_pays
-
- # Policy
- try:
- current_policy = get_bucket_policy(s3_client, name)
- except BotoCoreError as exp:
- module.fail_json_aws(exp, msg="Failed to get bucket policy")
- except ClientError as exp:
- if exp.response['Error']['Code'] != 'NotImplemented' or policy is not None:
- module.fail_json_aws(exp, msg="Failed to get bucket policy")
- else:
- if policy is not None:
- if isinstance(policy, string_types):
- policy = json.loads(policy)
-
- if not policy and current_policy:
- try:
- delete_bucket_policy(s3_client, name)
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to delete bucket policy")
- current_policy = wait_policy_is_applied(module, s3_client, name, policy)
- changed = True
- elif compare_policies(current_policy, policy):
- try:
- put_bucket_policy(s3_client, name, policy)
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to update bucket policy")
- current_policy = wait_policy_is_applied(module, s3_client, name, policy, should_fail=False)
- if current_policy is None:
- # As for request payement, it happens quite a lot of times that the put request was not taken into
- # account, so we retry one more time
- put_bucket_policy(s3_client, name, policy)
- current_policy = wait_policy_is_applied(module, s3_client, name, policy, should_fail=True)
- changed = True
-
- result['policy'] = current_policy
-
- # Tags
- try:
- current_tags_dict = get_current_bucket_tags_dict(s3_client, name)
- except BotoCoreError as exp:
- module.fail_json_aws(exp, msg="Failed to get bucket tags")
- except ClientError as exp:
- if exp.response['Error']['Code'] not in ('NotImplemented', 'XNotImplemented') or tags is not None:
- module.fail_json_aws(exp, msg="Failed to get bucket tags")
- else:
- if tags is not None:
- # Tags are always returned as text
- tags = dict((to_text(k), to_text(v)) for k, v in tags.items())
- if not purge_tags:
- # Ensure existing tags that aren't updated by desired tags remain
- current_copy = current_tags_dict.copy()
- current_copy.update(tags)
- tags = current_copy
- if current_tags_dict != tags:
- if tags:
- try:
- put_bucket_tagging(s3_client, name, tags)
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to update bucket tags")
- else:
- if purge_tags:
- try:
- delete_bucket_tagging(s3_client, name)
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to delete bucket tags")
- current_tags_dict = wait_tags_are_applied(module, s3_client, name, tags)
- changed = True
-
- result['tags'] = current_tags_dict
-
- # Encryption
- if hasattr(s3_client, "get_bucket_encryption"):
- try:
- current_encryption = get_bucket_encryption(s3_client, name)
- except (ClientError, BotoCoreError) as e:
- module.fail_json_aws(e, msg="Failed to get bucket encryption")
- elif encryption is not None:
- module.fail_json(msg="Using bucket encryption requires botocore version >= 1.7.41")
-
- if encryption is not None:
- current_encryption_algorithm = current_encryption.get('SSEAlgorithm') if current_encryption else None
- current_encryption_key = current_encryption.get('KMSMasterKeyID') if current_encryption else None
- if encryption == 'none' and current_encryption_algorithm is not None:
- try:
- delete_bucket_encryption(s3_client, name)
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to delete bucket encryption")
- current_encryption = wait_encryption_is_applied(module, s3_client, name, None)
- changed = True
- elif encryption != 'none' and (encryption != current_encryption_algorithm) or (encryption == 'aws:kms' and current_encryption_key != encryption_key_id):
- expected_encryption = {'SSEAlgorithm': encryption}
- if encryption == 'aws:kms' and encryption_key_id is not None:
- expected_encryption.update({'KMSMasterKeyID': encryption_key_id})
- try:
- put_bucket_encryption(s3_client, name, expected_encryption)
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to set bucket encryption")
- current_encryption = wait_encryption_is_applied(module, s3_client, name, expected_encryption)
- changed = True
-
- result['encryption'] = current_encryption
-
- module.exit_json(changed=changed, name=name, **result)
-
-
-def bucket_exists(s3_client, bucket_name):
- # head_bucket appeared to be really inconsistent, so we use list_buckets instead,
- # and loop over all the buckets, even if we know it's less performant :(
- all_buckets = s3_client.list_buckets(Bucket=bucket_name)['Buckets']
- return any(bucket['Name'] == bucket_name for bucket in all_buckets)
-
-
-@AWSRetry.exponential_backoff(max_delay=120)
-def create_bucket(s3_client, bucket_name, location):
- try:
- configuration = {}
- if location not in ('us-east-1', None):
- configuration['LocationConstraint'] = location
- if len(configuration) > 0:
- s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration=configuration)
- else:
- s3_client.create_bucket(Bucket=bucket_name)
- return True
- except ClientError as e:
- error_code = e.response['Error']['Code']
- if error_code == 'BucketAlreadyOwnedByYou':
- # We should never get there since we check the bucket presence before calling the create_or_update_bucket
- # method. However, the AWS Api sometimes fails to report bucket presence, so we catch this exception
- return False
- else:
- raise e
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def put_bucket_tagging(s3_client, bucket_name, tags):
- s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging={'TagSet': ansible_dict_to_boto3_tag_list(tags)})
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def put_bucket_policy(s3_client, bucket_name, policy):
- s3_client.put_bucket_policy(Bucket=bucket_name, Policy=json.dumps(policy))
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def delete_bucket_policy(s3_client, bucket_name):
- s3_client.delete_bucket_policy(Bucket=bucket_name)
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def get_bucket_policy(s3_client, bucket_name):
- try:
- current_policy = json.loads(s3_client.get_bucket_policy(Bucket=bucket_name).get('Policy'))
- except ClientError as e:
- if e.response['Error']['Code'] == 'NoSuchBucketPolicy':
- current_policy = None
- else:
- raise e
- return current_policy
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def put_bucket_request_payment(s3_client, bucket_name, payer):
- s3_client.put_bucket_request_payment(Bucket=bucket_name, RequestPaymentConfiguration={'Payer': payer})
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def get_bucket_request_payment(s3_client, bucket_name):
- return s3_client.get_bucket_request_payment(Bucket=bucket_name).get('Payer')
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def get_bucket_versioning(s3_client, bucket_name):
- return s3_client.get_bucket_versioning(Bucket=bucket_name)
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def put_bucket_versioning(s3_client, bucket_name, required_versioning):
- s3_client.put_bucket_versioning(Bucket=bucket_name, VersioningConfiguration={'Status': required_versioning})
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def get_bucket_encryption(s3_client, bucket_name):
- try:
- result = s3_client.get_bucket_encryption(Bucket=bucket_name)
- return result.get('ServerSideEncryptionConfiguration', {}).get('Rules', [])[0].get('ApplyServerSideEncryptionByDefault')
- except ClientError as e:
- if e.response['Error']['Code'] == 'ServerSideEncryptionConfigurationNotFoundError':
- return None
- else:
- raise e
- except (IndexError, KeyError):
- return None
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def put_bucket_encryption(s3_client, bucket_name, encryption):
- server_side_encryption_configuration = {'Rules': [{'ApplyServerSideEncryptionByDefault': encryption}]}
- s3_client.put_bucket_encryption(Bucket=bucket_name, ServerSideEncryptionConfiguration=server_side_encryption_configuration)
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def delete_bucket_tagging(s3_client, bucket_name):
- s3_client.delete_bucket_tagging(Bucket=bucket_name)
-
-
-@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket'])
-def delete_bucket_encryption(s3_client, bucket_name):
- s3_client.delete_bucket_encryption(Bucket=bucket_name)
-
-
-@AWSRetry.exponential_backoff(max_delay=120)
-def delete_bucket(s3_client, bucket_name):
- try:
- s3_client.delete_bucket(Bucket=bucket_name)
- except ClientError as e:
- if e.response['Error']['Code'] == 'NoSuchBucket':
- # This means bucket should have been in a deleting state when we checked it existence
- # We just ignore the error
- pass
- else:
- raise e
-
-
-def wait_policy_is_applied(module, s3_client, bucket_name, expected_policy, should_fail=True):
- for dummy in range(0, 12):
- try:
- current_policy = get_bucket_policy(s3_client, bucket_name)
- except (ClientError, BotoCoreError) as e:
- module.fail_json_aws(e, msg="Failed to get bucket policy")
-
- if compare_policies(current_policy, expected_policy):
- time.sleep(5)
- else:
- return current_policy
- if should_fail:
- module.fail_json(msg="Bucket policy failed to apply in the expected time")
- else:
- return None
-
-
-def wait_payer_is_applied(module, s3_client, bucket_name, expected_payer, should_fail=True):
- for dummy in range(0, 12):
- try:
- requester_pays_status = get_bucket_request_payment(s3_client, bucket_name)
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to get bucket request payment")
- if requester_pays_status != expected_payer:
- time.sleep(5)
- else:
- return requester_pays_status
- if should_fail:
- module.fail_json(msg="Bucket request payment failed to apply in the expected time")
- else:
- return None
-
-
-def wait_encryption_is_applied(module, s3_client, bucket_name, expected_encryption):
- for dummy in range(0, 12):
- try:
- encryption = get_bucket_encryption(s3_client, bucket_name)
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to get updated encryption for bucket")
- if encryption != expected_encryption:
- time.sleep(5)
- else:
- return encryption
- module.fail_json(msg="Bucket encryption failed to apply in the expected time")
-
-
-def wait_versioning_is_applied(module, s3_client, bucket_name, required_versioning):
- for dummy in range(0, 24):
- try:
- versioning_status = get_bucket_versioning(s3_client, bucket_name)
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to get updated versioning for bucket")
- if versioning_status.get('Status') != required_versioning:
- time.sleep(8)
- else:
- return versioning_status
- module.fail_json(msg="Bucket versioning failed to apply in the expected time")
-
-
-def wait_tags_are_applied(module, s3_client, bucket_name, expected_tags_dict):
- for dummy in range(0, 12):
- try:
- current_tags_dict = get_current_bucket_tags_dict(s3_client, bucket_name)
- except (ClientError, BotoCoreError) as e:
- module.fail_json_aws(e, msg="Failed to get bucket policy")
- if current_tags_dict != expected_tags_dict:
- time.sleep(5)
- else:
- return current_tags_dict
- module.fail_json(msg="Bucket tags failed to apply in the expected time")
-
-
-def get_current_bucket_tags_dict(s3_client, bucket_name):
- try:
- current_tags = s3_client.get_bucket_tagging(Bucket=bucket_name).get('TagSet')
- except ClientError as e:
- if e.response['Error']['Code'] == 'NoSuchTagSet':
- return {}
- raise e
-
- return boto3_tag_list_to_ansible_dict(current_tags)
-
-
-def paginated_list(s3_client, **pagination_params):
- pg = s3_client.get_paginator('list_objects_v2')
- for page in pg.paginate(**pagination_params):
- yield [data['Key'] for data in page.get('Contents', [])]
-
-
-def paginated_versions_list(s3_client, **pagination_params):
- try:
- pg = s3_client.get_paginator('list_object_versions')
- for page in pg.paginate(**pagination_params):
- # We have to merge the Versions and DeleteMarker lists here, as DeleteMarkers can still prevent a bucket deletion
- yield [(data['Key'], data['VersionId']) for data in (page.get('Versions', []) + page.get('DeleteMarkers', []))]
- except is_boto3_error_code('NoSuchBucket'):
- yield []
-
-
-def destroy_bucket(s3_client, module):
-
- force = module.params.get("force")
- name = module.params.get("name")
- try:
- bucket_is_present = bucket_exists(s3_client, name)
- except EndpointConnectionError as e:
- module.fail_json_aws(e, msg="Invalid endpoint provided: %s" % to_text(e))
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to check bucket presence")
-
- if not bucket_is_present:
- module.exit_json(changed=False)
-
- if force:
- # if there are contents then we need to delete them (including versions) before we can delete the bucket
- try:
- for key_version_pairs in paginated_versions_list(s3_client, Bucket=name):
- formatted_keys = [{'Key': key, 'VersionId': version} for key, version in key_version_pairs]
- for fk in formatted_keys:
- # remove VersionId from cases where they are `None` so that
- # unversioned objects are deleted using `DeleteObject`
- # rather than `DeleteObjectVersion`, improving backwards
- # compatibility with older IAM policies.
- if not fk.get('VersionId'):
- fk.pop('VersionId')
-
- if formatted_keys:
- resp = s3_client.delete_objects(Bucket=name, Delete={'Objects': formatted_keys})
- if resp.get('Errors'):
- module.fail_json(
- msg='Could not empty bucket before deleting. Could not delete objects: {0}'.format(
- ', '.join([k['Key'] for k in resp['Errors']])
- ),
- errors=resp['Errors'], response=resp
- )
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed while deleting bucket")
-
- try:
- delete_bucket(s3_client, name)
- s3_client.get_waiter('bucket_not_exists').wait(Bucket=name, WaiterConfig=dict(Delay=5, MaxAttempts=60))
- except WaiterError as e:
- module.fail_json_aws(e, msg='An error occurred waiting for the bucket to be deleted.')
- except (BotoCoreError, ClientError) as e:
- module.fail_json_aws(e, msg="Failed to delete bucket")
-
- module.exit_json(changed=True)
-
-
-def is_fakes3(s3_url):
- """ Return True if s3_url has scheme fakes3:// """
- if s3_url is not None:
- return urlparse(s3_url).scheme in ('fakes3', 'fakes3s')
- else:
- return False
-
-
-def get_s3_client(module, aws_connect_kwargs, location, ceph, s3_url):
- if s3_url and ceph: # TODO - test this
- ceph = urlparse(s3_url)
- params = dict(module=module, conn_type='client', resource='s3', use_ssl=ceph.scheme == 'https', region=location, endpoint=s3_url, **aws_connect_kwargs)
- elif is_fakes3(s3_url):
- fakes3 = urlparse(s3_url)
- port = fakes3.port
- if fakes3.scheme == 'fakes3s':
- protocol = "https"
- if port is None:
- port = 443
- else:
- protocol = "http"
- if port is None:
- port = 80
- params = dict(module=module, conn_type='client', resource='s3', region=location,
- endpoint="%s://%s:%s" % (protocol, fakes3.hostname, to_text(port)),
- use_ssl=fakes3.scheme == 'fakes3s', **aws_connect_kwargs)
- else:
- params = dict(module=module, conn_type='client', resource='s3', region=location, endpoint=s3_url, **aws_connect_kwargs)
- return boto3_conn(**params)
-
-
-def main():
-
- argument_spec = ec2_argument_spec()
- argument_spec.update(
- dict(
- force=dict(default=False, type='bool'),
- policy=dict(type='json'),
- name=dict(required=True),
- requester_pays=dict(default=False, type='bool'),
- s3_url=dict(aliases=['S3_URL']),
- state=dict(default='present', choices=['present', 'absent']),
- tags=dict(type='dict'),
- purge_tags=dict(type='bool', default=True),
- versioning=dict(type='bool'),
- ceph=dict(default=False, type='bool'),
- encryption=dict(choices=['none', 'AES256', 'aws:kms']),
- encryption_key_id=dict()
- )
- )
-
- module = AnsibleAWSModule(
- argument_spec=argument_spec,
- )
-
- region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
-
- if region in ('us-east-1', '', None):
- # default to US Standard region
- location = 'us-east-1'
- else:
- # Boto uses symbolic names for locations but region strings will
- # actually work fine for everything except us-east-1 (US Standard)
- location = region
-
- s3_url = module.params.get('s3_url')
- ceph = module.params.get('ceph')
-
- # allow eucarc environment variables to be used if ansible vars aren't set
- if not s3_url and 'S3_URL' in os.environ:
- s3_url = os.environ['S3_URL']
-
- if ceph and not s3_url:
- module.fail_json(msg='ceph flavour requires s3_url')
-
- # Look at s3_url and tweak connection settings
- # if connecting to Ceph RGW, Walrus or fakes3
- if s3_url:
- for key in ['validate_certs', 'security_token', 'profile_name']:
- aws_connect_kwargs.pop(key, None)
- s3_client = get_s3_client(module, aws_connect_kwargs, location, ceph, s3_url)
-
- if s3_client is None: # this should never happen
- module.fail_json(msg='Unknown error, failed to create s3 connection, no information from boto.')
-
- state = module.params.get("state")
- encryption = module.params.get("encryption")
- encryption_key_id = module.params.get("encryption_key_id")
-
- # Parameter validation
- if encryption_key_id is not None and encryption is None:
- module.fail_json(msg="You must specify encryption parameter along with encryption_key_id.")
- elif encryption_key_id is not None and encryption != 'aws:kms':
- module.fail_json(msg="Only 'aws:kms' is a valid option for encryption parameter when you specify encryption_key_id.")
-
- if state == 'present':
- create_or_update_bucket(s3_client, module, location)
- elif state == 'absent':
- destroy_bucket(s3_client, module)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/selogin.py b/test/support/integration/plugins/modules/selogin.py
deleted file mode 100644
index 6429ef36..00000000
--- a/test/support/integration/plugins/modules/selogin.py
+++ /dev/null
@@ -1,260 +0,0 @@
-#!/usr/bin/python
-
-# (c) 2017, Petr Lautrbach <plautrba@redhat.com>
-# Based on seport.py module (c) 2014, Dan Keder <dan.keder@gmail.com>
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = '''
----
-module: selogin
-short_description: Manages linux user to SELinux user mapping
-description:
- - Manages linux user to SELinux user mapping
-version_added: "2.8"
-options:
- login:
- description:
- - a Linux user
- required: true
- seuser:
- description:
- - SELinux user name
- required: true
- selevel:
- aliases: [ serange ]
- description:
- - MLS/MCS Security Range (MLS/MCS Systems only) SELinux Range for SELinux login mapping defaults to the SELinux user record range.
- default: s0
- state:
- description:
- - Desired mapping value.
- required: true
- default: present
- choices: [ 'present', 'absent' ]
- reload:
- description:
- - Reload SELinux policy after commit.
- default: yes
- ignore_selinux_state:
- description:
- - Run independent of selinux runtime state
- type: bool
- default: false
-notes:
- - The changes are persistent across reboots
- - Not tested on any debian based system
-requirements: [ 'libselinux', 'policycoreutils' ]
-author:
-- Dan Keder (@dankeder)
-- Petr Lautrbach (@bachradsusi)
-- James Cassell (@jamescassell)
-'''
-
-EXAMPLES = '''
-# Modify the default user on the system to the guest_u user
-- selogin:
- login: __default__
- seuser: guest_u
- state: present
-
-# Assign gijoe user on an MLS machine a range and to the staff_u user
-- selogin:
- login: gijoe
- seuser: staff_u
- serange: SystemLow-Secret
- state: present
-
-# Assign all users in the engineering group to the staff_u user
-- selogin:
- login: '%engineering'
- seuser: staff_u
- state: present
-'''
-
-RETURN = r'''
-# Default return values
-'''
-
-
-import traceback
-
-SELINUX_IMP_ERR = None
-try:
- import selinux
- HAVE_SELINUX = True
-except ImportError:
- SELINUX_IMP_ERR = traceback.format_exc()
- HAVE_SELINUX = False
-
-SEOBJECT_IMP_ERR = None
-try:
- import seobject
- HAVE_SEOBJECT = True
-except ImportError:
- SEOBJECT_IMP_ERR = traceback.format_exc()
- HAVE_SEOBJECT = False
-
-
-from ansible.module_utils.basic import AnsibleModule, missing_required_lib
-from ansible.module_utils._text import to_native
-
-
-def semanage_login_add(module, login, seuser, do_reload, serange='s0', sestore=''):
- """ Add linux user to SELinux user mapping
-
- :type module: AnsibleModule
- :param module: Ansible module
-
- :type login: str
- :param login: a Linux User or a Linux group if it begins with %
-
- :type seuser: str
- :param proto: An SELinux user ('__default__', 'unconfined_u', 'staff_u', ...), see 'semanage login -l'
-
- :type serange: str
- :param serange: SELinux MLS/MCS range (defaults to 's0')
-
- :type do_reload: bool
- :param do_reload: Whether to reload SELinux policy after commit
-
- :type sestore: str
- :param sestore: SELinux store
-
- :rtype: bool
- :return: True if the policy was changed, otherwise False
- """
- try:
- selogin = seobject.loginRecords(sestore)
- selogin.set_reload(do_reload)
- change = False
- all_logins = selogin.get_all()
- # module.fail_json(msg="%s: %s %s" % (all_logins, login, sestore))
- # for local_login in all_logins:
- if login not in all_logins.keys():
- change = True
- if not module.check_mode:
- selogin.add(login, seuser, serange)
- else:
- if all_logins[login][0] != seuser or all_logins[login][1] != serange:
- change = True
- if not module.check_mode:
- selogin.modify(login, seuser, serange)
-
- except (ValueError, KeyError, OSError, RuntimeError) as e:
- module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, to_native(e)), exception=traceback.format_exc())
-
- return change
-
-
-def semanage_login_del(module, login, seuser, do_reload, sestore=''):
- """ Delete linux user to SELinux user mapping
-
- :type module: AnsibleModule
- :param module: Ansible module
-
- :type login: str
- :param login: a Linux User or a Linux group if it begins with %
-
- :type seuser: str
- :param proto: An SELinux user ('__default__', 'unconfined_u', 'staff_u', ...), see 'semanage login -l'
-
- :type do_reload: bool
- :param do_reload: Whether to reload SELinux policy after commit
-
- :type sestore: str
- :param sestore: SELinux store
-
- :rtype: bool
- :return: True if the policy was changed, otherwise False
- """
- try:
- selogin = seobject.loginRecords(sestore)
- selogin.set_reload(do_reload)
- change = False
- all_logins = selogin.get_all()
- # module.fail_json(msg="%s: %s %s" % (all_logins, login, sestore))
- if login in all_logins.keys():
- change = True
- if not module.check_mode:
- selogin.delete(login)
-
- except (ValueError, KeyError, OSError, RuntimeError) as e:
- module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, to_native(e)), exception=traceback.format_exc())
-
- return change
-
-
-def get_runtime_status(ignore_selinux_state=False):
- return True if ignore_selinux_state is True else selinux.is_selinux_enabled()
-
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(
- ignore_selinux_state=dict(type='bool', default=False),
- login=dict(type='str', required=True),
- seuser=dict(type='str'),
- selevel=dict(type='str', aliases=['serange'], default='s0'),
- state=dict(type='str', default='present', choices=['absent', 'present']),
- reload=dict(type='bool', default=True),
- ),
- required_if=[
- ["state", "present", ["seuser"]]
- ],
- supports_check_mode=True
- )
- if not HAVE_SELINUX:
- module.fail_json(msg=missing_required_lib("libselinux"), exception=SELINUX_IMP_ERR)
-
- if not HAVE_SEOBJECT:
- module.fail_json(msg=missing_required_lib("seobject from policycoreutils"), exception=SEOBJECT_IMP_ERR)
-
- ignore_selinux_state = module.params['ignore_selinux_state']
-
- if not get_runtime_status(ignore_selinux_state):
- module.fail_json(msg="SELinux is disabled on this host.")
-
- login = module.params['login']
- seuser = module.params['seuser']
- serange = module.params['selevel']
- state = module.params['state']
- do_reload = module.params['reload']
-
- result = {
- 'login': login,
- 'seuser': seuser,
- 'serange': serange,
- 'state': state,
- }
-
- if state == 'present':
- result['changed'] = semanage_login_add(module, login, seuser, do_reload, serange)
- elif state == 'absent':
- result['changed'] = semanage_login_del(module, login, seuser, do_reload)
- else:
- module.fail_json(msg='Invalid value of argument "state": {0}'.format(state))
-
- module.exit_json(**result)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/support/integration/plugins/modules/x509_crl.py b/test/support/integration/plugins/modules/x509_crl.py
deleted file mode 100644
index 9bb83a5b..00000000
--- a/test/support/integration/plugins/modules/x509_crl.py
+++ /dev/null
@@ -1,783 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2019, Felix Fontein <felix@fontein.de>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: x509_crl
-version_added: "2.10"
-short_description: Generate Certificate Revocation Lists (CRLs)
-description:
- - This module allows one to (re)generate or update Certificate Revocation Lists (CRLs).
- - Certificates on the revocation list can be either specified via serial number and (optionally) their issuer,
- or as a path to a certificate file in PEM format.
-requirements:
- - cryptography >= 1.2
-author:
- - Felix Fontein (@felixfontein)
-options:
- state:
- description:
- - Whether the CRL file should exist or not, taking action if the state is different from what is stated.
- type: str
- default: present
- choices: [ absent, present ]
-
- mode:
- description:
- - Defines how to process entries of existing CRLs.
- - If set to C(generate), makes sure that the CRL has the exact set of revoked certificates
- as specified in I(revoked_certificates).
- - If set to C(update), makes sure that the CRL contains the revoked certificates from
- I(revoked_certificates), but can also contain other revoked certificates. If the CRL file
- already exists, all entries from the existing CRL will also be included in the new CRL.
- When using C(update), you might be interested in setting I(ignore_timestamps) to C(yes).
- type: str
- default: generate
- choices: [ generate, update ]
-
- force:
- description:
- - Should the CRL be forced to be regenerated.
- type: bool
- default: no
-
- backup:
- description:
- - Create a backup file including a timestamp so you can get the original
- CRL back if you overwrote it with a new one by accident.
- type: bool
- default: no
-
- path:
- description:
- - Remote absolute path where the generated CRL file should be created or is already located.
- type: path
- required: yes
-
- privatekey_path:
- description:
- - Path to the CA's private key to use when signing the CRL.
- - Either I(privatekey_path) or I(privatekey_content) must be specified if I(state) is C(present), but not both.
- type: path
-
- privatekey_content:
- description:
- - The content of the CA's private key to use when signing the CRL.
- - Either I(privatekey_path) or I(privatekey_content) must be specified if I(state) is C(present), but not both.
- type: str
-
- privatekey_passphrase:
- description:
- - The passphrase for the I(privatekey_path).
- - This is required if the private key is password protected.
- type: str
-
- issuer:
- description:
- - Key/value pairs that will be present in the issuer name field of the CRL.
- - If you need to specify more than one value with the same key, use a list as value.
- - Required if I(state) is C(present).
- type: dict
-
- last_update:
- description:
- - The point in time from which this CRL can be trusted.
- - Time can be specified either as relative time or as absolute timestamp.
- - Time will always be interpreted as UTC.
- - Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
- + C([w | d | h | m | s]) (e.g. C(+32w1d2h).
- - Note that if using relative time this module is NOT idempotent, except when
- I(ignore_timestamps) is set to C(yes).
- type: str
- default: "+0s"
-
- next_update:
- description:
- - "The absolute latest point in time by which this I(issuer) is expected to have issued
- another CRL. Many clients will treat a CRL as expired once I(next_update) occurs."
- - Time can be specified either as relative time or as absolute timestamp.
- - Time will always be interpreted as UTC.
- - Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
- + C([w | d | h | m | s]) (e.g. C(+32w1d2h).
- - Note that if using relative time this module is NOT idempotent, except when
- I(ignore_timestamps) is set to C(yes).
- - Required if I(state) is C(present).
- type: str
-
- digest:
- description:
- - Digest algorithm to be used when signing the CRL.
- type: str
- default: sha256
-
- revoked_certificates:
- description:
- - List of certificates to be revoked.
- - Required if I(state) is C(present).
- type: list
- elements: dict
- suboptions:
- path:
- description:
- - Path to a certificate in PEM format.
- - The serial number and issuer will be extracted from the certificate.
- - Mutually exclusive with I(content) and I(serial_number). One of these three options
- must be specified.
- type: path
- content:
- description:
- - Content of a certificate in PEM format.
- - The serial number and issuer will be extracted from the certificate.
- - Mutually exclusive with I(path) and I(serial_number). One of these three options
- must be specified.
- type: str
- serial_number:
- description:
- - Serial number of the certificate.
- - Mutually exclusive with I(path) and I(content). One of these three options must
- be specified.
- type: int
- revocation_date:
- description:
- - The point in time the certificate was revoked.
- - Time can be specified either as relative time or as absolute timestamp.
- - Time will always be interpreted as UTC.
- - Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
- + C([w | d | h | m | s]) (e.g. C(+32w1d2h).
- - Note that if using relative time this module is NOT idempotent, except when
- I(ignore_timestamps) is set to C(yes).
- type: str
- default: "+0s"
- issuer:
- description:
- - The certificate's issuer.
- - "Example: C(DNS:ca.example.org)"
- type: list
- elements: str
- issuer_critical:
- description:
- - Whether the certificate issuer extension should be critical.
- type: bool
- default: no
- reason:
- description:
- - The value for the revocation reason extension.
- type: str
- choices:
- - unspecified
- - key_compromise
- - ca_compromise
- - affiliation_changed
- - superseded
- - cessation_of_operation
- - certificate_hold
- - privilege_withdrawn
- - aa_compromise
- - remove_from_crl
- reason_critical:
- description:
- - Whether the revocation reason extension should be critical.
- type: bool
- default: no
- invalidity_date:
- description:
- - The point in time it was known/suspected that the private key was compromised
- or that the certificate otherwise became invalid.
- - Time can be specified either as relative time or as absolute timestamp.
- - Time will always be interpreted as UTC.
- - Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
- + C([w | d | h | m | s]) (e.g. C(+32w1d2h).
- - Note that if using relative time this module is NOT idempotent. This will NOT
- change when I(ignore_timestamps) is set to C(yes).
- type: str
- invalidity_date_critical:
- description:
- - Whether the invalidity date extension should be critical.
- type: bool
- default: no
-
- ignore_timestamps:
- description:
- - Whether the timestamps I(last_update), I(next_update) and I(revocation_date) (in
- I(revoked_certificates)) should be ignored for idempotency checks. The timestamp
- I(invalidity_date) in I(revoked_certificates) will never be ignored.
- - Use this in combination with relative timestamps for these values to get idempotency.
- type: bool
- default: no
-
- return_content:
- description:
- - If set to C(yes), will return the (current or generated) CRL's content as I(crl).
- type: bool
- default: no
-
-extends_documentation_fragment:
- - files
-
-notes:
- - All ASN.1 TIME values should be specified following the YYYYMMDDHHMMSSZ pattern.
- - Date specified should be UTC. Minutes and seconds are mandatory.
-'''
-
-EXAMPLES = r'''
-- name: Generate a CRL
- x509_crl:
- path: /etc/ssl/my-ca.crl
- privatekey_path: /etc/ssl/private/my-ca.pem
- issuer:
- CN: My CA
- last_update: "+0s"
- next_update: "+7d"
- revoked_certificates:
- - serial_number: 1234
- revocation_date: 20190331202428Z
- issuer:
- CN: My CA
- - serial_number: 2345
- revocation_date: 20191013152910Z
- reason: affiliation_changed
- invalidity_date: 20191001000000Z
- - path: /etc/ssl/crt/revoked-cert.pem
- revocation_date: 20191010010203Z
-'''
-
-RETURN = r'''
-filename:
- description: Path to the generated CRL
- returned: changed or success
- type: str
- sample: /path/to/my-ca.crl
-backup_file:
- description: Name of backup file created.
- returned: changed and if I(backup) is C(yes)
- type: str
- sample: /path/to/my-ca.crl.2019-03-09@11:22~
-privatekey:
- description: Path to the private CA key
- returned: changed or success
- type: str
- sample: /path/to/my-ca.pem
-issuer:
- description:
- - The CRL's issuer.
- - Note that for repeated values, only the last one will be returned.
- returned: success
- type: dict
- sample: '{"organizationName": "Ansible", "commonName": "ca.example.com"}'
-issuer_ordered:
- description: The CRL's issuer as an ordered list of tuples.
- returned: success
- type: list
- elements: list
- sample: '[["organizationName", "Ansible"], ["commonName": "ca.example.com"]]'
-last_update:
- description: The point in time from which this CRL can be trusted as ASN.1 TIME.
- returned: success
- type: str
- sample: 20190413202428Z
-next_update:
- description: The point in time from which a new CRL will be issued and the client has to check for it as ASN.1 TIME.
- returned: success
- type: str
- sample: 20190413202428Z
-digest:
- description: The signature algorithm used to sign the CRL.
- returned: success
- type: str
- sample: sha256WithRSAEncryption
-revoked_certificates:
- description: List of certificates to be revoked.
- returned: success
- type: list
- elements: dict
- contains:
- serial_number:
- description: Serial number of the certificate.
- type: int
- sample: 1234
- revocation_date:
- description: The point in time the certificate was revoked as ASN.1 TIME.
- type: str
- sample: 20190413202428Z
- issuer:
- description: The certificate's issuer.
- type: list
- elements: str
- sample: '["DNS:ca.example.org"]'
- issuer_critical:
- description: Whether the certificate issuer extension is critical.
- type: bool
- sample: no
- reason:
- description:
- - The value for the revocation reason extension.
- - One of C(unspecified), C(key_compromise), C(ca_compromise), C(affiliation_changed), C(superseded),
- C(cessation_of_operation), C(certificate_hold), C(privilege_withdrawn), C(aa_compromise), and
- C(remove_from_crl).
- type: str
- sample: key_compromise
- reason_critical:
- description: Whether the revocation reason extension is critical.
- type: bool
- sample: no
- invalidity_date:
- description: |
- The point in time it was known/suspected that the private key was compromised
- or that the certificate otherwise became invalid as ASN.1 TIME.
- type: str
- sample: 20190413202428Z
- invalidity_date_critical:
- description: Whether the invalidity date extension is critical.
- type: bool
- sample: no
-crl:
- description: The (current or generated) CRL's content.
- returned: if I(state) is C(present) and I(return_content) is C(yes)
- type: str
-'''
-
-
-import os
-import traceback
-from ansible.module_utils.compat.version import LooseVersion
-
-from ansible.module_utils import crypto as crypto_utils
-from ansible.module_utils._text import to_native, to_text
-from ansible.module_utils.basic import AnsibleModule, missing_required_lib
-
-MINIMAL_CRYPTOGRAPHY_VERSION = '1.2'
-
-CRYPTOGRAPHY_IMP_ERR = None
-try:
- import cryptography
- from cryptography import x509
- from cryptography.hazmat.backends import default_backend
- from cryptography.hazmat.primitives.serialization import Encoding
- from cryptography.x509 import (
- CertificateRevocationListBuilder,
- RevokedCertificateBuilder,
- NameAttribute,
- Name,
- )
- CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
-except ImportError:
- CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
- CRYPTOGRAPHY_FOUND = False
-else:
- CRYPTOGRAPHY_FOUND = True
-
-
-TIMESTAMP_FORMAT = "%Y%m%d%H%M%SZ"
-
-
-class CRLError(crypto_utils.OpenSSLObjectError):
- pass
-
-
-class CRL(crypto_utils.OpenSSLObject):
-
- def __init__(self, module):
- super(CRL, self).__init__(
- module.params['path'],
- module.params['state'],
- module.params['force'],
- module.check_mode
- )
-
- self.update = module.params['mode'] == 'update'
- self.ignore_timestamps = module.params['ignore_timestamps']
- self.return_content = module.params['return_content']
- self.crl_content = None
-
- self.privatekey_path = module.params['privatekey_path']
- self.privatekey_content = module.params['privatekey_content']
- if self.privatekey_content is not None:
- self.privatekey_content = self.privatekey_content.encode('utf-8')
- self.privatekey_passphrase = module.params['privatekey_passphrase']
-
- self.issuer = crypto_utils.parse_name_field(module.params['issuer'])
- self.issuer = [(entry[0], entry[1]) for entry in self.issuer if entry[1]]
-
- self.last_update = crypto_utils.get_relative_time_option(module.params['last_update'], 'last_update')
- self.next_update = crypto_utils.get_relative_time_option(module.params['next_update'], 'next_update')
-
- self.digest = crypto_utils.select_message_digest(module.params['digest'])
- if self.digest is None:
- raise CRLError('The digest "{0}" is not supported'.format(module.params['digest']))
-
- self.revoked_certificates = []
- for i, rc in enumerate(module.params['revoked_certificates']):
- result = {
- 'serial_number': None,
- 'revocation_date': None,
- 'issuer': None,
- 'issuer_critical': False,
- 'reason': None,
- 'reason_critical': False,
- 'invalidity_date': None,
- 'invalidity_date_critical': False,
- }
- path_prefix = 'revoked_certificates[{0}].'.format(i)
- if rc['path'] is not None or rc['content'] is not None:
- # Load certificate from file or content
- try:
- if rc['content'] is not None:
- rc['content'] = rc['content'].encode('utf-8')
- cert = crypto_utils.load_certificate(rc['path'], content=rc['content'], backend='cryptography')
- try:
- result['serial_number'] = cert.serial_number
- except AttributeError:
- # The property was called "serial" before cryptography 1.4
- result['serial_number'] = cert.serial
- except crypto_utils.OpenSSLObjectError as e:
- if rc['content'] is not None:
- module.fail_json(
- msg='Cannot parse certificate from {0}content: {1}'.format(path_prefix, to_native(e))
- )
- else:
- module.fail_json(
- msg='Cannot read certificate "{1}" from {0}path: {2}'.format(path_prefix, rc['path'], to_native(e))
- )
- else:
- # Specify serial_number (and potentially issuer) directly
- result['serial_number'] = rc['serial_number']
- # All other options
- if rc['issuer']:
- result['issuer'] = [crypto_utils.cryptography_get_name(issuer) for issuer in rc['issuer']]
- result['issuer_critical'] = rc['issuer_critical']
- result['revocation_date'] = crypto_utils.get_relative_time_option(
- rc['revocation_date'],
- path_prefix + 'revocation_date'
- )
- if rc['reason']:
- result['reason'] = crypto_utils.REVOCATION_REASON_MAP[rc['reason']]
- result['reason_critical'] = rc['reason_critical']
- if rc['invalidity_date']:
- result['invalidity_date'] = crypto_utils.get_relative_time_option(
- rc['invalidity_date'],
- path_prefix + 'invalidity_date'
- )
- result['invalidity_date_critical'] = rc['invalidity_date_critical']
- self.revoked_certificates.append(result)
-
- self.module = module
-
- self.backup = module.params['backup']
- self.backup_file = None
-
- try:
- self.privatekey = crypto_utils.load_privatekey(
- path=self.privatekey_path,
- content=self.privatekey_content,
- passphrase=self.privatekey_passphrase,
- backend='cryptography'
- )
- except crypto_utils.OpenSSLBadPassphraseError as exc:
- raise CRLError(exc)
-
- self.crl = None
- try:
- with open(self.path, 'rb') as f:
- data = f.read()
- self.crl = x509.load_pem_x509_crl(data, default_backend())
- if self.return_content:
- self.crl_content = data
- except Exception as dummy:
- self.crl_content = None
-
- def remove(self):
- if self.backup:
- self.backup_file = self.module.backup_local(self.path)
- super(CRL, self).remove(self.module)
-
- def _compress_entry(self, entry):
- if self.ignore_timestamps:
- # Throw out revocation_date
- return (
- entry['serial_number'],
- tuple(entry['issuer']) if entry['issuer'] is not None else None,
- entry['issuer_critical'],
- entry['reason'],
- entry['reason_critical'],
- entry['invalidity_date'],
- entry['invalidity_date_critical'],
- )
- else:
- return (
- entry['serial_number'],
- entry['revocation_date'],
- tuple(entry['issuer']) if entry['issuer'] is not None else None,
- entry['issuer_critical'],
- entry['reason'],
- entry['reason_critical'],
- entry['invalidity_date'],
- entry['invalidity_date_critical'],
- )
-
- def check(self, perms_required=True):
- """Ensure the resource is in its desired state."""
-
- state_and_perms = super(CRL, self).check(self.module, perms_required)
-
- if not state_and_perms:
- return False
-
- if self.crl is None:
- return False
-
- if self.last_update != self.crl.last_update and not self.ignore_timestamps:
- return False
- if self.next_update != self.crl.next_update and not self.ignore_timestamps:
- return False
- if self.digest.name != self.crl.signature_hash_algorithm.name:
- return False
-
- want_issuer = [(crypto_utils.cryptography_name_to_oid(entry[0]), entry[1]) for entry in self.issuer]
- if want_issuer != [(sub.oid, sub.value) for sub in self.crl.issuer]:
- return False
-
- old_entries = [self._compress_entry(crypto_utils.cryptography_decode_revoked_certificate(cert)) for cert in self.crl]
- new_entries = [self._compress_entry(cert) for cert in self.revoked_certificates]
- if self.update:
- # We don't simply use a set so that duplicate entries are treated correctly
- for entry in new_entries:
- try:
- old_entries.remove(entry)
- except ValueError:
- return False
- else:
- if old_entries != new_entries:
- return False
-
- return True
-
- def _generate_crl(self):
- backend = default_backend()
- crl = CertificateRevocationListBuilder()
-
- try:
- crl = crl.issuer_name(Name([
- NameAttribute(crypto_utils.cryptography_name_to_oid(entry[0]), to_text(entry[1]))
- for entry in self.issuer
- ]))
- except ValueError as e:
- raise CRLError(e)
-
- crl = crl.last_update(self.last_update)
- crl = crl.next_update(self.next_update)
-
- if self.update and self.crl:
- new_entries = set([self._compress_entry(entry) for entry in self.revoked_certificates])
- for entry in self.crl:
- decoded_entry = self._compress_entry(crypto_utils.cryptography_decode_revoked_certificate(entry))
- if decoded_entry not in new_entries:
- crl = crl.add_revoked_certificate(entry)
- for entry in self.revoked_certificates:
- revoked_cert = RevokedCertificateBuilder()
- revoked_cert = revoked_cert.serial_number(entry['serial_number'])
- revoked_cert = revoked_cert.revocation_date(entry['revocation_date'])
- if entry['issuer'] is not None:
- revoked_cert = revoked_cert.add_extension(
- x509.CertificateIssuer([
- crypto_utils.cryptography_get_name(name) for name in self.entry['issuer']
- ]),
- entry['issuer_critical']
- )
- if entry['reason'] is not None:
- revoked_cert = revoked_cert.add_extension(
- x509.CRLReason(entry['reason']),
- entry['reason_critical']
- )
- if entry['invalidity_date'] is not None:
- revoked_cert = revoked_cert.add_extension(
- x509.InvalidityDate(entry['invalidity_date']),
- entry['invalidity_date_critical']
- )
- crl = crl.add_revoked_certificate(revoked_cert.build(backend))
-
- self.crl = crl.sign(self.privatekey, self.digest, backend=backend)
- return self.crl.public_bytes(Encoding.PEM)
-
- def generate(self):
- if not self.check(perms_required=False) or self.force:
- result = self._generate_crl()
- if self.return_content:
- self.crl_content = result
- if self.backup:
- self.backup_file = self.module.backup_local(self.path)
- crypto_utils.write_file(self.module, result)
- self.changed = True
-
- file_args = self.module.load_file_common_arguments(self.module.params)
- if self.module.set_fs_attributes_if_different(file_args, False):
- self.changed = True
-
- def _dump_revoked(self, entry):
- return {
- 'serial_number': entry['serial_number'],
- 'revocation_date': entry['revocation_date'].strftime(TIMESTAMP_FORMAT),
- 'issuer':
- [crypto_utils.cryptography_decode_name(issuer) for issuer in entry['issuer']]
- if entry['issuer'] is not None else None,
- 'issuer_critical': entry['issuer_critical'],
- 'reason': crypto_utils.REVOCATION_REASON_MAP_INVERSE.get(entry['reason']) if entry['reason'] is not None else None,
- 'reason_critical': entry['reason_critical'],
- 'invalidity_date':
- entry['invalidity_date'].strftime(TIMESTAMP_FORMAT)
- if entry['invalidity_date'] is not None else None,
- 'invalidity_date_critical': entry['invalidity_date_critical'],
- }
-
- def dump(self, check_mode=False):
- result = {
- 'changed': self.changed,
- 'filename': self.path,
- 'privatekey': self.privatekey_path,
- 'last_update': None,
- 'next_update': None,
- 'digest': None,
- 'issuer_ordered': None,
- 'issuer': None,
- 'revoked_certificates': [],
- }
- if self.backup_file:
- result['backup_file'] = self.backup_file
-
- if check_mode:
- result['last_update'] = self.last_update.strftime(TIMESTAMP_FORMAT)
- result['next_update'] = self.next_update.strftime(TIMESTAMP_FORMAT)
- # result['digest'] = crypto_utils.cryptography_oid_to_name(self.crl.signature_algorithm_oid)
- result['digest'] = self.module.params['digest']
- result['issuer_ordered'] = self.issuer
- result['issuer'] = {}
- for k, v in self.issuer:
- result['issuer'][k] = v
- result['revoked_certificates'] = []
- for entry in self.revoked_certificates:
- result['revoked_certificates'].append(self._dump_revoked(entry))
- elif self.crl:
- result['last_update'] = self.crl.last_update.strftime(TIMESTAMP_FORMAT)
- result['next_update'] = self.crl.next_update.strftime(TIMESTAMP_FORMAT)
- try:
- result['digest'] = crypto_utils.cryptography_oid_to_name(self.crl.signature_algorithm_oid)
- except AttributeError:
- # Older cryptography versions don't have signature_algorithm_oid yet
- dotted = crypto_utils._obj2txt(
- self.crl._backend._lib,
- self.crl._backend._ffi,
- self.crl._x509_crl.sig_alg.algorithm
- )
- oid = x509.oid.ObjectIdentifier(dotted)
- result['digest'] = crypto_utils.cryptography_oid_to_name(oid)
- issuer = []
- for attribute in self.crl.issuer:
- issuer.append([crypto_utils.cryptography_oid_to_name(attribute.oid), attribute.value])
- result['issuer_ordered'] = issuer
- result['issuer'] = {}
- for k, v in issuer:
- result['issuer'][k] = v
- result['revoked_certificates'] = []
- for cert in self.crl:
- entry = crypto_utils.cryptography_decode_revoked_certificate(cert)
- result['revoked_certificates'].append(self._dump_revoked(entry))
-
- if self.return_content:
- result['crl'] = self.crl_content
-
- return result
-
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(
- state=dict(type='str', default='present', choices=['present', 'absent']),
- mode=dict(type='str', default='generate', choices=['generate', 'update']),
- force=dict(type='bool', default=False),
- backup=dict(type='bool', default=False),
- path=dict(type='path', required=True),
- privatekey_path=dict(type='path'),
- privatekey_content=dict(type='str'),
- privatekey_passphrase=dict(type='str', no_log=True),
- issuer=dict(type='dict'),
- last_update=dict(type='str', default='+0s'),
- next_update=dict(type='str'),
- digest=dict(type='str', default='sha256'),
- ignore_timestamps=dict(type='bool', default=False),
- return_content=dict(type='bool', default=False),
- revoked_certificates=dict(
- type='list',
- elements='dict',
- options=dict(
- path=dict(type='path'),
- content=dict(type='str'),
- serial_number=dict(type='int'),
- revocation_date=dict(type='str', default='+0s'),
- issuer=dict(type='list', elements='str'),
- issuer_critical=dict(type='bool', default=False),
- reason=dict(
- type='str',
- choices=[
- 'unspecified', 'key_compromise', 'ca_compromise', 'affiliation_changed',
- 'superseded', 'cessation_of_operation', 'certificate_hold',
- 'privilege_withdrawn', 'aa_compromise', 'remove_from_crl'
- ]
- ),
- reason_critical=dict(type='bool', default=False),
- invalidity_date=dict(type='str'),
- invalidity_date_critical=dict(type='bool', default=False),
- ),
- required_one_of=[['path', 'content', 'serial_number']],
- mutually_exclusive=[['path', 'content', 'serial_number']],
- ),
- ),
- required_if=[
- ('state', 'present', ['privatekey_path', 'privatekey_content'], True),
- ('state', 'present', ['issuer', 'next_update', 'revoked_certificates'], False),
- ],
- mutually_exclusive=(
- ['privatekey_path', 'privatekey_content'],
- ),
- supports_check_mode=True,
- add_file_common_args=True,
- )
-
- if not CRYPTOGRAPHY_FOUND:
- module.fail_json(msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
- exception=CRYPTOGRAPHY_IMP_ERR)
-
- try:
- crl = CRL(module)
-
- if module.params['state'] == 'present':
- if module.check_mode:
- result = crl.dump(check_mode=True)
- result['changed'] = module.params['force'] or not crl.check()
- module.exit_json(**result)
-
- crl.generate()
- else:
- if module.check_mode:
- result = crl.dump(check_mode=True)
- result['changed'] = os.path.exists(module.params['path'])
- module.exit_json(**result)
-
- crl.remove()
-
- result = crl.dump()
- module.exit_json(**result)
- except crypto_utils.OpenSSLObjectError as exc:
- module.fail_json(msg=to_native(exc))
-
-
-if __name__ == "__main__":
- main()
diff --git a/test/support/integration/plugins/modules/x509_crl_info.py b/test/support/integration/plugins/modules/x509_crl_info.py
deleted file mode 100644
index b6d36320..00000000
--- a/test/support/integration/plugins/modules/x509_crl_info.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2020, Felix Fontein <felix@fontein.de>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: x509_crl_info
-version_added: "2.10"
-short_description: Retrieve information on Certificate Revocation Lists (CRLs)
-description:
- - This module allows one to retrieve information on Certificate Revocation Lists (CRLs).
-requirements:
- - cryptography >= 1.2
-author:
- - Felix Fontein (@felixfontein)
-options:
- path:
- description:
- - Remote absolute path where the generated CRL file should be created or is already located.
- - Either I(path) or I(content) must be specified, but not both.
- type: path
- content:
- description:
- - Content of the X.509 certificate in PEM format.
- - Either I(path) or I(content) must be specified, but not both.
- type: str
-
-notes:
- - All timestamp values are provided in ASN.1 TIME format, i.e. following the C(YYYYMMDDHHMMSSZ) pattern.
- They are all in UTC.
-seealso:
- - module: x509_crl
-'''
-
-EXAMPLES = r'''
-- name: Get information on CRL
- x509_crl_info:
- path: /etc/ssl/my-ca.crl
- register: result
-
-- debug:
- msg: "{{ result }}"
-'''
-
-RETURN = r'''
-issuer:
- description:
- - The CRL's issuer.
- - Note that for repeated values, only the last one will be returned.
- returned: success
- type: dict
- sample: '{"organizationName": "Ansible", "commonName": "ca.example.com"}'
-issuer_ordered:
- description: The CRL's issuer as an ordered list of tuples.
- returned: success
- type: list
- elements: list
- sample: '[["organizationName", "Ansible"], ["commonName": "ca.example.com"]]'
-last_update:
- description: The point in time from which this CRL can be trusted as ASN.1 TIME.
- returned: success
- type: str
- sample: 20190413202428Z
-next_update:
- description: The point in time from which a new CRL will be issued and the client has to check for it as ASN.1 TIME.
- returned: success
- type: str
- sample: 20190413202428Z
-digest:
- description: The signature algorithm used to sign the CRL.
- returned: success
- type: str
- sample: sha256WithRSAEncryption
-revoked_certificates:
- description: List of certificates to be revoked.
- returned: success
- type: list
- elements: dict
- contains:
- serial_number:
- description: Serial number of the certificate.
- type: int
- sample: 1234
- revocation_date:
- description: The point in time the certificate was revoked as ASN.1 TIME.
- type: str
- sample: 20190413202428Z
- issuer:
- description: The certificate's issuer.
- type: list
- elements: str
- sample: '["DNS:ca.example.org"]'
- issuer_critical:
- description: Whether the certificate issuer extension is critical.
- type: bool
- sample: no
- reason:
- description:
- - The value for the revocation reason extension.
- - One of C(unspecified), C(key_compromise), C(ca_compromise), C(affiliation_changed), C(superseded),
- C(cessation_of_operation), C(certificate_hold), C(privilege_withdrawn), C(aa_compromise), and
- C(remove_from_crl).
- type: str
- sample: key_compromise
- reason_critical:
- description: Whether the revocation reason extension is critical.
- type: bool
- sample: no
- invalidity_date:
- description: |
- The point in time it was known/suspected that the private key was compromised
- or that the certificate otherwise became invalid as ASN.1 TIME.
- type: str
- sample: 20190413202428Z
- invalidity_date_critical:
- description: Whether the invalidity date extension is critical.
- type: bool
- sample: no
-'''
-
-
-import traceback
-from ansible.module_utils.compat.version import LooseVersion
-
-from ansible.module_utils import crypto as crypto_utils
-from ansible.module_utils._text import to_native
-from ansible.module_utils.basic import AnsibleModule, missing_required_lib
-
-MINIMAL_CRYPTOGRAPHY_VERSION = '1.2'
-
-CRYPTOGRAPHY_IMP_ERR = None
-try:
- import cryptography
- from cryptography import x509
- from cryptography.hazmat.backends import default_backend
- CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
-except ImportError:
- CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
- CRYPTOGRAPHY_FOUND = False
-else:
- CRYPTOGRAPHY_FOUND = True
-
-
-TIMESTAMP_FORMAT = "%Y%m%d%H%M%SZ"
-
-
-class CRLError(crypto_utils.OpenSSLObjectError):
- pass
-
-
-class CRLInfo(crypto_utils.OpenSSLObject):
- """The main module implementation."""
-
- def __init__(self, module):
- super(CRLInfo, self).__init__(
- module.params['path'] or '',
- 'present',
- False,
- module.check_mode
- )
-
- self.content = module.params['content']
-
- self.module = module
-
- self.crl = None
- if self.content is None:
- try:
- with open(self.path, 'rb') as f:
- data = f.read()
- except Exception as e:
- self.module.fail_json(msg='Error while reading CRL file from disk: {0}'.format(e))
- else:
- data = self.content.encode('utf-8')
-
- try:
- self.crl = x509.load_pem_x509_crl(data, default_backend())
- except Exception as e:
- self.module.fail_json(msg='Error while decoding CRL: {0}'.format(e))
-
- def _dump_revoked(self, entry):
- return {
- 'serial_number': entry['serial_number'],
- 'revocation_date': entry['revocation_date'].strftime(TIMESTAMP_FORMAT),
- 'issuer':
- [crypto_utils.cryptography_decode_name(issuer) for issuer in entry['issuer']]
- if entry['issuer'] is not None else None,
- 'issuer_critical': entry['issuer_critical'],
- 'reason': crypto_utils.REVOCATION_REASON_MAP_INVERSE.get(entry['reason']) if entry['reason'] is not None else None,
- 'reason_critical': entry['reason_critical'],
- 'invalidity_date':
- entry['invalidity_date'].strftime(TIMESTAMP_FORMAT)
- if entry['invalidity_date'] is not None else None,
- 'invalidity_date_critical': entry['invalidity_date_critical'],
- }
-
- def get_info(self):
- result = {
- 'changed': False,
- 'last_update': None,
- 'next_update': None,
- 'digest': None,
- 'issuer_ordered': None,
- 'issuer': None,
- 'revoked_certificates': [],
- }
-
- result['last_update'] = self.crl.last_update.strftime(TIMESTAMP_FORMAT)
- result['next_update'] = self.crl.next_update.strftime(TIMESTAMP_FORMAT)
- try:
- result['digest'] = crypto_utils.cryptography_oid_to_name(self.crl.signature_algorithm_oid)
- except AttributeError:
- # Older cryptography versions don't have signature_algorithm_oid yet
- dotted = crypto_utils._obj2txt(
- self.crl._backend._lib,
- self.crl._backend._ffi,
- self.crl._x509_crl.sig_alg.algorithm
- )
- oid = x509.oid.ObjectIdentifier(dotted)
- result['digest'] = crypto_utils.cryptography_oid_to_name(oid)
- issuer = []
- for attribute in self.crl.issuer:
- issuer.append([crypto_utils.cryptography_oid_to_name(attribute.oid), attribute.value])
- result['issuer_ordered'] = issuer
- result['issuer'] = {}
- for k, v in issuer:
- result['issuer'][k] = v
- result['revoked_certificates'] = []
- for cert in self.crl:
- entry = crypto_utils.cryptography_decode_revoked_certificate(cert)
- result['revoked_certificates'].append(self._dump_revoked(entry))
-
- return result
-
- def generate(self):
- # Empty method because crypto_utils.OpenSSLObject wants this
- pass
-
- def dump(self):
- # Empty method because crypto_utils.OpenSSLObject wants this
- pass
-
-
-def main():
- module = AnsibleModule(
- argument_spec=dict(
- path=dict(type='path'),
- content=dict(type='str'),
- ),
- required_one_of=(
- ['path', 'content'],
- ),
- mutually_exclusive=(
- ['path', 'content'],
- ),
- supports_check_mode=True,
- )
-
- if not CRYPTOGRAPHY_FOUND:
- module.fail_json(msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
- exception=CRYPTOGRAPHY_IMP_ERR)
-
- try:
- crl = CRLInfo(module)
- result = crl.get_info()
- module.exit_json(**result)
- except crypto_utils.OpenSSLObjectError as e:
- module.fail_json(msg=to_native(e))
-
-
-if __name__ == "__main__":
- main()
diff --git a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py
index f99e6e76..72d6c868 100644
--- a/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py
+++ b/test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/filter/network.py
@@ -26,6 +26,7 @@ import os
import traceback
import string
+from collections.abc import Mapping
from xml.etree.ElementTree import fromstring
from ansible.module_utils._text import to_native, to_text
@@ -33,7 +34,6 @@ from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.u
Template,
)
from ansible.module_utils.six import iteritems, string_types
-from ansible.module_utils.common._collections_compat import Mapping
from ansible.errors import AnsibleError, AnsibleFilterError
from ansible.utils.display import Display
from ansible.utils.encrypt import passlib_or_crypt, random_password
diff --git a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py
index 8a390034..feba971a 100644
--- a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py
+++ b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py
@@ -35,9 +35,10 @@ import re
import time
import json
+from collections.abc import Mapping
+
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_text
-from ansible.module_utils.common._collections_compat import Mapping
from ansible.module_utils.six import iteritems
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.config import (
NetworkConfig,
diff --git a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py
index 30336031..3212615f 100644
--- a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py
+++ b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py
@@ -34,9 +34,10 @@ version_added: "2.4"
import re
import json
+from collections.abc import Mapping
+
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_text
-from ansible.module_utils.common._collections_compat import Mapping
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.config import (
NetworkConfig,
)
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py
index 0364d766..adb918be 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/action/win_copy.py
@@ -1 +1,522 @@
-../../../../../../plugins/action/win_copy.py \ No newline at end of file
+# This file is part of Ansible
+
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import base64
+import json
+import os
+import os.path
+import shutil
+import tempfile
+import traceback
+import zipfile
+
+from ansible import constants as C
+from ansible.errors import AnsibleError, AnsibleFileNotFound
+from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.plugins.action import ActionBase
+from ansible.utils.hashing import checksum
+
+
+def _walk_dirs(topdir, loader, decrypt=True, base_path=None, local_follow=False, trailing_slash_detector=None, checksum_check=False):
+ """
+ Walk a filesystem tree returning enough information to copy the files.
+ This is similar to the _walk_dirs function in ``copy.py`` but returns
+ a dict instead of a tuple for each entry and includes the checksum of
+ a local file if wanted.
+
+ :arg topdir: The directory that the filesystem tree is rooted at
+ :arg loader: The self._loader object from ActionBase
+ :kwarg decrypt: Whether to decrypt a file encrypted with ansible-vault
+ :kwarg base_path: The initial directory structure to strip off of the
+ files for the destination directory. If this is None (the default),
+ the base_path is set to ``top_dir``.
+ :kwarg local_follow: Whether to follow symlinks on the source. When set
+ to False, no symlinks are dereferenced. When set to True (the
+ default), the code will dereference most symlinks. However, symlinks
+ can still be present if needed to break a circular link.
+ :kwarg trailing_slash_detector: Function to determine if a path has
+ a trailing directory separator. Only needed when dealing with paths on
+ a remote machine (in which case, pass in a function that is aware of the
+ directory separator conventions on the remote machine).
+    :kwarg checksum_check: Whether to get the checksum of the local file and add it to the dict
+ :returns: dictionary of dictionaries. All of the path elements in the structure are text string.
+ This separates all the files, directories, and symlinks along with
+        important information about each::
+
+ {
+ 'files'; [{
+ src: '/absolute/path/to/copy/from',
+ dest: 'relative/path/to/copy/to',
+ checksum: 'b54ba7f5621240d403f06815f7246006ef8c7d43'
+ }, ...],
+ 'directories'; [{
+ src: '/absolute/path/to/copy/from',
+ dest: 'relative/path/to/copy/to'
+ }, ...],
+ 'symlinks'; [{
+ src: '/symlink/target/path',
+ dest: 'relative/path/to/copy/to'
+ }, ...],
+
+ }
+
+ The ``symlinks`` field is only populated if ``local_follow`` is set to False
+ *or* a circular symlink cannot be dereferenced. The ``checksum`` entry is set
+ to None if checksum_check=False.
+
+ """
+ # Convert the path segments into byte strings
+
+ r_files = {'files': [], 'directories': [], 'symlinks': []}
+
+ def _recurse(topdir, rel_offset, parent_dirs, rel_base=u'', checksum_check=False):
+ """
+        This is a closure (function utilizing variables from its parent
+ function's scope) so that we only need one copy of all the containers.
+ Note that this function uses side effects (See the Variables used from
+ outer scope).
+
+ :arg topdir: The directory we are walking for files
+ :arg rel_offset: Integer defining how many characters to strip off of
+ the beginning of a path
+ :arg parent_dirs: Directories that we're copying that this directory is in.
+ :kwarg rel_base: String to prepend to the path after ``rel_offset`` is
+ applied to form the relative path.
+
+ Variables used from the outer scope
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :r_files: Dictionary of files in the hierarchy. See the return value
+ for :func:`walk` for the structure of this dictionary.
+ :local_follow: Read-only inside of :func:`_recurse`. Whether to follow symlinks
+ """
+ for base_path, sub_folders, files in os.walk(topdir):
+ for filename in files:
+ filepath = os.path.join(base_path, filename)
+ dest_filepath = os.path.join(rel_base, filepath[rel_offset:])
+
+ if os.path.islink(filepath):
+                    # Dereference the symlink
+ real_file = loader.get_real_file(os.path.realpath(filepath), decrypt=decrypt)
+ if local_follow and os.path.isfile(real_file):
+ # Add the file pointed to by the symlink
+ r_files['files'].append(
+ {
+ "src": real_file,
+ "dest": dest_filepath,
+ "checksum": _get_local_checksum(checksum_check, real_file)
+ }
+ )
+ else:
+ # Mark this file as a symlink to copy
+ r_files['symlinks'].append({"src": os.readlink(filepath), "dest": dest_filepath})
+ else:
+ # Just a normal file
+ real_file = loader.get_real_file(filepath, decrypt=decrypt)
+ r_files['files'].append(
+ {
+ "src": real_file,
+ "dest": dest_filepath,
+ "checksum": _get_local_checksum(checksum_check, real_file)
+ }
+ )
+
+ for dirname in sub_folders:
+ dirpath = os.path.join(base_path, dirname)
+ dest_dirpath = os.path.join(rel_base, dirpath[rel_offset:])
+ real_dir = os.path.realpath(dirpath)
+ dir_stats = os.stat(real_dir)
+
+ if os.path.islink(dirpath):
+ if local_follow:
+ if (dir_stats.st_dev, dir_stats.st_ino) in parent_dirs:
+ # Just insert the symlink if the target directory
+ # exists inside of the copy already
+ r_files['symlinks'].append({"src": os.readlink(dirpath), "dest": dest_dirpath})
+ else:
+ # Walk the dirpath to find all parent directories.
+ new_parents = set()
+ parent_dir_list = os.path.dirname(dirpath).split(os.path.sep)
+ for parent in range(len(parent_dir_list), 0, -1):
+ parent_stat = os.stat(u'/'.join(parent_dir_list[:parent]))
+ if (parent_stat.st_dev, parent_stat.st_ino) in parent_dirs:
+ # Reached the point at which the directory
+ # tree is already known. Don't add any
+ # more or we might go to an ancestor that
+ # isn't being copied.
+ break
+ new_parents.add((parent_stat.st_dev, parent_stat.st_ino))
+
+ if (dir_stats.st_dev, dir_stats.st_ino) in new_parents:
+                                # This was a circular symlink. So add it as
+ # a symlink
+ r_files['symlinks'].append({"src": os.readlink(dirpath), "dest": dest_dirpath})
+ else:
+ # Walk the directory pointed to by the symlink
+ r_files['directories'].append({"src": real_dir, "dest": dest_dirpath})
+ offset = len(real_dir) + 1
+ _recurse(real_dir, offset, parent_dirs.union(new_parents),
+ rel_base=dest_dirpath,
+ checksum_check=checksum_check)
+ else:
+ # Add the symlink to the destination
+ r_files['symlinks'].append({"src": os.readlink(dirpath), "dest": dest_dirpath})
+ else:
+ # Just a normal directory
+ r_files['directories'].append({"src": dirpath, "dest": dest_dirpath})
+
+ # Check if the source ends with a "/" so that we know which directory
+ # level to work at (similar to rsync)
+ source_trailing_slash = False
+ if trailing_slash_detector:
+ source_trailing_slash = trailing_slash_detector(topdir)
+ else:
+ source_trailing_slash = topdir.endswith(os.path.sep)
+
+ # Calculate the offset needed to strip the base_path to make relative
+ # paths
+ if base_path is None:
+ base_path = topdir
+ if not source_trailing_slash:
+ base_path = os.path.dirname(base_path)
+ if topdir.startswith(base_path):
+ offset = len(base_path)
+
+ # Make sure we're making the new paths relative
+ if trailing_slash_detector and not trailing_slash_detector(base_path):
+ offset += 1
+ elif not base_path.endswith(os.path.sep):
+ offset += 1
+
+ if os.path.islink(topdir) and not local_follow:
+ r_files['symlinks'] = {"src": os.readlink(topdir), "dest": os.path.basename(topdir)}
+ return r_files
+
+ dir_stats = os.stat(topdir)
+ parents = frozenset(((dir_stats.st_dev, dir_stats.st_ino),))
+ # Actually walk the directory hierarchy
+ _recurse(topdir, offset, parents, checksum_check=checksum_check)
+
+ return r_files
+
+
+def _get_local_checksum(get_checksum, local_path):
+ if get_checksum:
+ return checksum(local_path)
+ else:
+ return None
+
+
+class ActionModule(ActionBase):
+
+ WIN_PATH_SEPARATOR = "\\"
+
+ def _create_content_tempfile(self, content):
+ ''' Create a tempfile containing defined content '''
+ fd, content_tempfile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
+ f = os.fdopen(fd, 'wb')
+ content = to_bytes(content)
+ try:
+ f.write(content)
+ except Exception as err:
+ os.remove(content_tempfile)
+ raise Exception(err)
+ finally:
+ f.close()
+ return content_tempfile
+
+ def _create_zip_tempfile(self, files, directories):
+ tmpdir = tempfile.mkdtemp(dir=C.DEFAULT_LOCAL_TMP)
+ zip_file_path = os.path.join(tmpdir, "win_copy.zip")
+ zip_file = zipfile.ZipFile(zip_file_path, "w", zipfile.ZIP_STORED, True)
+
+ # encoding the file/dir name with base64 so Windows can unzip a unicode
+ # filename and get the right name, Windows doesn't handle unicode names
+ # very well
+ for directory in directories:
+ directory_path = to_bytes(directory['src'], errors='surrogate_or_strict')
+ archive_path = to_bytes(directory['dest'], errors='surrogate_or_strict')
+
+ encoded_path = to_text(base64.b64encode(archive_path), errors='surrogate_or_strict')
+ zip_file.write(directory_path, encoded_path, zipfile.ZIP_DEFLATED)
+
+ for file in files:
+ file_path = to_bytes(file['src'], errors='surrogate_or_strict')
+ archive_path = to_bytes(file['dest'], errors='surrogate_or_strict')
+
+ encoded_path = to_text(base64.b64encode(archive_path), errors='surrogate_or_strict')
+ zip_file.write(file_path, encoded_path, zipfile.ZIP_DEFLATED)
+
+ return zip_file_path
+
+ def _remove_tempfile_if_content_defined(self, content, content_tempfile):
+ if content is not None:
+ os.remove(content_tempfile)
+
+ def _copy_single_file(self, local_file, dest, source_rel, task_vars, tmp, backup):
+ if self._play_context.check_mode:
+ module_return = dict(changed=True)
+ return module_return
+
+ # copy the file across to the server
+ tmp_src = self._connection._shell.join_path(tmp, 'source')
+ self._transfer_file(local_file, tmp_src)
+
+ copy_args = self._task.args.copy()
+ copy_args.update(
+ dict(
+ dest=dest,
+ src=tmp_src,
+ _original_basename=source_rel,
+ _copy_mode="single",
+ backup=backup,
+ )
+ )
+ copy_args.pop('content', None)
+
+ copy_result = self._execute_module(module_name="copy",
+ module_args=copy_args,
+ task_vars=task_vars)
+
+ return copy_result
+
+ def _copy_zip_file(self, dest, files, directories, task_vars, tmp, backup):
+ # create local zip file containing all the files and directories that
+ # need to be copied to the server
+ if self._play_context.check_mode:
+ module_return = dict(changed=True)
+ return module_return
+
+ try:
+ zip_file = self._create_zip_tempfile(files, directories)
+ except Exception as e:
+ module_return = dict(
+ changed=False,
+ failed=True,
+ msg="failed to create tmp zip file: %s" % to_text(e),
+ exception=traceback.format_exc()
+ )
+ return module_return
+
+ zip_path = self._loader.get_real_file(zip_file)
+
+ # send zip file to remote, file must end in .zip so
+ # Com Shell.Application works
+ tmp_src = self._connection._shell.join_path(tmp, 'source.zip')
+ self._transfer_file(zip_path, tmp_src)
+
+ # run the explode operation of win_copy on remote
+ copy_args = self._task.args.copy()
+ copy_args.update(
+ dict(
+ src=tmp_src,
+ dest=dest,
+ _copy_mode="explode",
+ backup=backup,
+ )
+ )
+ copy_args.pop('content', None)
+ module_return = self._execute_module(module_name='copy',
+ module_args=copy_args,
+ task_vars=task_vars)
+ shutil.rmtree(os.path.dirname(zip_path))
+ return module_return
+
+ def run(self, tmp=None, task_vars=None):
+ ''' handler for file transfer operations '''
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ source = self._task.args.get('src', None)
+ content = self._task.args.get('content', None)
+ dest = self._task.args.get('dest', None)
+ remote_src = boolean(self._task.args.get('remote_src', False), strict=False)
+ local_follow = boolean(self._task.args.get('local_follow', False), strict=False)
+ force = boolean(self._task.args.get('force', True), strict=False)
+ decrypt = boolean(self._task.args.get('decrypt', True), strict=False)
+ backup = boolean(self._task.args.get('backup', False), strict=False)
+
+ result['src'] = source
+ result['dest'] = dest
+
+ result['failed'] = True
+ if (source is None and content is None) or dest is None:
+ result['msg'] = "src (or content) and dest are required"
+ elif source is not None and content is not None:
+ result['msg'] = "src and content are mutually exclusive"
+ elif content is not None and dest is not None and (
+ dest.endswith(os.path.sep) or dest.endswith(self.WIN_PATH_SEPARATOR)):
+ result['msg'] = "dest must be a file if content is defined"
+ else:
+ del result['failed']
+
+ if result.get('failed'):
+ return result
+
+ # If content is defined make a temp file and write the content into it
+ content_tempfile = None
+ if content is not None:
+ try:
+ # if content comes to us as a dict it should be decoded json.
+ # We need to encode it back into a string and write it out
+ if isinstance(content, dict) or isinstance(content, list):
+ content_tempfile = self._create_content_tempfile(json.dumps(content))
+ else:
+ content_tempfile = self._create_content_tempfile(content)
+ source = content_tempfile
+ except Exception as err:
+ result['failed'] = True
+ result['msg'] = "could not write content tmp file: %s" % to_native(err)
+ return result
+ # all actions should occur on the remote server, run win_copy module
+ elif remote_src:
+ new_module_args = self._task.args.copy()
+ new_module_args.update(
+ dict(
+ _copy_mode="remote",
+ dest=dest,
+ src=source,
+ force=force,
+ backup=backup,
+ )
+ )
+ new_module_args.pop('content', None)
+ result.update(self._execute_module(module_args=new_module_args, task_vars=task_vars))
+ return result
+ # find_needle returns a path that may not have a trailing slash on a
+ # directory so we need to find that out first and append at the end
+ else:
+ trailing_slash = source.endswith(os.path.sep)
+ try:
+ # find in expected paths
+ source = self._find_needle('files', source)
+ except AnsibleError as e:
+ result['failed'] = True
+ result['msg'] = to_text(e)
+ result['exception'] = traceback.format_exc()
+ return result
+
+ if trailing_slash != source.endswith(os.path.sep):
+ if source[-1] == os.path.sep:
+ source = source[:-1]
+ else:
+ source = source + os.path.sep
+
+            # A dict of lists describing the source files/directories/symlinks (src/dest entries) to copy to the destination
+ source_files = {'files': [], 'directories': [], 'symlinks': []}
+
+ # If source is a directory populate our list else source is a file and translate it to a tuple.
+ if os.path.isdir(to_bytes(source, errors='surrogate_or_strict')):
+ result['operation'] = 'folder_copy'
+
+ # Get a list of the files we want to replicate on the remote side
+ source_files = _walk_dirs(source, self._loader, decrypt=decrypt, local_follow=local_follow,
+ trailing_slash_detector=self._connection._shell.path_has_trailing_slash,
+ checksum_check=force)
+
+ # If it's recursive copy, destination is always a dir,
+ # explicitly mark it so (note - win_copy module relies on this).
+ if not self._connection._shell.path_has_trailing_slash(dest):
+ dest = "%s%s" % (dest, self.WIN_PATH_SEPARATOR)
+
+ check_dest = dest
+ # Source is a file, add details to source_files dict
+ else:
+ result['operation'] = 'file_copy'
+
+ # If the local file does not exist, get_real_file() raises AnsibleFileNotFound
+ try:
+ source_full = self._loader.get_real_file(source, decrypt=decrypt)
+ except AnsibleFileNotFound as e:
+ result['failed'] = True
+ result['msg'] = "could not find src=%s, %s" % (source_full, to_text(e))
+ return result
+
+ original_basename = os.path.basename(source)
+ result['original_basename'] = original_basename
+
+ # check if dest ends with / or \ and append source filename to dest
+ if self._connection._shell.path_has_trailing_slash(dest):
+ check_dest = dest
+ filename = original_basename
+ result['dest'] = self._connection._shell.join_path(dest, filename)
+ else:
+ # replace \\ with / so we can use os.path to get the filename or dirname
+ unix_path = dest.replace(self.WIN_PATH_SEPARATOR, os.path.sep)
+ filename = os.path.basename(unix_path)
+ check_dest = os.path.dirname(unix_path)
+
+ file_checksum = _get_local_checksum(force, source_full)
+ source_files['files'].append(
+ dict(
+ src=source_full,
+ dest=filename,
+ checksum=file_checksum
+ )
+ )
+ result['checksum'] = file_checksum
+ result['size'] = os.path.getsize(to_bytes(source_full, errors='surrogate_or_strict'))
+
+ # find out the files/directories/symlinks that we need to copy to the server
+ query_args = self._task.args.copy()
+ query_args.update(
+ dict(
+ _copy_mode="query",
+ dest=check_dest,
+ force=force,
+ files=source_files['files'],
+ directories=source_files['directories'],
+ symlinks=source_files['symlinks'],
+ )
+ )
+        # src is not required for query, will fail path validation if src has unix allowed chars
+ query_args.pop('src', None)
+
+ query_args.pop('content', None)
+ query_return = self._execute_module(module_args=query_args,
+ task_vars=task_vars)
+
+ if query_return.get('failed') is True:
+ result.update(query_return)
+ return result
+
+ if len(query_return['files']) > 0 or len(query_return['directories']) > 0 and self._connection._shell.tmpdir is None:
+ self._connection._shell.tmpdir = self._make_tmp_path()
+
+ if len(query_return['files']) == 1 and len(query_return['directories']) == 0:
+ # we only need to copy 1 file, don't mess around with zips
+ file_src = query_return['files'][0]['src']
+ file_dest = query_return['files'][0]['dest']
+ result.update(self._copy_single_file(file_src, dest, file_dest,
+ task_vars, self._connection._shell.tmpdir, backup))
+ if result.get('failed') is True:
+ result['msg'] = "failed to copy file %s: %s" % (file_src, result['msg'])
+ result['changed'] = True
+
+ elif len(query_return['files']) > 0 or len(query_return['directories']) > 0:
+ # either multiple files or directories need to be copied, compress
+ # to a zip and 'explode' the zip on the server
+ # TODO: handle symlinks
+ result.update(self._copy_zip_file(dest, source_files['files'],
+ source_files['directories'],
+ task_vars, self._connection._shell.tmpdir, backup))
+ result['changed'] = True
+ else:
+ # no operations need to occur
+ result['failed'] = False
+ result['changed'] = False
+
+ # remove the content tmp file and remote tmp file if it was created
+ self._remove_tempfile_if_content_defined(content, content_tempfile)
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+ return result
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/async_status.ps1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/async_status.ps1
index 6fc438d6..1ce3ff40 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/async_status.ps1
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/async_status.ps1
@@ -1 +1,58 @@
-../../../../../../plugins/modules/async_status.ps1 \ No newline at end of file
+#!powershell
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+#Requires -Module Ansible.ModuleUtils.Legacy
+
+$results = @{changed=$false}
+
+$parsed_args = Parse-Args $args
+$jid = Get-AnsibleParam $parsed_args "jid" -failifempty $true -resultobj $results
+$mode = Get-AnsibleParam $parsed_args "mode" -Default "status" -ValidateSet "status","cleanup"
+
+# parsed in from the async_status action plugin
+$async_dir = Get-AnsibleParam $parsed_args "_async_dir" -type "path" -failifempty $true
+
+$log_path = [System.IO.Path]::Combine($async_dir, $jid)
+
+If(-not $(Test-Path $log_path))
+{
+ Fail-Json @{ansible_job_id=$jid; started=1; finished=1} "could not find job at '$async_dir'"
+}
+
+If($mode -eq "cleanup") {
+ Remove-Item $log_path -Recurse
+ Exit-Json @{ansible_job_id=$jid; erased=$log_path}
+}
+
+# NOT in cleanup mode, assume regular status mode
+# no remote kill mode currently exists, but probably should
+# consider log_path + ".pid" file and also unlink that above
+
+$data = $null
+Try {
+ $data_raw = Get-Content $log_path
+
+ # TODO: move this into module_utils/powershell.ps1?
+ $jss = New-Object System.Web.Script.Serialization.JavaScriptSerializer
+ $data = $jss.DeserializeObject($data_raw)
+}
+Catch {
+ If(-not $data_raw) {
+ # file not written yet? That means it is running
+ Exit-Json @{results_file=$log_path; ansible_job_id=$jid; started=1; finished=0}
+ }
+ Else {
+ Fail-Json @{ansible_job_id=$jid; results_file=$log_path; started=1; finished=1} "Could not parse job output: $data"
+ }
+}
+
+If (-not $data.ContainsKey("started")) {
+ $data['finished'] = 1
+ $data['ansible_job_id'] = $jid
+}
+ElseIf (-not $data.ContainsKey("finished")) {
+ $data['finished'] = 0
+}
+
+Exit-Json $data
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.ps1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.ps1
index 81d8afa3..e3c38130 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.ps1
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.ps1
@@ -1 +1,225 @@
-../../../../../../plugins/modules/win_acl.ps1 \ No newline at end of file
+#!powershell
+
+# Copyright: (c) 2015, Phil Schwartz <schwartzmx@gmail.com>
+# Copyright: (c) 2015, Trond Hindenes
+# Copyright: (c) 2015, Hans-Joachim Kliemeck <git@kliemeck.de>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+#Requires -Module Ansible.ModuleUtils.Legacy
+#Requires -Module Ansible.ModuleUtils.PrivilegeUtil
+#Requires -Module Ansible.ModuleUtils.SID
+
+$ErrorActionPreference = "Stop"
+
+# win_acl module (File/Resources Permission Additions/Removal)
+
+#Functions
+function Get-UserSID {
+ param(
+ [String]$AccountName
+ )
+
+ $userSID = $null
+ $searchAppPools = $false
+
+ if ($AccountName.Split("\").Count -gt 1) {
+ if ($AccountName.Split("\")[0] -eq "IIS APPPOOL") {
+ $searchAppPools = $true
+ $AccountName = $AccountName.Split("\")[1]
+ }
+ }
+
+ if ($searchAppPools) {
+ Import-Module -Name WebAdministration
+ $testIISPath = Test-Path -LiteralPath "IIS:"
+ if ($testIISPath) {
+ $appPoolObj = Get-ItemProperty -LiteralPath "IIS:\AppPools\$AccountName"
+ $userSID = $appPoolObj.applicationPoolSid
+ }
+ }
+ else {
+ $userSID = Convert-ToSID -account_name $AccountName
+ }
+
+ return $userSID
+}
+
+$params = Parse-Args $args
+
+Function SetPrivilegeTokens() {
+ # Set privilege tokens only if admin.
+ # Admins would have these privs or be able to set these privs in the UI Anyway
+
+ $adminRole=[System.Security.Principal.WindowsBuiltInRole]::Administrator
+ $myWindowsID=[System.Security.Principal.WindowsIdentity]::GetCurrent()
+ $myWindowsPrincipal=new-object System.Security.Principal.WindowsPrincipal($myWindowsID)
+
+
+ if ($myWindowsPrincipal.IsInRole($adminRole)) {
+ # Need to adjust token privs when executing Set-ACL in certain cases.
+ # e.g. d:\testdir is owned by group in which current user is not a member and no perms are inherited from d:\
+ # This also sets us up for setting the owner as a feature.
+ # See the following for details of each privilege
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/bb530716(v=vs.85).aspx
+ $privileges = @(
+ "SeRestorePrivilege", # Grants all write access control to any file, regardless of ACL.
+ "SeBackupPrivilege", # Grants all read access control to any file, regardless of ACL.
+            "SeTakeOwnershipPrivilege"  # Grants ability to take ownership of an object w/out being granted discretionary access
+ )
+ foreach ($privilege in $privileges) {
+ $state = Get-AnsiblePrivilege -Name $privilege
+ if ($state -eq $false) {
+ Set-AnsiblePrivilege -Name $privilege -Value $true
+ }
+ }
+ }
+}
+
+
+$result = @{
+ changed = $false
+}
+
+$path = Get-AnsibleParam -obj $params -name "path" -type "str" -failifempty $true
+$user = Get-AnsibleParam -obj $params -name "user" -type "str" -failifempty $true
+$rights = Get-AnsibleParam -obj $params -name "rights" -type "str" -failifempty $true
+
+$type = Get-AnsibleParam -obj $params -name "type" -type "str" -failifempty $true -validateset "allow","deny"
+$state = Get-AnsibleParam -obj $params -name "state" -type "str" -default "present" -validateset "absent","present"
+
+$inherit = Get-AnsibleParam -obj $params -name "inherit" -type "str"
+$propagation = Get-AnsibleParam -obj $params -name "propagation" -type "str" -default "None" -validateset "InheritOnly","None","NoPropagateInherit"
+
+# We mount the HKCR, HKU, and HKCC registry hives so PS can access them.
+# Network paths have no qualifiers so we use -EA SilentlyContinue to ignore that
+$path_qualifier = Split-Path -Path $path -Qualifier -ErrorAction SilentlyContinue
+if ($path_qualifier -eq "HKCR:" -and (-not (Test-Path -LiteralPath HKCR:\))) {
+ New-PSDrive -Name HKCR -PSProvider Registry -Root HKEY_CLASSES_ROOT > $null
+}
+if ($path_qualifier -eq "HKU:" -and (-not (Test-Path -LiteralPath HKU:\))) {
+ New-PSDrive -Name HKU -PSProvider Registry -Root HKEY_USERS > $null
+}
+if ($path_qualifier -eq "HKCC:" -and (-not (Test-Path -LiteralPath HKCC:\))) {
+ New-PSDrive -Name HKCC -PSProvider Registry -Root HKEY_CURRENT_CONFIG > $null
+}
+
+If (-Not (Test-Path -LiteralPath $path)) {
+ Fail-Json -obj $result -message "$path file or directory does not exist on the host"
+}
+
+# Test that the user/group is resolvable on the local machine
+$sid = Get-UserSID -AccountName $user
+if (!$sid) {
+ Fail-Json -obj $result -message "$user is not a valid user or group on the host machine or domain"
+}
+
+If (Test-Path -LiteralPath $path -PathType Leaf) {
+ $inherit = "None"
+}
+ElseIf ($null -eq $inherit) {
+ $inherit = "ContainerInherit, ObjectInherit"
+}
+
+# Bug in Set-Acl, Get-Acl where -LiteralPath only works for the Registry provider if the location is in that root
+# qualifier. We also don't have a qualifier for a network path so only change if not null
+if ($null -ne $path_qualifier) {
+ Push-Location -LiteralPath $path_qualifier
+}
+
+Try {
+ SetPrivilegeTokens
+ $path_item = Get-Item -LiteralPath $path -Force
+ If ($path_item.PSProvider.Name -eq "Registry") {
+ $colRights = [System.Security.AccessControl.RegistryRights]$rights
+ }
+ Else {
+ $colRights = [System.Security.AccessControl.FileSystemRights]$rights
+ }
+
+ $InheritanceFlag = [System.Security.AccessControl.InheritanceFlags]$inherit
+ $PropagationFlag = [System.Security.AccessControl.PropagationFlags]$propagation
+
+ If ($type -eq "allow") {
+ $objType =[System.Security.AccessControl.AccessControlType]::Allow
+ }
+ Else {
+ $objType =[System.Security.AccessControl.AccessControlType]::Deny
+ }
+
+ $objUser = New-Object System.Security.Principal.SecurityIdentifier($sid)
+ If ($path_item.PSProvider.Name -eq "Registry") {
+ $objACE = New-Object System.Security.AccessControl.RegistryAccessRule ($objUser, $colRights, $InheritanceFlag, $PropagationFlag, $objType)
+ }
+ Else {
+ $objACE = New-Object System.Security.AccessControl.FileSystemAccessRule ($objUser, $colRights, $InheritanceFlag, $PropagationFlag, $objType)
+ }
+ $objACL = Get-ACL -LiteralPath $path
+
+ # Check if the ACE exists already in the objects ACL list
+ $match = $false
+
+ ForEach($rule in $objACL.GetAccessRules($true, $true, [System.Security.Principal.SecurityIdentifier])){
+
+ If ($path_item.PSProvider.Name -eq "Registry") {
+ If (($rule.RegistryRights -eq $objACE.RegistryRights) -And ($rule.AccessControlType -eq $objACE.AccessControlType) -And ($rule.IdentityReference -eq $objACE.IdentityReference) -And ($rule.IsInherited -eq $objACE.IsInherited) -And ($rule.InheritanceFlags -eq $objACE.InheritanceFlags) -And ($rule.PropagationFlags -eq $objACE.PropagationFlags)) {
+ $match = $true
+ Break
+ }
+ } else {
+ If (($rule.FileSystemRights -eq $objACE.FileSystemRights) -And ($rule.AccessControlType -eq $objACE.AccessControlType) -And ($rule.IdentityReference -eq $objACE.IdentityReference) -And ($rule.IsInherited -eq $objACE.IsInherited) -And ($rule.InheritanceFlags -eq $objACE.InheritanceFlags) -And ($rule.PropagationFlags -eq $objACE.PropagationFlags)) {
+ $match = $true
+ Break
+ }
+ }
+ }
+
+ If ($state -eq "present" -And $match -eq $false) {
+ Try {
+ $objACL.AddAccessRule($objACE)
+ If ($path_item.PSProvider.Name -eq "Registry") {
+ Set-ACL -LiteralPath $path -AclObject $objACL
+ } else {
+ (Get-Item -LiteralPath $path).SetAccessControl($objACL)
+ }
+ $result.changed = $true
+ }
+ Catch {
+ Fail-Json -obj $result -message "an exception occurred when adding the specified rule - $($_.Exception.Message)"
+ }
+ }
+ ElseIf ($state -eq "absent" -And $match -eq $true) {
+ Try {
+ $objACL.RemoveAccessRule($objACE)
+ If ($path_item.PSProvider.Name -eq "Registry") {
+ Set-ACL -LiteralPath $path -AclObject $objACL
+ } else {
+ (Get-Item -LiteralPath $path).SetAccessControl($objACL)
+ }
+ $result.changed = $true
+ }
+ Catch {
+ Fail-Json -obj $result -message "an exception occurred when removing the specified rule - $($_.Exception.Message)"
+ }
+ }
+ Else {
+ # A rule was attempting to be added but already exists
+ If ($match -eq $true) {
+ Exit-Json -obj $result -message "the specified rule already exists"
+ }
+ # A rule didn't exist that was trying to be removed
+ Else {
+ Exit-Json -obj $result -message "the specified rule does not exist"
+ }
+ }
+}
+Catch {
+ Fail-Json -obj $result -message "an error occurred when attempting to $state $rights permission(s) on $path for $user - $($_.Exception.Message)"
+}
+Finally {
+ # Make sure we revert the location stack to the original path just for cleanups sake
+ if ($null -ne $path_qualifier) {
+ Pop-Location
+ }
+}
+
+Exit-Json -obj $result
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.py
index 3a2434cf..14fbd82f 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.py
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_acl.py
@@ -1 +1,132 @@
-../../../../../../plugins/modules/win_acl.py \ No newline at end of file
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2015, Phil Schwartz <schwartzmx@gmail.com>
+# Copyright: (c) 2015, Trond Hindenes
+# Copyright: (c) 2015, Hans-Joachim Kliemeck <git@kliemeck.de>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+ANSIBLE_METADATA = {'metadata_version': '1.1',
+ 'status': ['preview'],
+ 'supported_by': 'core'}
+
+DOCUMENTATION = r'''
+---
+module: win_acl
+version_added: "2.0"
+short_description: Set file/directory/registry permissions for a system user or group
+description:
+- Add or remove rights/permissions for a given user or group for the specified
+  file, folder, registry key or AppPool identities.
+options:
+ path:
+ description:
+ - The path to the file or directory.
+ type: str
+ required: yes
+ user:
+ description:
+ - User or Group to add specified rights to act on src file/folder or
+ registry key.
+ type: str
+ required: yes
+ state:
+ description:
+ - Specify whether to add C(present) or remove C(absent) the specified access rule.
+ type: str
+ choices: [ absent, present ]
+ default: present
+ type:
+ description:
+ - Specify whether to allow or deny the rights specified.
+ type: str
+ required: yes
+ choices: [ allow, deny ]
+ rights:
+ description:
+ - The rights/permissions that are to be allowed/denied for the specified
+ user or group for the item at C(path).
+ - If C(path) is a file or directory, rights can be any right under MSDN
+ FileSystemRights U(https://msdn.microsoft.com/en-us/library/system.security.accesscontrol.filesystemrights.aspx).
+ - If C(path) is a registry key, rights can be any right under MSDN
+ RegistryRights U(https://msdn.microsoft.com/en-us/library/system.security.accesscontrol.registryrights.aspx).
+ type: str
+ required: yes
+ inherit:
+ description:
+ - Inherit flags on the ACL rules.
+ - Can be specified as a comma separated list, e.g. C(ContainerInherit),
+ C(ObjectInherit).
+ - For more information on the choices see MSDN InheritanceFlags enumeration
+ at U(https://msdn.microsoft.com/en-us/library/system.security.accesscontrol.inheritanceflags.aspx).
+ - Defaults to C(ContainerInherit, ObjectInherit) for Directories.
+ type: str
+ choices: [ ContainerInherit, ObjectInherit ]
+ propagation:
+ description:
+ - Propagation flag on the ACL rules.
+ - For more information on the choices see MSDN PropagationFlags enumeration
+ at U(https://msdn.microsoft.com/en-us/library/system.security.accesscontrol.propagationflags.aspx).
+ type: str
+ choices: [ InheritOnly, None, NoPropagateInherit ]
+ default: "None"
+notes:
+- If adding ACLs for AppPool identities (available since 2.3), the Windows
+ Feature "Web-Scripting-Tools" must be enabled.
+seealso:
+- module: win_acl_inheritance
+- module: win_file
+- module: win_owner
+- module: win_stat
+author:
+- Phil Schwartz (@schwartzmx)
+- Trond Hindenes (@trondhindenes)
+- Hans-Joachim Kliemeck (@h0nIg)
+'''
+
+EXAMPLES = r'''
+- name: Restrict write and execute access to User Fed-Phil
+ win_acl:
+ user: Fed-Phil
+ path: C:\Important\Executable.exe
+ type: deny
+ rights: ExecuteFile,Write
+
+- name: Add IIS_IUSRS allow rights
+ win_acl:
+ path: C:\inetpub\wwwroot\MySite
+ user: IIS_IUSRS
+ rights: FullControl
+ type: allow
+ state: present
+ inherit: ContainerInherit, ObjectInherit
+ propagation: 'None'
+
+- name: Set registry key right
+ win_acl:
+ path: HKCU:\Bovine\Key
+ user: BUILTIN\Users
+ rights: EnumerateSubKeys
+ type: allow
+ state: present
+ inherit: ContainerInherit, ObjectInherit
+ propagation: 'None'
+
+- name: Remove FullControl AccessRule for IIS_IUSRS
+ win_acl:
+ path: C:\inetpub\wwwroot\MySite
+ user: IIS_IUSRS
+ rights: FullControl
+ type: allow
+ state: absent
+ inherit: ContainerInherit, ObjectInherit
+ propagation: 'None'
+
+- name: Deny Intern
+ win_acl:
+ path: C:\Administrator\Documents
+ user: Intern
+ rights: Read,Write,Modify,FullControl,Delete
+ type: deny
+ state: present
+'''
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.ps1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.ps1
index a34fb012..6a26ee72 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.ps1
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.ps1
@@ -1 +1,403 @@
-../../../../../../plugins/modules/win_copy.ps1 \ No newline at end of file
+#!powershell
+
+# Copyright: (c) 2015, Jon Hawkesworth (@jhawkesworth) <figs@unity.demon.co.uk>
+# Copyright: (c) 2017, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+#Requires -Module Ansible.ModuleUtils.Legacy
+#Requires -Module Ansible.ModuleUtils.Backup
+
+$ErrorActionPreference = 'Stop'
+
+$params = Parse-Args -arguments $args -supports_check_mode $true
+$check_mode = Get-AnsibleParam -obj $params -name "_ansible_check_mode" -type "bool" -default $false
+$diff_mode = Get-AnsibleParam -obj $params -name "_ansible_diff" -type "bool" -default $false
+
+# there are 4 modes to win_copy which are driven by the action plugins:
+# explode: src is a zip file which needs to be extracted to dest, for use with multiple files
+# query: win_copy action plugin wants to get the state of remote files to check whether it needs to send them
+# remote: all copy action is happening remotely (remote_src=True)
+# single: a single file has been copied, also used with template
+$copy_mode = Get-AnsibleParam -obj $params -name "_copy_mode" -type "str" -default "single" -validateset "explode","query","remote","single"
+
+# used in explode, remote and single mode
+$src = Get-AnsibleParam -obj $params -name "src" -type "path" -failifempty ($copy_mode -in @("explode","process","single"))
+$dest = Get-AnsibleParam -obj $params -name "dest" -type "path" -failifempty $true
+$backup = Get-AnsibleParam -obj $params -name "backup" -type "bool" -default $false
+
+# used in single mode
+$original_basename = Get-AnsibleParam -obj $params -name "_original_basename" -type "str"
+
+# used in query and remote mode
+$force = Get-AnsibleParam -obj $params -name "force" -type "bool" -default $true
+
+# used in query mode, contains the local files/directories/symlinks that are to be copied
+$files = Get-AnsibleParam -obj $params -name "files" -type "list"
+$directories = Get-AnsibleParam -obj $params -name "directories" -type "list"
+
+$result = @{
+ changed = $false
+}
+
+if ($diff_mode) {
+ $result.diff = @{}
+}
+
+Function Copy-File($source, $dest) {
+ $diff = ""
+ $copy_file = $false
+ $source_checksum = $null
+ if ($force) {
+ $source_checksum = Get-FileChecksum -path $source
+ }
+
+ if (Test-Path -LiteralPath $dest -PathType Container) {
+ Fail-Json -obj $result -message "cannot copy file from '$source' to '$dest': dest is already a folder"
+ } elseif (Test-Path -LiteralPath $dest -PathType Leaf) {
+ if ($force) {
+ $target_checksum = Get-FileChecksum -path $dest
+ if ($source_checksum -ne $target_checksum) {
+ $copy_file = $true
+ }
+ }
+ } else {
+ $copy_file = $true
+ }
+
+ if ($copy_file) {
+ $file_dir = [System.IO.Path]::GetDirectoryName($dest)
+ # validate the parent dir is not a file and that it exists
+ if (Test-Path -LiteralPath $file_dir -PathType Leaf) {
+ Fail-Json -obj $result -message "cannot copy file from '$source' to '$dest': object at dest parent dir is not a folder"
+ } elseif (-not (Test-Path -LiteralPath $file_dir)) {
+ # directory doesn't exist, need to create
+ New-Item -Path $file_dir -ItemType Directory -WhatIf:$check_mode | Out-Null
+ $diff += "+$file_dir\`n"
+ }
+
+ if ($backup) {
+ $result.backup_file = Backup-File -path $dest -WhatIf:$check_mode
+ }
+
+ if (Test-Path -LiteralPath $dest -PathType Leaf) {
+ Remove-Item -LiteralPath $dest -Force -Recurse -WhatIf:$check_mode | Out-Null
+ $diff += "-$dest`n"
+ }
+
+ if (-not $check_mode) {
+ # cannot run with -WhatIf:$check_mode as if the parent dir didn't
+ # exist and was created above would still not exist in check mode
+ Copy-Item -LiteralPath $source -Destination $dest -Force | Out-Null
+ }
+ $diff += "+$dest`n"
+
+ $result.changed = $true
+ }
+
+ # ugly but to save us from running the checksum twice, let's return it for
+ # the main code to add it to $result
+ return ,@{ diff = $diff; checksum = $source_checksum }
+}
+
+Function Copy-Folder($source, $dest) {
+ $diff = ""
+
+ if (-not (Test-Path -LiteralPath $dest -PathType Container)) {
+ $parent_dir = [System.IO.Path]::GetDirectoryName($dest)
+ if (Test-Path -LiteralPath $parent_dir -PathType Leaf) {
+ Fail-Json -obj $result -message "cannot copy file from '$source' to '$dest': object at dest parent dir is not a folder"
+ }
+ if (Test-Path -LiteralPath $dest -PathType Leaf) {
+ Fail-Json -obj $result -message "cannot copy folder from '$source' to '$dest': dest is already a file"
+ }
+
+ New-Item -Path $dest -ItemType Container -WhatIf:$check_mode | Out-Null
+ $diff += "+$dest\`n"
+ $result.changed = $true
+ }
+
+ $child_items = Get-ChildItem -LiteralPath $source -Force
+ foreach ($child_item in $child_items) {
+ $dest_child_path = Join-Path -Path $dest -ChildPath $child_item.Name
+ if ($child_item.PSIsContainer) {
+ $diff += (Copy-Folder -source $child_item.Fullname -dest $dest_child_path)
+ } else {
+ $diff += (Copy-File -source $child_item.Fullname -dest $dest_child_path).diff
+ }
+ }
+
+ return $diff
+}
+
+Function Get-FileSize($path) {
+ $file = Get-Item -LiteralPath $path -Force
+ if ($file.PSIsContainer) {
+ $size = (Get-ChildItem -Literalpath $file.FullName -Recurse -Force | `
+ Where-Object { $_.PSObject.Properties.Name -contains 'Length' } | `
+ Measure-Object -Property Length -Sum).Sum
+ if ($null -eq $size) {
+ $size = 0
+ }
+ } else {
+ $size = $file.Length
+ }
+
+ $size
+}
+
+Function Extract-Zip($src, $dest) {
+ $archive = [System.IO.Compression.ZipFile]::Open($src, [System.IO.Compression.ZipArchiveMode]::Read, [System.Text.Encoding]::UTF8)
+ foreach ($entry in $archive.Entries) {
+ $archive_name = $entry.FullName
+
+ # FullName may be appended with / or \, determine if it is padded and remove it
+ $padding_length = $archive_name.Length % 4
+ if ($padding_length -eq 0) {
+ $is_dir = $false
+ $base64_name = $archive_name
+ } elseif ($padding_length -eq 1) {
+ $is_dir = $true
+ if ($archive_name.EndsWith("/") -or $archive_name.EndsWith("`\")) {
+ $base64_name = $archive_name.Substring(0, $archive_name.Length - 1)
+ } else {
+ throw "invalid base64 archive name '$archive_name'"
+ }
+ } else {
+ throw "invalid base64 length '$archive_name'"
+ }
+
+ # to handle unicode character, win_copy action plugin has encoded the filename
+ $decoded_archive_name = [System.Text.Encoding]::UTF8.GetString([System.Convert]::FromBase64String($base64_name))
+ # re-add the / to the entry full name if it was a directory
+ if ($is_dir) {
+ $decoded_archive_name = "$decoded_archive_name/"
+ }
+ $entry_target_path = [System.IO.Path]::Combine($dest, $decoded_archive_name)
+ $entry_dir = [System.IO.Path]::GetDirectoryName($entry_target_path)
+
+ if (-not (Test-Path -LiteralPath $entry_dir)) {
+ New-Item -Path $entry_dir -ItemType Directory -WhatIf:$check_mode | Out-Null
+ }
+
+ if ($is_dir -eq $false) {
+ if (-not $check_mode) {
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $entry_target_path, $true)
+ }
+ }
+ }
+ $archive.Dispose() # release the handle of the zip file
+}
+
+Function Extract-ZipLegacy($src, $dest) {
+ if (-not (Test-Path -LiteralPath $dest)) {
+ New-Item -Path $dest -ItemType Directory -WhatIf:$check_mode | Out-Null
+ }
+ $shell = New-Object -ComObject Shell.Application
+ $zip = $shell.NameSpace($src)
+ $dest_path = $shell.NameSpace($dest)
+
+ foreach ($entry in $zip.Items()) {
+ $is_dir = $entry.IsFolder
+ $encoded_archive_entry = $entry.Name
+ # to handle unicode character, win_copy action plugin has encoded the filename
+ $decoded_archive_entry = [System.Text.Encoding]::UTF8.GetString([System.Convert]::FromBase64String($encoded_archive_entry))
+ if ($is_dir) {
+ $decoded_archive_entry = "$decoded_archive_entry/"
+ }
+
+ $entry_target_path = [System.IO.Path]::Combine($dest, $decoded_archive_entry)
+ $entry_dir = [System.IO.Path]::GetDirectoryName($entry_target_path)
+
+ if (-not (Test-Path -LiteralPath $entry_dir)) {
+ New-Item -Path $entry_dir -ItemType Directory -WhatIf:$check_mode | Out-Null
+ }
+
+ if ($is_dir -eq $false -and (-not $check_mode)) {
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/bb787866.aspx
+ # From Folder.CopyHere documentation, 1044 means:
+ # - 1024: do not display a user interface if an error occurs
+ # - 16: respond with "yes to all" for any dialog box that is displayed
+ # - 4: do not display a progress dialog box
+ $dest_path.CopyHere($entry, 1044)
+
+            # once the file is extracted, we need to rename it with non base64 name
+ $combined_encoded_path = [System.IO.Path]::Combine($dest, $encoded_archive_entry)
+ Move-Item -LiteralPath $combined_encoded_path -Destination $entry_target_path -Force | Out-Null
+ }
+ }
+}
+
+if ($copy_mode -eq "query") {
+ # we only return a list of files/directories that need to be copied over
+ # the source of the local file will be the key used
+ $changed_files = @()
+ $changed_directories = @()
+ $changed_symlinks = @()
+
+ foreach ($file in $files) {
+ $filename = $file.dest
+ $local_checksum = $file.checksum
+
+ $filepath = Join-Path -Path $dest -ChildPath $filename
+ if (Test-Path -LiteralPath $filepath -PathType Leaf) {
+ if ($force) {
+ $checksum = Get-FileChecksum -path $filepath
+ if ($checksum -ne $local_checksum) {
+ $changed_files += $file
+ }
+ }
+ } elseif (Test-Path -LiteralPath $filepath -PathType Container) {
+ Fail-Json -obj $result -message "cannot copy file to dest '$filepath': object at path is already a directory"
+ } else {
+ $changed_files += $file
+ }
+ }
+
+ foreach ($directory in $directories) {
+ $dirname = $directory.dest
+
+ $dirpath = Join-Path -Path $dest -ChildPath $dirname
+ $parent_dir = [System.IO.Path]::GetDirectoryName($dirpath)
+ if (Test-Path -LiteralPath $parent_dir -PathType Leaf) {
+ Fail-Json -obj $result -message "cannot copy folder to dest '$dirpath': object at parent directory path is already a file"
+ }
+ if (Test-Path -LiteralPath $dirpath -PathType Leaf) {
+ Fail-Json -obj $result -message "cannot copy folder to dest '$dirpath': object at path is already a file"
+ } elseif (-not (Test-Path -LiteralPath $dirpath -PathType Container)) {
+ $changed_directories += $directory
+ }
+ }
+
+ # TODO: Handle symlinks
+
+ $result.files = $changed_files
+ $result.directories = $changed_directories
+ $result.symlinks = $changed_symlinks
+} elseif ($copy_mode -eq "explode") {
+    # a single zip file containing the files and directories needs to be
+    # expanded. This will always result in a change as the calculation is done
+    # on the win_copy action plugin and is only run if a change needs to occur
+ if (-not (Test-Path -LiteralPath $src -PathType Leaf)) {
+ Fail-Json -obj $result -message "Cannot expand src zip file: '$src' as it does not exist"
+ }
+
+ # Detect if the PS zip assemblies are available or whether to use Shell
+ $use_legacy = $false
+ try {
+ Add-Type -AssemblyName System.IO.Compression.FileSystem | Out-Null
+ Add-Type -AssemblyName System.IO.Compression | Out-Null
+ } catch {
+ $use_legacy = $true
+ }
+ if ($use_legacy) {
+ Extract-ZipLegacy -src $src -dest $dest
+ } else {
+ Extract-Zip -src $src -dest $dest
+ }
+
+ $result.changed = $true
+} elseif ($copy_mode -eq "remote") {
+ # all copy actions are happening on the remote side (windows host), need
+    # to copy source and dest using PS code
+ $result.src = $src
+ $result.dest = $dest
+
+ if (-not (Test-Path -LiteralPath $src)) {
+ Fail-Json -obj $result -message "Cannot copy src file: '$src' as it does not exist"
+ }
+
+ if (Test-Path -LiteralPath $src -PathType Container) {
+ # we are copying a directory or the contents of a directory
+ $result.operation = 'folder_copy'
+ if ($src.EndsWith("/") -or $src.EndsWith("`\")) {
+ # copying the folder's contents to dest
+ $diff = ""
+ $child_files = Get-ChildItem -LiteralPath $src -Force
+ foreach ($child_file in $child_files) {
+ $dest_child_path = Join-Path -Path $dest -ChildPath $child_file.Name
+ if ($child_file.PSIsContainer) {
+ $diff += Copy-Folder -source $child_file.FullName -dest $dest_child_path
+ } else {
+ $diff += (Copy-File -source $child_file.FullName -dest $dest_child_path).diff
+ }
+ }
+ } else {
+            # copying the folder and its contents to dest
+ $dest = Join-Path -Path $dest -ChildPath (Get-Item -LiteralPath $src -Force).Name
+ $result.dest = $dest
+ $diff = Copy-Folder -source $src -dest $dest
+ }
+ } else {
+ # we are just copying a single file to dest
+ $result.operation = 'file_copy'
+
+ $source_basename = (Get-Item -LiteralPath $src -Force).Name
+ $result.original_basename = $source_basename
+
+ if ($dest.EndsWith("/") -or $dest.EndsWith("`\")) {
+ $dest = Join-Path -Path $dest -ChildPath (Get-Item -LiteralPath $src -Force).Name
+ $result.dest = $dest
+ } else {
+ # check if the parent dir exists, this is only done if src is a
+            # file and dest is the path to a file (doesn't end with \ or /)
+ $parent_dir = Split-Path -LiteralPath $dest
+ if (Test-Path -LiteralPath $parent_dir -PathType Leaf) {
+ Fail-Json -obj $result -message "object at destination parent dir '$parent_dir' is currently a file"
+ } elseif (-not (Test-Path -LiteralPath $parent_dir -PathType Container)) {
+ Fail-Json -obj $result -message "Destination directory '$parent_dir' does not exist"
+ }
+ }
+ $copy_result = Copy-File -source $src -dest $dest
+ $diff = $copy_result.diff
+ $result.checksum = $copy_result.checksum
+ }
+
+ # the file might not exist if running in check mode
+ if (-not $check_mode -or (Test-Path -LiteralPath $dest -PathType Leaf)) {
+ $result.size = Get-FileSize -path $dest
+ } else {
+ $result.size = $null
+ }
+ if ($diff_mode) {
+ $result.diff.prepared = $diff
+ }
+} elseif ($copy_mode -eq "single") {
+ # a single file is located in src and we need to copy to dest, this will
+ # always result in a change as the calculation is done on the Ansible side
+ # before this is run. This should also never run in check mode
+ if (-not (Test-Path -LiteralPath $src -PathType Leaf)) {
+ Fail-Json -obj $result -message "Cannot copy src file: '$src' as it does not exist"
+ }
+
+ # the dest parameter is a directory, we need to append original_basename
+ if ($dest.EndsWith("/") -or $dest.EndsWith("`\") -or (Test-Path -LiteralPath $dest -PathType Container)) {
+ $remote_dest = Join-Path -Path $dest -ChildPath $original_basename
+ $parent_dir = Split-Path -LiteralPath $remote_dest
+
+ # when dest ends with /, we need to create the destination directories
+ if (Test-Path -LiteralPath $parent_dir -PathType Leaf) {
+ Fail-Json -obj $result -message "object at destination parent dir '$parent_dir' is currently a file"
+ } elseif (-not (Test-Path -LiteralPath $parent_dir -PathType Container)) {
+ New-Item -Path $parent_dir -ItemType Directory | Out-Null
+ }
+ } else {
+ $remote_dest = $dest
+ $parent_dir = Split-Path -LiteralPath $remote_dest
+
+ # check if the dest parent dirs exist, need to fail if they don't
+ if (Test-Path -LiteralPath $parent_dir -PathType Leaf) {
+ Fail-Json -obj $result -message "object at destination parent dir '$parent_dir' is currently a file"
+ } elseif (-not (Test-Path -LiteralPath $parent_dir -PathType Container)) {
+ Fail-Json -obj $result -message "Destination directory '$parent_dir' does not exist"
+ }
+ }
+
+ if ($backup) {
+ $result.backup_file = Backup-File -path $remote_dest -WhatIf:$check_mode
+ }
+
+ Copy-Item -LiteralPath $src -Destination $remote_dest -Force | Out-Null
+ $result.changed = $true
+}
+
+Exit-Json -obj $result
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.py
index 2d2c69a2..a55f4c65 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.py
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_copy.py
@@ -1 +1,207 @@
-../../../../../../plugins/modules/win_copy.py \ No newline at end of file
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2015, Jon Hawkesworth (@jhawkesworth) <figs@unity.demon.co.uk>
+# Copyright: (c) 2017, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+ANSIBLE_METADATA = {'metadata_version': '1.1',
+ 'status': ['stableinterface'],
+ 'supported_by': 'core'}
+
+DOCUMENTATION = r'''
+---
+module: win_copy
+version_added: '1.9.2'
+short_description: Copies files to remote locations on windows hosts
+description:
+- The C(win_copy) module copies a file on the local box to remote windows locations.
+- For non-Windows targets, use the M(copy) module instead.
+options:
+ content:
+ description:
+ - When used instead of C(src), sets the contents of a file directly to the
+ specified value.
+ - This is for simple values, for anything complex or with formatting please
+ switch to the M(template) module.
+ type: str
+ version_added: '2.3'
+ decrypt:
+ description:
+ - This option controls the autodecryption of source files using vault.
+ type: bool
+ default: yes
+ version_added: '2.5'
+ dest:
+ description:
+ - Remote absolute path where the file should be copied to.
+ - If C(src) is a directory, this must be a directory too.
+ - Use \ for path separators or \\ when in "double quotes".
+ - If C(dest) ends with \ then source or the contents of source will be
+ copied to the directory without renaming.
+ - If C(dest) is a nonexistent path, it will only be created if C(dest) ends
+ with "/" or "\", or C(src) is a directory.
+ - If C(src) and C(dest) are files and if the parent directory of C(dest)
+ doesn't exist, then the task will fail.
+ type: path
+ required: yes
+ backup:
+ description:
+ - Determine whether a backup should be created.
+ - When set to C(yes), create a backup file including the timestamp information
+ so you can get the original file back if you somehow clobbered it incorrectly.
+ - No backup is taken when C(remote_src=False) and multiple files are being
+ copied.
+ type: bool
+ default: no
+ version_added: '2.8'
+ force:
+ description:
+ - If set to C(yes), the file will only be transferred if the content
+ is different than destination.
+ - If set to C(no), the file will only be transferred if the
+ destination does not exist.
+ - If set to C(no), no checksuming of the content is performed which can
+ help improve performance on larger files.
+ type: bool
+ default: yes
+ version_added: '2.3'
+ local_follow:
+ description:
+ - This flag indicates that filesystem links in the source tree, if they
+ exist, should be followed.
+ type: bool
+ default: yes
+ version_added: '2.4'
+ remote_src:
+ description:
+ - If C(no), it will search for src at originating/master machine.
+ - If C(yes), it will go to the remote/target machine for the src.
+ type: bool
+ default: no
+ version_added: '2.3'
+ src:
+ description:
+ - Local path to a file to copy to the remote server; can be absolute or
+ relative.
+ - If path is a directory, it is copied (including the source folder name)
+ recursively to C(dest).
+ - If path is a directory and ends with "/", only the inside contents of
+ that directory are copied to the destination. Otherwise, if it does not
+ end with "/", the directory itself with all contents is copied.
+ - If path is a file and dest ends with "\", the file is copied to the
+ folder with the same filename.
+ - Required unless using C(content).
+ type: path
+notes:
+- Currently win_copy does not support copying symbolic links from both local to
+ remote and remote to remote.
+- It is recommended that backslashes C(\) are used instead of C(/) when dealing
+ with remote paths.
+- Because win_copy runs over WinRM, it is not a very efficient transfer
+ mechanism. If sending large files consider hosting them on a web service and
+ using M(win_get_url) instead.
+seealso:
+- module: assemble
+- module: copy
+- module: win_get_url
+- module: win_robocopy
+author:
+- Jon Hawkesworth (@jhawkesworth)
+- Jordan Borean (@jborean93)
+'''
+
+EXAMPLES = r'''
+- name: Copy a single file
+ win_copy:
+ src: /srv/myfiles/foo.conf
+ dest: C:\Temp\renamed-foo.conf
+
+- name: Copy a single file, but keep a backup
+ win_copy:
+ src: /srv/myfiles/foo.conf
+ dest: C:\Temp\renamed-foo.conf
+ backup: yes
+
+- name: Copy a single file keeping the filename
+ win_copy:
+ src: /src/myfiles/foo.conf
+ dest: C:\Temp\
+
+- name: Copy folder to C:\Temp (results in C:\Temp\temp_files)
+ win_copy:
+ src: files/temp_files
+ dest: C:\Temp
+
+- name: Copy folder contents recursively
+ win_copy:
+ src: files/temp_files/
+ dest: C:\Temp
+
+- name: Copy a single file where the source is on the remote host
+ win_copy:
+ src: C:\Temp\foo.txt
+ dest: C:\ansible\foo.txt
+ remote_src: yes
+
+- name: Copy a folder recursively where the source is on the remote host
+ win_copy:
+ src: C:\Temp
+ dest: C:\ansible
+ remote_src: yes
+
+- name: Set the contents of a file
+ win_copy:
+ content: abc123
+ dest: C:\Temp\foo.txt
+
+- name: Copy a single file as another user
+ win_copy:
+ src: NuGet.config
+ dest: '%AppData%\NuGet\NuGet.config'
+ vars:
+ ansible_become_user: user
+ ansible_become_password: pass
+ # The tmp dir must be set when using win_copy as another user
+ # This ensures the become user will have permissions for the operation
+ # Make sure to specify a folder both the ansible_user and the become_user have access to (i.e not %TEMP% which is user specific and requires Admin)
+ ansible_remote_tmp: 'c:\tmp'
+'''
+
+RETURN = r'''
+backup_file:
+ description: Name of the backup file that was created.
+ returned: if backup=yes
+ type: str
+ sample: C:\Path\To\File.txt.11540.20150212-220915.bak
+dest:
+ description: Destination file/path.
+ returned: changed
+ type: str
+ sample: C:\Temp\
+src:
+ description: Source file used for the copy on the target machine.
+ returned: changed
+ type: str
+ sample: /home/httpd/.ansible/tmp/ansible-tmp-1423796390.97-147729857856000/source
+checksum:
+ description: SHA1 checksum of the file after running copy.
+ returned: success, src is a file
+ type: str
+ sample: 6e642bb8dd5c2e027bf21dd923337cbb4214f827
+size:
+ description: Size of the target, after execution.
+ returned: changed, src is a file
+ type: int
+ sample: 1220
+operation:
+ description: Whether a single file copy took place or a folder copy.
+ returned: success
+ type: str
+ sample: file_copy
+original_basename:
+ description: Basename of the copied file.
+ returned: changed, src is a file
+ type: str
+ sample: foo.txt
+'''
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.ps1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.ps1
index 8ee5c2b5..54427549 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.ps1
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.ps1
@@ -1 +1,152 @@
-../../../../../../plugins/modules/win_file.ps1 \ No newline at end of file
+#!powershell
+
+# Copyright: (c) 2017, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+#Requires -Module Ansible.ModuleUtils.Legacy
+
+$ErrorActionPreference = "Stop"
+
+$params = Parse-Args $args -supports_check_mode $true
+
+$check_mode = Get-AnsibleParam -obj $params -name "_ansible_check_mode" -default $false
+$_remote_tmp = Get-AnsibleParam $params "_ansible_remote_tmp" -type "path" -default $env:TMP
+
+$path = Get-AnsibleParam -obj $params -name "path" -type "path" -failifempty $true -aliases "dest","name"
+$state = Get-AnsibleParam -obj $params -name "state" -type "str" -validateset "absent","directory","file","touch"
+
+# used in template/copy when dest is the path to a dir and source is a file
+$original_basename = Get-AnsibleParam -obj $params -name "_original_basename" -type "str"
+if ((Test-Path -LiteralPath $path -PathType Container) -and ($null -ne $original_basename)) {
+ $path = Join-Path -Path $path -ChildPath $original_basename
+}
+
+$result = @{
+ changed = $false
+}
+
+# Used to delete symlinks as powershell cannot delete broken symlinks
+$symlink_util = @"
+using System;
+using System.ComponentModel;
+using System.Runtime.InteropServices;
+
+namespace Ansible.Command {
+ public class SymLinkHelper {
+ [DllImport("kernel32.dll", CharSet=CharSet.Unicode, SetLastError=true)]
+ public static extern bool DeleteFileW(string lpFileName);
+
+ [DllImport("kernel32.dll", CharSet=CharSet.Unicode, SetLastError=true)]
+ public static extern bool RemoveDirectoryW(string lpPathName);
+
+ public static void DeleteDirectory(string path) {
+ if (!RemoveDirectoryW(path))
+ throw new Exception(String.Format("RemoveDirectoryW({0}) failed: {1}", path, new Win32Exception(Marshal.GetLastWin32Error()).Message));
+ }
+
+ public static void DeleteFile(string path) {
+ if (!DeleteFileW(path))
+ throw new Exception(String.Format("DeleteFileW({0}) failed: {1}", path, new Win32Exception(Marshal.GetLastWin32Error()).Message));
+ }
+ }
+}
+"@
+$original_tmp = $env:TMP
+$env:TMP = $_remote_tmp
+Add-Type -TypeDefinition $symlink_util
+$env:TMP = $original_tmp
+
+# Used to delete directories and files with logic on handling symbolic links
+function Remove-File($file, $checkmode) {
+ try {
+ if ($file.Attributes -band [System.IO.FileAttributes]::ReparsePoint) {
+ # Bug with powershell, if you try and delete a symbolic link that is pointing
+ # to an invalid path it will fail, using Win32 API to do this instead
+ if ($file.PSIsContainer) {
+ if (-not $checkmode) {
+ [Ansible.Command.SymLinkHelper]::DeleteDirectory($file.FullName)
+ }
+ } else {
+ if (-not $checkmode) {
+ [Ansible.Command.SymlinkHelper]::DeleteFile($file.FullName)
+ }
+ }
+ } elseif ($file.PSIsContainer) {
+ Remove-Directory -directory $file -checkmode $checkmode
+ } else {
+ Remove-Item -LiteralPath $file.FullName -Force -WhatIf:$checkmode
+ }
+ } catch [Exception] {
+ Fail-Json $result "Failed to delete $($file.FullName): $($_.Exception.Message)"
+ }
+}
+
+function Remove-Directory($directory, $checkmode) {
+ foreach ($file in Get-ChildItem -LiteralPath $directory.FullName) {
+ Remove-File -file $file -checkmode $checkmode
+ }
+ Remove-Item -LiteralPath $directory.FullName -Force -Recurse -WhatIf:$checkmode
+}
+
+
+if ($state -eq "touch") {
+ if (Test-Path -LiteralPath $path) {
+ if (-not $check_mode) {
+ (Get-ChildItem -LiteralPath $path).LastWriteTime = Get-Date
+ }
+ $result.changed = $true
+ } else {
+ Write-Output $null | Out-File -LiteralPath $path -Encoding ASCII -WhatIf:$check_mode
+ $result.changed = $true
+ }
+}
+
+if (Test-Path -LiteralPath $path) {
+ $fileinfo = Get-Item -LiteralPath $path -Force
+ if ($state -eq "absent") {
+ Remove-File -file $fileinfo -checkmode $check_mode
+ $result.changed = $true
+ } else {
+ if ($state -eq "directory" -and -not $fileinfo.PsIsContainer) {
+ Fail-Json $result "path $path is not a directory"
+ }
+
+ if ($state -eq "file" -and $fileinfo.PsIsContainer) {
+ Fail-Json $result "path $path is not a file"
+ }
+ }
+
+} else {
+
+ # If state is not supplied, test the $path to see if it looks like
+ # a file or a folder and set state to file or folder
+ if ($null -eq $state) {
+ $basename = Split-Path -Path $path -Leaf
+ if ($basename.length -gt 0) {
+ $state = "file"
+ } else {
+ $state = "directory"
+ }
+ }
+
+ if ($state -eq "directory") {
+ try {
+ New-Item -Path $path -ItemType Directory -WhatIf:$check_mode | Out-Null
+ } catch {
+ if ($_.CategoryInfo.Category -eq "ResourceExists") {
+ $fileinfo = Get-Item -LiteralPath $_.CategoryInfo.TargetName
+ if ($state -eq "directory" -and -not $fileinfo.PsIsContainer) {
+ Fail-Json $result "path $path is not a directory"
+ }
+ } else {
+ Fail-Json $result $_.Exception.Message
+ }
+ }
+ $result.changed = $true
+ } elseif ($state -eq "file") {
+ Fail-Json $result "path $path will not be created"
+ }
+
+}
+
+Exit-Json $result
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.py
index b4bc0583..28149579 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.py
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_file.py
@@ -1 +1,70 @@
-../../../../../../plugins/modules/win_file.py \ No newline at end of file
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2015, Jon Hawkesworth (@jhawkesworth) <figs@unity.demon.co.uk>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+ANSIBLE_METADATA = {'metadata_version': '1.1',
+ 'status': ['stableinterface'],
+ 'supported_by': 'core'}
+
+DOCUMENTATION = r'''
+---
+module: win_file
+version_added: "1.9.2"
+short_description: Creates, touches or removes files or directories
+description:
+ - Creates (empty) files, updates file modification stamps of existing files,
+ and can create or remove directories.
+ - Unlike M(file), does not modify ownership, permissions or manipulate links.
+ - For non-Windows targets, use the M(file) module instead.
+options:
+ path:
+ description:
+ - Path to the file being managed.
+ required: yes
+ type: path
+ aliases: [ dest, name ]
+ state:
+ description:
+ - If C(directory), all immediate subdirectories will be created if they
+ do not exist.
+ - If C(file), the file will NOT be created if it does not exist, see the M(copy)
+ or M(template) module if you want that behavior.
+ - If C(absent), directories will be recursively deleted, and files will be removed.
+ - If C(touch), an empty file will be created if the C(path) does not
+ exist, while an existing file or directory will receive updated file access and
+ modification times (similar to the way C(touch) works from the command line).
+ type: str
+ choices: [ absent, directory, file, touch ]
+seealso:
+- module: file
+- module: win_acl
+- module: win_acl_inheritance
+- module: win_owner
+- module: win_stat
+author:
+- Jon Hawkesworth (@jhawkesworth)
+'''
+
+EXAMPLES = r'''
+- name: Touch a file (creates if not present, updates modification time if present)
+ win_file:
+ path: C:\Temp\foo.conf
+ state: touch
+
+- name: Remove a file, if present
+ win_file:
+ path: C:\Temp\foo.conf
+ state: absent
+
+- name: Create directory structure
+ win_file:
+ path: C:\Temp\folder\subfolder
+ state: directory
+
+- name: Remove directory structure
+ win_file:
+ path: C:\Temp
+ state: absent
+'''
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.ps1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.ps1
index d7b25ed0..c848b912 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.ps1
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.ps1
@@ -1 +1,21 @@
-../../../../../../plugins/modules/win_ping.ps1 \ No newline at end of file
+#!powershell
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+#AnsibleRequires -CSharpUtil Ansible.Basic
+
+$spec = @{
+ options = @{
+ data = @{ type = "str"; default = "pong" }
+ }
+ supports_check_mode = $true
+}
+$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
+$data = $module.Params.data
+
+if ($data -eq "crash") {
+ throw "boom"
+}
+
+$module.Result.ping = $data
+$module.ExitJson()
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.py
index 0b97c87b..6d35f379 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.py
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_ping.py
@@ -1 +1,55 @@
-../../../../../../plugins/modules/win_ping.py \ No newline at end of file
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>, and others
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# this is a windows documentation stub. actual code lives in the .ps1
+# file of the same name
+
+ANSIBLE_METADATA = {'metadata_version': '1.1',
+ 'status': ['stableinterface'],
+ 'supported_by': 'core'}
+
+DOCUMENTATION = r'''
+---
+module: win_ping
+version_added: "1.7"
+short_description: A windows version of the classic ping module
+description:
+ - Checks management connectivity of a windows host.
+ - This is NOT ICMP ping, this is just a trivial test module.
+ - For non-Windows targets, use the M(ping) module instead.
+ - For Network targets, use the M(net_ping) module instead.
+options:
+ data:
+ description:
+ - Alternate data to return instead of 'pong'.
+ - If this parameter is set to C(crash), the module will cause an exception.
+ type: str
+ default: pong
+seealso:
+- module: ping
+author:
+- Chris Church (@cchurch)
+'''
+
+EXAMPLES = r'''
+# Test connectivity to a windows host
+# ansible winserver -m win_ping
+
+- name: Example from an Ansible Playbook
+ win_ping:
+
+- name: Induce an exception to see what happens
+ win_ping:
+ data: crash
+'''
+
+RETURN = r'''
+ping:
+ description: Value provided with the data parameter.
+ returned: success
+ type: str
+ sample: pong
+'''
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.ps1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.ps1
index eb07a017..54aef8de 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.ps1
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.ps1
@@ -1 +1,138 @@
-../../../../../../plugins/modules/win_shell.ps1 \ No newline at end of file
+#!powershell
+
+# Copyright: (c) 2017, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+#Requires -Module Ansible.ModuleUtils.Legacy
+#Requires -Module Ansible.ModuleUtils.CommandUtil
+#Requires -Module Ansible.ModuleUtils.FileUtil
+
+# TODO: add check mode support
+
+Set-StrictMode -Version 2
+$ErrorActionPreference = "Stop"
+
+# Cleanse CLIXML from stderr (sift out error stream data, discard others for now)
+Function Cleanse-Stderr($raw_stderr) {
+ Try {
+ # NB: this regex isn't perfect, but is decent at finding CLIXML amongst other stderr noise
+ If($raw_stderr -match "(?s)(?<prenoise1>.*)#< CLIXML(?<prenoise2>.*)(?<clixml><Objs.+</Objs>)(?<postnoise>.*)") {
+ $clixml = [xml]$matches["clixml"]
+
+ $merged_stderr = "{0}{1}{2}{3}" -f @(
+ $matches["prenoise1"],
+ $matches["prenoise2"],
+ # filter out just the Error-tagged strings for now, and zap embedded CRLF chars
+ ($clixml.Objs.ChildNodes | Where-Object { $_.Name -eq 'S' } | Where-Object { $_.S -eq 'Error' } | ForEach-Object { $_.'#text'.Replace('_x000D__x000A_','') } | Out-String),
+ $matches["postnoise"]) | Out-String
+
+ return $merged_stderr.Trim()
+
+ # FUTURE: parse/return other streams
+ }
+ Else {
+ $raw_stderr
+ }
+ }
+ Catch {
+ "***EXCEPTION PARSING CLIXML: $_***" + $raw_stderr
+ }
+}
+
+$params = Parse-Args $args -supports_check_mode $false
+
+$raw_command_line = Get-AnsibleParam -obj $params -name "_raw_params" -type "str" -failifempty $true
+$chdir = Get-AnsibleParam -obj $params -name "chdir" -type "path"
+$executable = Get-AnsibleParam -obj $params -name "executable" -type "path"
+$creates = Get-AnsibleParam -obj $params -name "creates" -type "path"
+$removes = Get-AnsibleParam -obj $params -name "removes" -type "path"
+$stdin = Get-AnsibleParam -obj $params -name "stdin" -type "str"
+$no_profile = Get-AnsibleParam -obj $params -name "no_profile" -type "bool" -default $false
+$output_encoding_override = Get-AnsibleParam -obj $params -name "output_encoding_override" -type "str"
+
+$raw_command_line = $raw_command_line.Trim()
+
+$result = @{
+ changed = $true
+ cmd = $raw_command_line
+}
+
+if ($creates -and $(Test-AnsiblePath -Path $creates)) {
+ Exit-Json @{msg="skipped, since $creates exists";cmd=$raw_command_line;changed=$false;skipped=$true;rc=0}
+}
+
+if ($removes -and -not $(Test-AnsiblePath -Path $removes)) {
+ Exit-Json @{msg="skipped, since $removes does not exist";cmd=$raw_command_line;changed=$false;skipped=$true;rc=0}
+}
+
+$exec_args = $null
+If(-not $executable -or $executable -eq "powershell") {
+ $exec_application = "powershell.exe"
+
+ # force input encoding to preamble-free UTF8 so PS sub-processes (eg, Start-Job) don't blow up
+ $raw_command_line = "[Console]::InputEncoding = New-Object Text.UTF8Encoding `$false; " + $raw_command_line
+
+ # Base64 encode the command so we don't have to worry about the various levels of escaping
+ $encoded_command = [Convert]::ToBase64String([System.Text.Encoding]::Unicode.GetBytes($raw_command_line))
+
+ if ($stdin) {
+ $exec_args = "-encodedcommand $encoded_command"
+ } else {
+ $exec_args = "-noninteractive -encodedcommand $encoded_command"
+ }
+
+ if ($no_profile) {
+ $exec_args = "-noprofile $exec_args"
+ }
+}
+Else {
+ # FUTURE: support arg translation from executable (or executable_args?) to process arguments for arbitrary interpreter?
+ $exec_application = $executable
+ if (-not ($exec_application.EndsWith(".exe"))) {
+ $exec_application = "$($exec_application).exe"
+ }
+ $exec_args = "/c $raw_command_line"
+}
+
+$command = "`"$exec_application`" $exec_args"
+$run_command_arg = @{
+ command = $command
+}
+if ($chdir) {
+ $run_command_arg['working_directory'] = $chdir
+}
+if ($stdin) {
+ $run_command_arg['stdin'] = $stdin
+}
+if ($output_encoding_override) {
+ $run_command_arg['output_encoding_override'] = $output_encoding_override
+}
+
+$start_datetime = [DateTime]::UtcNow
+try {
+ $command_result = Run-Command @run_command_arg
+} catch {
+ $result.changed = $false
+ try {
+ $result.rc = $_.Exception.NativeErrorCode
+ } catch {
+ $result.rc = 2
+ }
+ Fail-Json -obj $result -message $_.Exception.Message
+}
+
+# TODO: decode CLIXML stderr output (and other streams?)
+$result.stdout = $command_result.stdout
+$result.stderr = Cleanse-Stderr $command_result.stderr
+$result.rc = $command_result.rc
+
+$end_datetime = [DateTime]::UtcNow
+$result.start = $start_datetime.ToString("yyyy-MM-dd hh:mm:ss.ffffff")
+$result.end = $end_datetime.ToString("yyyy-MM-dd hh:mm:ss.ffffff")
+$result.delta = $($end_datetime - $start_datetime).ToString("h\:mm\:ss\.ffffff")
+
+If ($result.rc -ne 0) {
+ Fail-Json -obj $result -message "non-zero return code"
+}
+
+Exit-Json $result
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.py
index 3c6f0749..ee2cd762 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.py
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_shell.py
@@ -1 +1,167 @@
-../../../../../../plugins/modules/win_shell.py \ No newline at end of file
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2016, Ansible, inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+ANSIBLE_METADATA = {'metadata_version': '1.1',
+ 'status': ['preview'],
+ 'supported_by': 'core'}
+
+DOCUMENTATION = r'''
+---
+module: win_shell
+short_description: Execute shell commands on target hosts
+version_added: 2.2
+description:
+ - The C(win_shell) module takes the command name followed by a list of space-delimited arguments.
+ It is similar to the M(win_command) module, but runs
+ the command via a shell (defaults to PowerShell) on the target host.
+ - For non-Windows targets, use the M(shell) module instead.
+options:
+ free_form:
+ description:
+ - The C(win_shell) module takes a free form command to run.
+ - There is no parameter actually named 'free form'. See the examples!
+ type: str
+ required: yes
+ creates:
+ description:
+ - A path or path filter pattern; when the referenced path exists on the target host, the task will be skipped.
+ type: path
+ removes:
+ description:
+ - A path or path filter pattern; when the referenced path B(does not) exist on the target host, the task will be skipped.
+ type: path
+ chdir:
+ description:
+ - Set the specified path as the current working directory before executing a command
+ type: path
+ executable:
+ description:
+ - Change the shell used to execute the command (eg, C(cmd)).
+ - The target shell must accept a C(/c) parameter followed by the raw command line to be executed.
+ type: path
+ stdin:
+ description:
+ - Set the stdin of the command directly to the specified value.
+ type: str
+ version_added: '2.5'
+ no_profile:
+ description:
+ - Do not load the user profile before running a command. This is only valid
+ when using PowerShell as the executable.
+ type: bool
+ default: no
+ version_added: '2.8'
+ output_encoding_override:
+ description:
+ - This option overrides the encoding of stdout/stderr output.
+      - You can use this option when you need to run a command which ignores the console's codepage.
+ - You should only need to use this option in very rare circumstances.
+ - This value can be any valid encoding C(Name) based on the output of C([System.Text.Encoding]::GetEncodings()).
+ See U(https://docs.microsoft.com/dotnet/api/system.text.encoding.getencodings).
+ type: str
+ version_added: '2.10'
+notes:
+ - If you want to run an executable securely and predictably, it may be
+ better to use the M(win_command) module instead. Best practices when writing
+ playbooks will follow the trend of using M(win_command) unless C(win_shell) is
+ explicitly required. When running ad-hoc commands, use your best judgement.
+ - WinRM will not return from a command execution until all child processes created have exited.
+ Thus, it is not possible to use C(win_shell) to spawn long-running child or background processes.
+ Consider creating a Windows service for managing background processes.
+seealso:
+- module: psexec
+- module: raw
+- module: script
+- module: shell
+- module: win_command
+- module: win_psexec
+author:
+ - Matt Davis (@nitzmahone)
+'''
+
+EXAMPLES = r'''
+# Execute a command in the remote shell; stdout goes to the specified
+# file on the remote.
+- win_shell: C:\somescript.ps1 >> C:\somelog.txt
+
+# Change the working directory to somedir/ before executing the command.
+- win_shell: C:\somescript.ps1 >> C:\somelog.txt chdir=C:\somedir
+
+# You can also use the 'args' form to provide the options. This command
+# will change the working directory to somedir/ and will only run when
+# somedir/somelog.txt doesn't exist.
+- win_shell: C:\somescript.ps1 >> C:\somelog.txt
+ args:
+ chdir: C:\somedir
+ creates: C:\somelog.txt
+
+# Run a command under a non-Powershell interpreter (cmd in this case)
+- win_shell: echo %HOMEDIR%
+ args:
+ executable: cmd
+ register: homedir_out
+
+- name: Run multi-lined shell commands
+ win_shell: |
+ $value = Test-Path -Path C:\temp
+ if ($value) {
+ Remove-Item -Path C:\temp -Force
+ }
+ New-Item -Path C:\temp -ItemType Directory
+
+- name: Retrieve the input based on stdin
+ win_shell: '$string = [Console]::In.ReadToEnd(); Write-Output $string.Trim()'
+ args:
+ stdin: Input message
+'''
+
+RETURN = r'''
+msg:
+ description: Changed.
+ returned: always
+ type: bool
+ sample: true
+start:
+ description: The command execution start time.
+ returned: always
+ type: str
+ sample: '2016-02-25 09:18:26.429568'
+end:
+ description: The command execution end time.
+ returned: always
+ type: str
+ sample: '2016-02-25 09:18:26.755339'
+delta:
+ description: The command execution delta time.
+ returned: always
+ type: str
+ sample: '0:00:00.325771'
+stdout:
+ description: The command standard output.
+ returned: always
+ type: str
+ sample: 'Clustering node rabbit@slave1 with rabbit@master ...'
+stderr:
+ description: The command standard error.
+ returned: always
+ type: str
+ sample: 'ls: cannot access foo: No such file or directory'
+cmd:
+ description: The command executed by the task.
+ returned: always
+ type: str
+ sample: 'rabbitmqctl join_cluster rabbit@master'
+rc:
+ description: The command return code (0 means success).
+ returned: always
+ type: int
+ sample: 0
+stdout_lines:
+ description: The command standard output split in lines.
+ returned: always
+ type: list
+ sample: [u'Clustering node rabbit@slave1 with rabbit@master ...']
+'''
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps1
index 62a7a40a..071eb11c 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps1
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.ps1
@@ -1 +1,186 @@
-../../../../../../plugins/modules/win_stat.ps1 \ No newline at end of file
+#!powershell
+
+# Copyright: (c) 2017, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+#AnsibleRequires -CSharpUtil Ansible.Basic
+#Requires -Module Ansible.ModuleUtils.FileUtil
+#Requires -Module Ansible.ModuleUtils.LinkUtil
+
+function ConvertTo-Timestamp($start_date, $end_date) {
+ if ($start_date -and $end_date) {
+ return (New-TimeSpan -Start $start_date -End $end_date).TotalSeconds
+ }
+}
+
+function Get-FileChecksum($path, $algorithm) {
+ switch ($algorithm) {
+ 'md5' { $sp = New-Object -TypeName System.Security.Cryptography.MD5CryptoServiceProvider }
+ 'sha1' { $sp = New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider }
+ 'sha256' { $sp = New-Object -TypeName System.Security.Cryptography.SHA256CryptoServiceProvider }
+ 'sha384' { $sp = New-Object -TypeName System.Security.Cryptography.SHA384CryptoServiceProvider }
+ 'sha512' { $sp = New-Object -TypeName System.Security.Cryptography.SHA512CryptoServiceProvider }
+ default { Fail-Json -obj $result -message "Unsupported hash algorithm supplied '$algorithm'" }
+ }
+
+ $fp = [System.IO.File]::Open($path, [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read, [System.IO.FileShare]::ReadWrite)
+ try {
+ $hash = [System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower()
+ } finally {
+ $fp.Dispose()
+ }
+
+ return $hash
+}
+
+function Get-FileInfo {
+ param([String]$Path, [Switch]$Follow)
+
+ $info = Get-AnsibleItem -Path $Path -ErrorAction SilentlyContinue
+ $link_info = $null
+ if ($null -ne $info) {
+ try {
+ $link_info = Get-Link -link_path $info.FullName
+ } catch {
+ $module.Warn("Failed to check/get link info for file: $($_.Exception.Message)")
+ }
+
+ # If follow=true we want to follow the link all the way back to root object
+ if ($Follow -and $null -ne $link_info -and $link_info.Type -in @("SymbolicLink", "JunctionPoint")) {
+ $info, $link_info = Get-FileInfo -Path $link_info.AbsolutePath -Follow
+ }
+ }
+
+ return $info, $link_info
+}
+
+$spec = @{
+ options = @{
+ path = @{ type='path'; required=$true; aliases=@( 'dest', 'name' ) }
+ get_checksum = @{ type='bool'; default=$true }
+ checksum_algorithm = @{ type='str'; default='sha1'; choices=@( 'md5', 'sha1', 'sha256', 'sha384', 'sha512' ) }
+ follow = @{ type='bool'; default=$false }
+ }
+ supports_check_mode = $true
+}
+
+$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
+
+$path = $module.Params.path
+$get_checksum = $module.Params.get_checksum
+$checksum_algorithm = $module.Params.checksum_algorithm
+$follow = $module.Params.follow
+
+$module.Result.stat = @{ exists=$false }
+
+Load-LinkUtils
+$info, $link_info = Get-FileInfo -Path $path -Follow:$follow
+If ($null -ne $info) {
+ $epoch_date = Get-Date -Date "01/01/1970"
+ $attributes = @()
+ foreach ($attribute in ($info.Attributes -split ',')) {
+ $attributes += $attribute.Trim()
+ }
+
+ # default values that are always set, specific values are set below this
+ # but are kept commented for easier readability
+ $stat = @{
+ exists = $true
+ attributes = $info.Attributes.ToString()
+ isarchive = ($attributes -contains "Archive")
+ isdir = $false
+ ishidden = ($attributes -contains "Hidden")
+ isjunction = $false
+ islnk = $false
+ isreadonly = ($attributes -contains "ReadOnly")
+ isreg = $false
+ isshared = $false
+        nlink = 1  # Number of links to the file (hard links), overridden below if islnk
+ # lnk_target = islnk or isjunction Target of the symlink. Note that relative paths remain relative
+        # lnk_source = islnk or isjunction Target of the symlink normalized for the remote filesystem
+ hlnk_targets = @()
+ creationtime = (ConvertTo-Timestamp -start_date $epoch_date -end_date $info.CreationTime)
+ lastaccesstime = (ConvertTo-Timestamp -start_date $epoch_date -end_date $info.LastAccessTime)
+ lastwritetime = (ConvertTo-Timestamp -start_date $epoch_date -end_date $info.LastWriteTime)
+ # size = a file and directory - calculated below
+ path = $info.FullName
+ filename = $info.Name
+ # extension = a file
+        # owner = set outside this dict in case it fails
+ # sharename = a directory and isshared is True
+ # checksum = a file and get_checksum: True
+ }
+ try {
+ $stat.owner = $info.GetAccessControl().Owner
+ } catch {
+ # may not have rights, historical behaviour was to just set to $null
+ # due to ErrorActionPreference being set to "Continue"
+ $stat.owner = $null
+ }
+
+ # values that are set according to the type of file
+ if ($info.Attributes.HasFlag([System.IO.FileAttributes]::Directory)) {
+ $stat.isdir = $true
+ $share_info = Get-CimInstance -ClassName Win32_Share -Filter "Path='$($stat.path -replace '\\', '\\')'"
+ if ($null -ne $share_info) {
+ $stat.isshared = $true
+ $stat.sharename = $share_info.Name
+ }
+
+ try {
+ $size = 0
+ foreach ($file in $info.EnumerateFiles("*", [System.IO.SearchOption]::AllDirectories)) {
+ $size += $file.Length
+ }
+ $stat.size = $size
+ } catch {
+ $stat.size = 0
+ }
+ } else {
+ $stat.extension = $info.Extension
+ $stat.isreg = $true
+ $stat.size = $info.Length
+
+ if ($get_checksum) {
+ try {
+ $stat.checksum = Get-FileChecksum -path $path -algorithm $checksum_algorithm
+ } catch {
+ $module.FailJson("Failed to get hash of file, set get_checksum to False to ignore this error: $($_.Exception.Message)", $_)
+ }
+ }
+ }
+
+ # Get symbolic link, junction point, hard link info
+ if ($null -ne $link_info) {
+ switch ($link_info.Type) {
+ "SymbolicLink" {
+ $stat.islnk = $true
+ $stat.isreg = $false
+ $stat.lnk_target = $link_info.TargetPath
+ $stat.lnk_source = $link_info.AbsolutePath
+ break
+ }
+ "JunctionPoint" {
+ $stat.isjunction = $true
+ $stat.isreg = $false
+ $stat.lnk_target = $link_info.TargetPath
+ $stat.lnk_source = $link_info.AbsolutePath
+ break
+ }
+ "HardLink" {
+ $stat.lnk_type = "hard"
+ $stat.nlink = $link_info.HardTargets.Count
+
+ # remove current path from the targets
+ $hlnk_targets = $link_info.HardTargets | Where-Object { $_ -ne $stat.path }
+ $stat.hlnk_targets = @($hlnk_targets)
+ break
+ }
+ }
+ }
+
+ $module.Result.stat = $stat
+}
+
+$module.ExitJson()
+
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.py
index 1db4c95e..0676b5b2 120000..100644
--- a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.py
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_stat.py
@@ -1 +1,236 @@
-../../../../../../plugins/modules/win_stat.py \ No newline at end of file
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2017, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# this is a windows documentation stub. actual code lives in the .ps1
+# file of the same name
+
+ANSIBLE_METADATA = {'metadata_version': '1.1',
+ 'status': ['stableinterface'],
+ 'supported_by': 'core'}
+
+DOCUMENTATION = r'''
+---
+module: win_stat
+version_added: "1.7"
+short_description: Get information about Windows files
+description:
+ - Returns information about a Windows file.
+ - For non-Windows targets, use the M(stat) module instead.
+options:
+ path:
+ description:
+ - The full path of the file/object to get the facts of; both forward and
+ back slashes are accepted.
+ type: path
+ required: yes
+ aliases: [ dest, name ]
+ get_checksum:
+ description:
+ - Whether to return a checksum of the file (default sha1)
+ type: bool
+ default: yes
+ version_added: "2.1"
+ checksum_algorithm:
+ description:
+ - Algorithm to determine checksum of file.
+ - Will throw an error if the host is unable to use specified algorithm.
+ type: str
+ default: sha1
+ choices: [ md5, sha1, sha256, sha384, sha512 ]
+ version_added: "2.3"
+ follow:
+ description:
+ - Whether to follow symlinks or junction points.
+ - In the case of C(path) pointing to another link, then that will
+ be followed until no more links are found.
+ type: bool
+ default: no
+ version_added: "2.8"
+seealso:
+- module: stat
+- module: win_acl
+- module: win_file
+- module: win_owner
+author:
+- Chris Church (@cchurch)
+'''
+
+EXAMPLES = r'''
+- name: Obtain information about a file
+ win_stat:
+ path: C:\foo.ini
+ register: file_info
+
+- name: Obtain information about a folder
+ win_stat:
+ path: C:\bar
+ register: folder_info
+
+- name: Get MD5 checksum of a file
+ win_stat:
+ path: C:\foo.ini
+ get_checksum: yes
+ checksum_algorithm: md5
+ register: md5_checksum
+
+- debug:
+ var: md5_checksum.stat.checksum
+
+- name: Get SHA1 checksum of file
+ win_stat:
+ path: C:\foo.ini
+ get_checksum: yes
+ register: sha1_checksum
+
+- debug:
+ var: sha1_checksum.stat.checksum
+
+- name: Get SHA256 checksum of file
+ win_stat:
+ path: C:\foo.ini
+ get_checksum: yes
+ checksum_algorithm: sha256
+ register: sha256_checksum
+
+- debug:
+ var: sha256_checksum.stat.checksum
+'''
+
+RETURN = r'''
+changed:
+ description: Whether anything was changed
+ returned: always
+ type: bool
+ sample: true
+stat:
+ description: dictionary containing all the stat data
+ returned: success
+ type: complex
+ contains:
+ attributes:
+ description: Attributes of the file at path in raw form.
+ returned: success, path exists
+ type: str
+ sample: "Archive, Hidden"
+ checksum:
+ description: The checksum of a file based on checksum_algorithm specified.
+      returned: success, path exists, path is a file, get_checksum == True
+ checksum_algorithm specified is supported
+ type: str
+ sample: 09cb79e8fc7453c84a07f644e441fd81623b7f98
+ creationtime:
+ description: The create time of the file represented in seconds since epoch.
+ returned: success, path exists
+ type: float
+ sample: 1477984205.15
+ exists:
+ description: If the path exists or not.
+ returned: success
+ type: bool
+ sample: true
+ extension:
+ description: The extension of the file at path.
+ returned: success, path exists, path is a file
+ type: str
+ sample: ".ps1"
+ filename:
+ description: The name of the file (without path).
+ returned: success, path exists, path is a file
+ type: str
+ sample: foo.ini
+ hlnk_targets:
+ description: List of other files pointing to the same file (hard links), excludes the current file.
+ returned: success, path exists
+ type: list
+ sample:
+ - C:\temp\file.txt
+ - C:\Windows\update.log
+ isarchive:
+ description: If the path is ready for archiving or not.
+ returned: success, path exists
+ type: bool
+ sample: true
+ isdir:
+ description: If the path is a directory or not.
+ returned: success, path exists
+ type: bool
+ sample: true
+ ishidden:
+ description: If the path is hidden or not.
+ returned: success, path exists
+ type: bool
+ sample: true
+ isjunction:
+ description: If the path is a junction point or not.
+ returned: success, path exists
+ type: bool
+ sample: true
+ islnk:
+ description: If the path is a symbolic link or not.
+ returned: success, path exists
+ type: bool
+ sample: true
+ isreadonly:
+ description: If the path is read only or not.
+ returned: success, path exists
+ type: bool
+ sample: true
+ isreg:
+ description: If the path is a regular file.
+ returned: success, path exists
+ type: bool
+ sample: true
+ isshared:
+ description: If the path is shared or not.
+ returned: success, path exists
+ type: bool
+ sample: true
+ lastaccesstime:
+ description: The last access time of the file represented in seconds since epoch.
+ returned: success, path exists
+ type: float
+ sample: 1477984205.15
+ lastwritetime:
+ description: The last modification time of the file represented in seconds since epoch.
+ returned: success, path exists
+ type: float
+ sample: 1477984205.15
+ lnk_source:
+ description: Target of the symlink normalized for the remote filesystem.
+ returned: success, path exists and the path is a symbolic link or junction point
+ type: str
+ sample: C:\temp\link
+ lnk_target:
+ description: Target of the symlink. Note that relative paths remain relative.
+ returned: success, path exists and the path is a symbolic link or junction point
+ type: str
+ sample: ..\link
+ nlink:
+ description: Number of links to the file (hard links).
+ returned: success, path exists
+ type: int
+ sample: 1
+ owner:
+ description: The owner of the file.
+ returned: success, path exists
+ type: str
+ sample: BUILTIN\Administrators
+ path:
+ description: The full absolute path to the file.
+ returned: success, path exists, file exists
+ type: str
+ sample: C:\foo.ini
+ sharename:
+ description: The name of share if folder is shared.
+ returned: success, path exists, file is a directory and isshared == True
+ type: str
+ sample: file-share
+ size:
+ description: The size in bytes of a file or folder.
+ returned: success, path exists, file is not a link
+ type: int
+ sample: 1024
+'''
diff --git a/test/support/windows-integration/plugins/modules/win_data_deduplication.ps1 b/test/support/windows-integration/plugins/modules/win_data_deduplication.ps1
deleted file mode 100644
index 593ee763..00000000
--- a/test/support/windows-integration/plugins/modules/win_data_deduplication.ps1
+++ /dev/null
@@ -1,129 +0,0 @@
-#!powershell
-
-# Copyright: 2019, rnsc(@rnsc) <github@rnsc.be>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt
-
-#AnsibleRequires -CSharpUtil Ansible.Basic
-#AnsibleRequires -OSVersion 6.3
-
-$spec = @{
- options = @{
- drive_letter = @{ type = "str"; required = $true }
- state = @{ type = "str"; choices = "absent", "present"; default = "present"; }
- settings = @{
- type = "dict"
- required = $false
- options = @{
- minimum_file_size = @{ type = "int"; default = 32768 }
- minimum_file_age_days = @{ type = "int"; default = 2 }
- no_compress = @{ type = "bool"; required = $false; default = $false }
- optimize_in_use_files = @{ type = "bool"; required = $false; default = $false }
- verify = @{ type = "bool"; required = $false; default = $false }
- }
- }
- }
- supports_check_mode = $true
-}
-
-$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
-
-$drive_letter = $module.Params.drive_letter
-$state = $module.Params.state
-$settings = $module.Params.settings
-
-$module.Result.changed = $false
-$module.Result.reboot_required = $false
-$module.Result.msg = ""
-
-function Set-DataDeduplication($volume, $state, $settings, $dedup_job) {
-
- $current_state = 'absent'
-
- try {
- $dedup_info = Get-DedupVolume -Volume "$($volume.DriveLetter):"
- } catch {
- $dedup_info = $null
- }
-
- if ($dedup_info.Enabled) {
- $current_state = 'present'
- }
-
- if ( $state -ne $current_state ) {
- if( -not $module.CheckMode) {
- if($state -eq 'present') {
- # Enable-DedupVolume -Volume <String>
- Enable-DedupVolume -Volume "$($volume.DriveLetter):"
- } elseif ($state -eq 'absent') {
- Disable-DedupVolume -Volume "$($volume.DriveLetter):"
- }
- }
- $module.Result.changed = $true
- }
-
- if ($state -eq 'present') {
- if ($null -ne $settings) {
- Set-DataDedupJobSettings -volume $volume -settings $settings
- }
- }
-}
-
-function Set-DataDedupJobSettings ($volume, $settings) {
-
- try {
- $dedup_info = Get-DedupVolume -Volume "$($volume.DriveLetter):"
- } catch {
- $dedup_info = $null
- }
-
- ForEach ($key in $settings.keys) {
-
- # See Microsoft documentation:
- # https://docs.microsoft.com/en-us/powershell/module/deduplication/set-dedupvolume?view=win10-ps
-
- $update_key = $key
- $update_value = $settings.$($key)
- # Transform Ansible style options to Powershell params
- $update_key = $update_key -replace('_', '')
-
- if ($update_key -eq "MinimumFileSize" -and $update_value -lt 32768) {
- $update_value = 32768
- }
-
- $current_value = ($dedup_info | Select-Object -ExpandProperty $update_key)
-
- if ($update_value -ne $current_value) {
- $command_param = @{
- $($update_key) = $update_value
- }
-
- # Set-DedupVolume -Volume <String>`
- # -NoCompress <bool> `
- # -MinimumFileAgeDays <UInt32> `
- # -MinimumFileSize <UInt32> (minimum 32768)
- if( -not $module.CheckMode ) {
- Set-DedupVolume -Volume "$($volume.DriveLetter):" @command_param
- }
-
- $module.Result.changed = $true
- }
- }
-
-}
-
-# Install required feature
-$feature_name = "FS-Data-Deduplication"
-if( -not $module.CheckMode) {
- $feature = Install-WindowsFeature -Name $feature_name
-
- if ($feature.RestartNeeded -eq 'Yes') {
- $module.Result.reboot_required = $true
- $module.FailJson("$feature_name was installed but requires Windows to be rebooted to work.")
- }
-}
-
-$volume = Get-Volume -DriveLetter $drive_letter
-
-Set-DataDeduplication -volume $volume -state $state -settings $settings -dedup_job $dedup_job
-
-$module.ExitJson()
diff --git a/test/support/windows-integration/plugins/modules/win_data_deduplication.py b/test/support/windows-integration/plugins/modules/win_data_deduplication.py
deleted file mode 100644
index d320b9f7..00000000
--- a/test/support/windows-integration/plugins/modules/win_data_deduplication.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: 2019, rnsc(@rnsc) <github@rnsc.be>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: win_data_deduplication
-version_added: "2.10"
-short_description: Module to enable Data Deduplication on a volume.
-description:
-- This module can be used to enable Data Deduplication on a Windows volume.
-- The module will install the FS-Data-Deduplication feature (a reboot will be necessary).
-options:
- drive_letter:
- description:
- - Windows drive letter on which to enable data deduplication.
- required: yes
- type: str
- state:
- description:
- - Wether to enable or disable data deduplication on the selected volume.
- default: present
- type: str
- choices: [ present, absent ]
- settings:
- description:
- - Dictionary of settings to pass to the Set-DedupVolume powershell command.
- type: dict
- suboptions:
- minimum_file_size:
- description:
- - Minimum file size you want to target for deduplication.
- - It will default to 32768 if not defined or if the value is less than 32768.
- type: int
- default: 32768
- minimum_file_age_days:
- description:
- - Minimum file age you want to target for deduplication.
- type: int
- default: 2
- no_compress:
- description:
- - Wether you want to enabled filesystem compression or not.
- type: bool
- default: no
- optimize_in_use_files:
- description:
- - Indicates that the server attempts to optimize currently open files.
- type: bool
- default: no
- verify:
- description:
- - Indicates whether the deduplication engine performs a byte-for-byte verification for each duplicate chunk
- that optimization creates, rather than relying on a cryptographically strong hash.
- - This option is not recommend.
- - Setting this parameter to True can degrade optimization performance.
- type: bool
- default: no
-author:
-- rnsc (@rnsc)
-'''
-
-EXAMPLES = r'''
-- name: Enable Data Deduplication on D
- win_data_deduplication:
- drive_letter: 'D'
- state: present
-
-- name: Enable Data Deduplication on D
- win_data_deduplication:
- drive_letter: 'D'
- state: present
- settings:
- no_compress: true
- minimum_file_age_days: 1
- minimum_file_size: 0
-'''
-
-RETURN = r'''
-#
-'''
diff --git a/test/support/windows-integration/plugins/modules/win_dsc.ps1 b/test/support/windows-integration/plugins/modules/win_dsc.ps1
deleted file mode 100644
index 690f391a..00000000
--- a/test/support/windows-integration/plugins/modules/win_dsc.ps1
+++ /dev/null
@@ -1,398 +0,0 @@
-#!powershell
-
-# Copyright: (c) 2015, Trond Hindenes <trond@hindenes.com>, and others
-# Copyright: (c) 2017, Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-#AnsibleRequires -CSharpUtil Ansible.Basic
-#Requires -Version 5
-
-Function ConvertTo-ArgSpecType {
- <#
- .SYNOPSIS
- Converts the DSC parameter type to the arg spec type required for Ansible.
- #>
- param(
- [Parameter(Mandatory=$true)][String]$CimType
- )
-
- $arg_type = switch($CimType) {
- Boolean { "bool" }
- Char16 { [Func[[Object], [Char]]]{ [System.Char]::Parse($args[0].ToString()) } }
- DateTime { [Func[[Object], [DateTime]]]{ [System.DateTime]($args[0].ToString()) } }
- Instance { "dict" }
- Real32 { "float" }
- Real64 { [Func[[Object], [Double]]]{ [System.Double]::Parse($args[0].ToString()) } }
- Reference { "dict" }
- SInt16 { [Func[[Object], [Int16]]]{ [System.Int16]::Parse($args[0].ToString()) } }
- SInt32 { "int" }
- SInt64 { [Func[[Object], [Int64]]]{ [System.Int64]::Parse($args[0].ToString()) } }
- SInt8 { [Func[[Object], [SByte]]]{ [System.SByte]::Parse($args[0].ToString()) } }
- String { "str" }
- UInt16 { [Func[[Object], [UInt16]]]{ [System.UInt16]::Parse($args[0].ToString()) } }
- UInt32 { [Func[[Object], [UInt32]]]{ [System.UInt32]::Parse($args[0].ToString()) } }
- UInt64 { [Func[[Object], [UInt64]]]{ [System.UInt64]::Parse($args[0].ToString()) } }
- UInt8 { [Func[[Object], [Byte]]]{ [System.Byte]::Parse($args[0].ToString()) } }
- Unknown { "raw" }
- default { "raw" }
- }
- return $arg_type
-}
-
-Function Get-DscCimClassProperties {
- <#
- .SYNOPSIS
- Get's a list of CimProperties of a CIM Class. It filters out any magic or
- read only properties that we don't need to know about.
- #>
- param([Parameter(Mandatory=$true)][String]$ClassName)
-
- $resource = Get-CimClass -ClassName $ClassName -Namespace root\Microsoft\Windows\DesiredStateConfiguration
-
- # Filter out any magic properties that are used internally on an OMI_BaseResource
- # https://github.com/PowerShell/PowerShell/blob/master/src/System.Management.Automation/DscSupport/CimDSCParser.cs#L1203
- $magic_properties = @("ResourceId", "SourceInfo", "ModuleName", "ModuleVersion", "ConfigurationName")
- $properties = $resource.CimClassProperties | Where-Object {
-
- ($resource.CimSuperClassName -ne "OMI_BaseResource" -or $_.Name -notin $magic_properties) -and
- -not $_.Flags.HasFlag([Microsoft.Management.Infrastructure.CimFlags]::ReadOnly)
- }
-
- return ,$properties
-}
-
-Function Add-PropertyOption {
- <#
- .SYNOPSIS
- Adds the spec for the property type to the existing module specification.
- #>
- param(
- [Parameter(Mandatory=$true)][Hashtable]$Spec,
- [Parameter(Mandatory=$true)]
- [Microsoft.Management.Infrastructure.CimPropertyDeclaration]$Property
- )
-
- $option = @{
- required = $false
- }
- $property_name = $Property.Name
- $property_type = $Property.CimType.ToString()
-
- if ($Property.Flags.HasFlag([Microsoft.Management.Infrastructure.CimFlags]::Key) -or
- $Property.Flags.HasFlag([Microsoft.Management.Infrastructure.CimFlags]::Required)) {
- $option.required = $true
- }
-
- if ($null -ne $Property.Qualifiers['Values']) {
- $option.choices = [System.Collections.Generic.List`1[Object]]$Property.Qualifiers['Values'].Value
- }
-
- if ($property_name -eq "Name") {
- # For backwards compatibility we support specifying the Name DSC property as item_name
- $option.aliases = @("item_name")
- } elseif ($property_name -ceq "key") {
- # There seems to be a bug in the CIM property parsing when the property name is 'Key'. The CIM instance will
- # think the name is 'key' when the MOF actually defines it as 'Key'. We set the proper casing so the module arg
- # validator won't fire a case sensitive warning
- $property_name = "Key"
- }
-
- if ($Property.ReferenceClassName -eq "MSFT_Credential") {
- # Special handling for the MSFT_Credential type (PSCredential), we handle this with having 2 options that
- # have the suffix _username and _password.
- $option_spec_pass = @{
- type = "str"
- required = $option.required
- no_log = $true
- }
- $Spec.options."$($property_name)_password" = $option_spec_pass
- $Spec.required_together.Add(@("$($property_name)_username", "$($property_name)_password")) > $null
-
- $property_name = "$($property_name)_username"
- $option.type = "str"
- } elseif ($Property.ReferenceClassName -eq "MSFT_KeyValuePair") {
- $option.type = "dict"
- } elseif ($property_type.EndsWith("Array")) {
- $option.type = "list"
- $option.elements = ConvertTo-ArgSpecType -CimType $property_type.Substring(0, $property_type.Length - 5)
- } else {
- $option.type = ConvertTo-ArgSpecType -CimType $property_type
- }
-
- if (($option.type -eq "dict" -or ($option.type -eq "list" -and $option.elements -eq "dict")) -and
- $Property.ReferenceClassName -ne "MSFT_KeyValuePair") {
- # Get the sub spec if the type is a Instance (CimInstance/dict)
- $sub_option_spec = Get-OptionSpec -ClassName $Property.ReferenceClassName
- $option += $sub_option_spec
- }
-
- $Spec.options.$property_name = $option
-}
-
-Function Get-OptionSpec {
- <#
- .SYNOPSIS
- Generates the specifiec used in AnsibleModule for a CIM MOF resource name.
-
- .NOTES
- This won't be able to retrieve the default values for an option as that is not defined in the MOF for a resource.
- Default values are still preserved in the DSC engine if we don't pass in the property at all, we just can't report
- on what they are automatically.
- #>
- param(
- [Parameter(Mandatory=$true)][String]$ClassName
- )
-
- $spec = @{
- options = @{}
- required_together = [System.Collections.ArrayList]@()
- }
- $properties = Get-DscCimClassProperties -ClassName $ClassName
- foreach ($property in $properties) {
- Add-PropertyOption -Spec $spec -Property $property
- }
-
- return $spec
-}
-
-Function ConvertTo-CimInstance {
- <#
- .SYNOPSIS
- Converts a dict to a CimInstance of the specified Class. Also provides a
- better error message if this fails that contains the option name that failed.
- #>
- param(
- [Parameter(Mandatory=$true)][String]$Name,
- [Parameter(Mandatory=$true)][String]$ClassName,
- [Parameter(Mandatory=$true)][System.Collections.IDictionary]$Value,
- [Parameter(Mandatory=$true)][Ansible.Basic.AnsibleModule]$Module,
- [Switch]$Recurse
- )
-
- $properties = @{}
- foreach ($value_info in $Value.GetEnumerator()) {
- # Need to remove all null values from existing dict so the conversion works
- if ($null -eq $value_info.Value) {
- continue
- }
- $properties.($value_info.Key) = $value_info.Value
- }
-
- if ($Recurse) {
- # We want to validate and convert and values to what's required by DSC
- $properties = ConvertTo-DscProperty -ClassName $ClassName -Params $properties -Module $Module
- }
-
- try {
- return (New-CimInstance -ClassName $ClassName -Property $properties -ClientOnly)
- } catch {
- # New-CimInstance raises a poor error message, make sure we mention what option it is for
- $Module.FailJson("Failed to cast dict value for option '$Name' to a CimInstance: $($_.Exception.Message)", $_)
- }
-}
-
-Function ConvertTo-DscProperty {
- <#
- .SYNOPSIS
- Converts the input module parameters that have been validated and casted
- into the types expected by the DSC engine. This is mostly done to deal with
- types like PSCredential and Dictionaries.
- #>
- param(
- [Parameter(Mandatory=$true)][String]$ClassName,
- [Parameter(Mandatory=$true)][System.Collections.IDictionary]$Params,
- [Parameter(Mandatory=$true)][Ansible.Basic.AnsibleModule]$Module
- )
- $properties = Get-DscCimClassProperties -ClassName $ClassName
-
- $dsc_properties = @{}
- foreach ($property in $properties) {
- $property_name = $property.Name
- $property_type = $property.CimType.ToString()
-
- if ($property.ReferenceClassName -eq "MSFT_Credential") {
- $username = $Params."$($property_name)_username"
- $password = $Params."$($property_name)_password"
-
- # No user set == No option set in playbook, skip this property
- if ($null -eq $username) {
- continue
- }
- $sec_password = ConvertTo-SecureString -String $password -AsPlainText -Force
- $value = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $username, $sec_password
- } else {
- $value = $Params.$property_name
-
- # The actual value wasn't set, skip adding this property
- if ($null -eq $value) {
- continue
- }
-
- if ($property.ReferenceClassName -eq "MSFT_KeyValuePair") {
- $key_value_pairs = [System.Collections.Generic.List`1[CimInstance]]@()
- foreach ($value_info in $value.GetEnumerator()) {
- $kvp = @{Key = $value_info.Key; Value = $value_info.Value.ToString()}
- $cim_instance = ConvertTo-CimInstance -Name $property_name -ClassName MSFT_KeyValuePair `
- -Value $kvp -Module $Module
- $key_value_pairs.Add($cim_instance) > $null
- }
- $value = $key_value_pairs.ToArray()
- } elseif ($null -ne $property.ReferenceClassName) {
- # Convert the dict to a CimInstance (or list of CimInstances)
- $convert_args = @{
- ClassName = $property.ReferenceClassName
- Module = $Module
- Name = $property_name
- Recurse = $true
- }
- if ($property_type.EndsWith("Array")) {
- $value = [System.Collections.Generic.List`1[CimInstance]]@()
- foreach ($raw in $Params.$property_name.GetEnumerator()) {
- $cim_instance = ConvertTo-CimInstance -Value $raw @convert_args
- $value.Add($cim_instance) > $null
- }
- $value = $value.ToArray() # Need to make sure we are dealing with an Array not a List
- } else {
- $value = ConvertTo-CimInstance -Value $value @convert_args
- }
- }
- }
- $dsc_properties.$property_name = $value
- }
-
- return $dsc_properties
-}
-
-Function Invoke-DscMethod {
- <#
- .SYNOPSIS
- Invokes the DSC Resource Method specified in another PS pipeline. This is
- done so we can retrieve the Verbose stream and return it back to the user
- for futher debugging.
- #>
- param(
- [Parameter(Mandatory=$true)][Ansible.Basic.AnsibleModule]$Module,
- [Parameter(Mandatory=$true)][String]$Method,
- [Parameter(Mandatory=$true)][Hashtable]$Arguments
- )
-
- # Invoke the DSC resource in a separate runspace so we can capture the Verbose output
- $ps = [PowerShell]::Create()
- $ps.AddCommand("Invoke-DscResource").AddParameter("Method", $Method) > $null
- $ps.AddParameters($Arguments) > $null
-
- $result = $ps.Invoke()
-
- # Pass the warnings through to the AnsibleModule return result
- foreach ($warning in $ps.Streams.Warning) {
- $Module.Warn($warning.Message)
- }
-
- # If running at a high enough verbosity, add the verbose output to the AnsibleModule return result
- if ($Module.Verbosity -ge 3) {
- $verbose_logs = [System.Collections.Generic.List`1[String]]@()
- foreach ($verbosity in $ps.Streams.Verbose) {
- $verbose_logs.Add($verbosity.Message) > $null
- }
- $Module.Result."verbose_$($Method.ToLower())" = $verbose_logs
- }
-
- if ($ps.HadErrors) {
- # Cannot pass in the ErrorRecord as it's a RemotingErrorRecord and doesn't contain the ScriptStackTrace
- # or other info that would be useful
- $Module.FailJson("Failed to invoke DSC $Method method: $($ps.Streams.Error[0].Exception.Message)")
- }
-
- return $result
-}
-
-# win_dsc is unique in that is builds the arg spec based on DSC Resource input. To get this info
-# we need to read the resource_name and module_version value which is done outside of Ansible.Basic
-if ($args.Length -gt 0) {
- $params = Get-Content -Path $args[0] | ConvertFrom-Json
-} else {
- $params = $complex_args
-}
-if (-not $params.ContainsKey("resource_name")) {
- $res = @{
- msg = "missing required argument: resource_name"
- failed = $true
- }
- Write-Output -InputObject (ConvertTo-Json -Compress -InputObject $res)
- exit 1
-}
-$resource_name = $params.resource_name
-
-if ($params.ContainsKey("module_version")) {
- $module_version = $params.module_version
-} else {
- $module_version = "latest"
-}
-
-$module_versions = (Get-DscResource -Name $resource_name -ErrorAction SilentlyContinue | Sort-Object -Property Version)
-$resource = $null
-if ($module_version -eq "latest" -and $null -ne $module_versions) {
- $resource = $module_versions[-1]
-} elseif ($module_version -ne "latest") {
- $resource = $module_versions | Where-Object { $_.Version -eq $module_version }
-}
-
-if (-not $resource) {
- if ($module_version -eq "latest") {
- $msg = "Resource '$resource_name' not found."
- } else {
- $msg = "Resource '$resource_name' with version '$module_version' not found."
- $msg += " Versions installed: '$($module_versions.Version -join "', '")'."
- }
-
- Write-Output -InputObject (ConvertTo-Json -Compress -InputObject @{ failed = $true; msg = $msg })
- exit 1
-}
-
-# Build the base args for the DSC Invocation based on the resource selected
-$dsc_args = @{
- Name = $resource.Name
-}
-
-# Binary resources are not working very well with that approach - need to guesstimate module name/version
-$module_version = $null
-if ($resource.Module) {
- $dsc_args.ModuleName = @{
- ModuleName = $resource.Module.Name
- ModuleVersion = $resource.Module.Version
- }
- $module_version = $resource.Module.Version.ToString()
-} else {
- $dsc_args.ModuleName = "PSDesiredStateConfiguration"
-}
-
-# To ensure the class registered with CIM is the one based on our version, we want to run the Get method so the DSC
-# engine updates the metadata propery. We don't care about any errors here
-try {
- Invoke-DscResource -Method Get -Property @{Fake="Fake"} @dsc_args > $null
-} catch {}
-
-# Dynamically build the option spec based on the resource_name specified and create the module object
-$spec = Get-OptionSpec -ClassName $resource.ResourceType
-$spec.supports_check_mode = $true
-$spec.options.module_version = @{ type = "str"; default = "latest" }
-$spec.options.resource_name = @{ type = "str"; required = $true }
-
-$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
-$module.Result.reboot_required = $false
-$module.Result.module_version = $module_version
-
-# Build the DSC invocation arguments and invoke the resource
-$dsc_args.Property = ConvertTo-DscProperty -ClassName $resource.ResourceType -Module $module -Params $Module.Params
-$dsc_args.Verbose = $true
-
-$test_result = Invoke-DscMethod -Module $module -Method Test -Arguments $dsc_args
-if ($test_result.InDesiredState -ne $true) {
- if (-not $module.CheckMode) {
- $result = Invoke-DscMethod -Module $module -Method Set -Arguments $dsc_args
- $module.Result.reboot_required = $result.RebootRequired
- }
- $module.Result.changed = $true
-}
-
-$module.ExitJson()
diff --git a/test/support/windows-integration/plugins/modules/win_dsc.py b/test/support/windows-integration/plugins/modules/win_dsc.py
deleted file mode 100644
index 200d025e..00000000
--- a/test/support/windows-integration/plugins/modules/win_dsc.py
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2015, Trond Hindenes <trond@hindenes.com>, and others
-# Copyright: (c) 2017, Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: win_dsc
-version_added: "2.4"
-short_description: Invokes a PowerShell DSC configuration
-description:
-- Configures a resource using PowerShell DSC.
-- Requires PowerShell version 5.0 or newer.
-- Most of the options for this module are dynamic and will vary depending on
- the DSC Resource specified in I(resource_name).
-- See :doc:`/user_guide/windows_dsc` for more information on how to use this module.
-options:
- resource_name:
- description:
- - The name of the DSC Resource to use.
- - Must be accessible to PowerShell using any of the default paths.
- type: str
- required: yes
- module_version:
- description:
- - Can be used to configure the exact version of the DSC resource to be
- invoked.
- - Useful if the target node has multiple versions installed of the module
- containing the DSC resource.
- - If not specified, the module will follow standard PowerShell convention
- and use the highest version available.
- type: str
- default: latest
- free_form:
- description:
- - The M(win_dsc) module takes in multiple free form options based on the
- DSC resource being invoked by I(resource_name).
- - There is no option actually named C(free_form) so see the examples.
- - This module will try and convert the option to the correct type required
- by the DSC resource and throw a warning if it fails.
- - If the type of the DSC resource option is a C(CimInstance) or
- C(CimInstance[]), this means the value should be a dictionary or list
- of dictionaries based on the values required by that option.
- - If the type of the DSC resource option is a C(PSCredential) then there
- needs to be 2 options set in the Ansible task definition suffixed with
- C(_username) and C(_password).
- - If the type of the DSC resource option is an array, then a list should be
- provided but a comma separated string also work. Use a list where
- possible as no escaping is required and it works with more complex types
- list C(CimInstance[]).
- - If the type of the DSC resource option is a C(DateTime), you should use
- a string in the form of an ISO 8901 string to ensure the exact date is
- used.
- - Since Ansible 2.8, Ansible will now validate the input fields against the
- DSC resource definition automatically. Older versions will silently
- ignore invalid fields.
- type: str
- required: true
-notes:
-- By default there are a few builtin resources that come with PowerShell 5.0,
- see U(https://docs.microsoft.com/en-us/powershell/scripting/dsc/resources/resources) for
- more information on these resources.
-- Custom DSC resources can be installed with M(win_psmodule) using the I(name)
- option.
-- The DSC engine run's each task as the SYSTEM account, any resources that need
- to be accessed with a different account need to have C(PsDscRunAsCredential)
- set.
-- To see the valid options for a DSC resource, run the module with C(-vvv) to
- show the possible module invocation. Default values are not shown in this
- output but are applied within the DSC engine.
-author:
-- Trond Hindenes (@trondhindenes)
-'''
-
-EXAMPLES = r'''
-- name: Extract zip file
- win_dsc:
- resource_name: Archive
- Ensure: Present
- Path: C:\Temp\zipfile.zip
- Destination: C:\Temp\Temp2
-
-- name: Install a Windows feature with the WindowsFeature resource
- win_dsc:
- resource_name: WindowsFeature
- Name: telnet-client
-
-- name: Edit HKCU reg key under specific user
- win_dsc:
- resource_name: Registry
- Ensure: Present
- Key: HKEY_CURRENT_USER\ExampleKey
- ValueName: TestValue
- ValueData: TestData
- PsDscRunAsCredential_username: '{{ansible_user}}'
- PsDscRunAsCredential_password: '{{ansible_password}}'
- no_log: true
-
-- name: Create file with multiple attributes
- win_dsc:
- resource_name: File
- DestinationPath: C:\ansible\dsc
- Attributes: # can also be a comma separated string, e.g. 'Hidden, System'
- - Hidden
- - System
- Ensure: Present
- Type: Directory
-
-- name: Call DSC resource with DateTime option
- win_dsc:
- resource_name: DateTimeResource
- DateTimeOption: '2019-02-22T13:57:31.2311892+00:00'
-
-# more complex example using custom DSC resource and dict values
-- name: Setup the xWebAdministration module
- win_psmodule:
- name: xWebAdministration
- state: present
-
-- name: Create IIS Website with Binding and Authentication options
- win_dsc:
- resource_name: xWebsite
- Ensure: Present
- Name: DSC Website
- State: Started
- PhysicalPath: C:\inetpub\wwwroot
- BindingInfo: # Example of a CimInstance[] DSC parameter (list of dicts)
- - Protocol: https
- Port: 1234
- CertificateStoreName: MY
- CertificateThumbprint: C676A89018C4D5902353545343634F35E6B3A659
- HostName: DSCTest
- IPAddress: '*'
- SSLFlags: '1'
- - Protocol: http
- Port: 4321
- IPAddress: '*'
- AuthenticationInfo: # Example of a CimInstance DSC parameter (dict)
- Anonymous: no
- Basic: true
- Digest: false
- Windows: yes
-'''
-
-RETURN = r'''
-module_version:
- description: The version of the dsc resource/module used.
- returned: always
- type: str
- sample: "1.0.1"
-reboot_required:
- description: Flag returned from the DSC engine indicating whether or not
- the machine requires a reboot for the invoked changes to take effect.
- returned: always
- type: bool
- sample: true
-verbose_test:
- description: The verbose output as a list from executing the DSC test
- method.
- returned: Ansible verbosity is -vvv or greater
- type: list
- sample: [
- "Perform operation 'Invoke CimMethod' with the following parameters, ",
- "[SERVER]: LCM: [Start Test ] [[File]DirectResourceAccess]",
- "Operation 'Invoke CimMethod' complete."
- ]
-verbose_set:
- description: The verbose output as a list from executing the DSC Set
- method.
- returned: Ansible verbosity is -vvv or greater and a change occurred
- type: list
- sample: [
- "Perform operation 'Invoke CimMethod' with the following parameters, ",
- "[SERVER]: LCM: [Start Set ] [[File]DirectResourceAccess]",
- "Operation 'Invoke CimMethod' complete."
- ]
-'''
diff --git a/test/support/windows-integration/plugins/modules/win_feature.ps1 b/test/support/windows-integration/plugins/modules/win_feature.ps1
deleted file mode 100644
index 9a7e1c30..00000000
--- a/test/support/windows-integration/plugins/modules/win_feature.ps1
+++ /dev/null
@@ -1,111 +0,0 @@
-#!powershell
-
-# Copyright: (c) 2014, Paul Durivage <paul.durivage@rackspace.com>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-#Requires -Module Ansible.ModuleUtils.Legacy
-
-Import-Module -Name ServerManager
-
-$result = @{
- changed = $false
-}
-
-$params = Parse-Args $args -supports_check_mode $true
-$check_mode = Get-AnsibleParam -obj $params -name "_ansible_check_mode" -type "bool" -default $false
-
-$name = Get-AnsibleParam -obj $params -name "name" -type "list" -failifempty $true
-$state = Get-AnsibleParam -obj $params -name "state" -type "str" -default "present" -validateset "present","absent"
-
-$include_sub_features = Get-AnsibleParam -obj $params -name "include_sub_features" -type "bool" -default $false
-$include_management_tools = Get-AnsibleParam -obj $params -name "include_management_tools" -type "bool" -default $false
-$source = Get-AnsibleParam -obj $params -name "source" -type "str"
-
-$install_cmdlet = $false
-if (Get-Command -Name Install-WindowsFeature -ErrorAction SilentlyContinue) {
- Set-Alias -Name Install-AnsibleWindowsFeature -Value Install-WindowsFeature
- Set-Alias -Name Uninstall-AnsibleWindowsFeature -Value Uninstall-WindowsFeature
- $install_cmdlet = $true
-} elseif (Get-Command -Name Add-WindowsFeature -ErrorAction SilentlyContinue) {
- Set-Alias -Name Install-AnsibleWindowsFeature -Value Add-WindowsFeature
- Set-Alias -Name Uninstall-AnsibleWindowsFeature -Value Remove-WindowsFeature
-} else {
- Fail-Json -obj $result -message "This version of Windows does not support the cmdlets Install-WindowsFeature or Add-WindowsFeature"
-}
-
-if ($state -eq "present") {
- $install_args = @{
- Name = $name
- IncludeAllSubFeature = $include_sub_features
- Restart = $false
- WhatIf = $check_mode
- ErrorAction = "Stop"
- }
-
- if ($install_cmdlet) {
- $install_args.IncludeManagementTools = $include_management_tools
- $install_args.Confirm = $false
- if ($source) {
- if (-not (Test-Path -Path $source)) {
- Fail-Json -obj $result -message "Failed to find source path $source for feature install"
- }
- $install_args.Source = $source
- }
- }
-
- try {
- $action_results = Install-AnsibleWindowsFeature @install_args
- } catch {
- Fail-Json -obj $result -message "Failed to install Windows Feature: $($_.Exception.Message)"
- }
-} else {
- $uninstall_args = @{
- Name = $name
- Restart = $false
- WhatIf = $check_mode
- ErrorAction = "Stop"
- }
- if ($install_cmdlet) {
- $uninstall_args.IncludeManagementTools = $include_management_tools
- }
-
- try {
- $action_results = Uninstall-AnsibleWindowsFeature @uninstall_args
- } catch {
- Fail-Json -obj $result -message "Failed to uninstall Windows Feature: $($_.Exception.Message)"
- }
-}
-
-# Loop through results and create a hash containing details about
-# each role/feature that is installed/removed
-# $action_results.FeatureResult is not empty if anything was changed
-$feature_results = @()
-foreach ($action_result in $action_results.FeatureResult) {
- $message = @()
- foreach ($msg in $action_result.Message) {
- $message += @{
- message_type = $msg.MessageType.ToString()
- error_code = $msg.ErrorCode
- text = $msg.Text
- }
- }
-
- $feature_results += @{
- id = $action_result.Id
- display_name = $action_result.DisplayName
- message = $message
- reboot_required = ConvertTo-Bool -obj $action_result.RestartNeeded
- skip_reason = $action_result.SkipReason.ToString()
- success = ConvertTo-Bool -obj $action_result.Success
- restart_needed = ConvertTo-Bool -obj $action_result.RestartNeeded
- }
- $result.changed = $true
-}
-$result.feature_result = $feature_results
-$result.success = ConvertTo-Bool -obj $action_results.Success
-$result.exitcode = $action_results.ExitCode.ToString()
-$result.reboot_required = ConvertTo-Bool -obj $action_results.RestartNeeded
-# controls whether Ansible will fail or not
-$result.failed = (-not $action_results.Success)
-
-Exit-Json -obj $result
diff --git a/test/support/windows-integration/plugins/modules/win_feature.py b/test/support/windows-integration/plugins/modules/win_feature.py
deleted file mode 100644
index 62e310b2..00000000
--- a/test/support/windows-integration/plugins/modules/win_feature.py
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2014, Paul Durivage <paul.durivage@rackspace.com>
-# Copyright: (c) 2014, Trond Hindenes <trond@hindenes.com>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# this is a windows documentation stub. actual code lives in the .ps1
-# file of the same name
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: win_feature
-version_added: "1.7"
-short_description: Installs and uninstalls Windows Features on Windows Server
-description:
- - Installs or uninstalls Windows Roles or Features on Windows Server.
- - This module uses the Add/Remove-WindowsFeature Cmdlets on Windows 2008 R2
- and Install/Uninstall-WindowsFeature Cmdlets on Windows 2012, which are not available on client os machines.
-options:
- name:
- description:
- - Names of roles or features to install as a single feature or a comma-separated list of features.
- - To list all available features use the PowerShell command C(Get-WindowsFeature).
- type: list
- required: yes
- state:
- description:
- - State of the features or roles on the system.
- type: str
- choices: [ absent, present ]
- default: present
- include_sub_features:
- description:
- - Adds all subfeatures of the specified feature.
- type: bool
- default: no
- include_management_tools:
- description:
- - Adds the corresponding management tools to the specified feature.
- - Not supported in Windows 2008 R2 and will be ignored.
- type: bool
- default: no
- source:
- description:
- - Specify a source to install the feature from.
- - Not supported in Windows 2008 R2 and will be ignored.
- - Can either be C({driveletter}:\sources\sxs) or C(\\{IP}\share\sources\sxs).
- type: str
- version_added: "2.1"
-seealso:
-- module: win_chocolatey
-- module: win_package
-author:
- - Paul Durivage (@angstwad)
- - Trond Hindenes (@trondhindenes)
-'''
-
-EXAMPLES = r'''
-- name: Install IIS (Web-Server only)
- win_feature:
- name: Web-Server
- state: present
-
-- name: Install IIS (Web-Server and Web-Common-Http)
- win_feature:
- name:
- - Web-Server
- - Web-Common-Http
- state: present
-
-- name: Install NET-Framework-Core from file
- win_feature:
- name: NET-Framework-Core
- source: C:\Temp\iso\sources\sxs
- state: present
-
-- name: Install IIS Web-Server with sub features and management tools
- win_feature:
- name: Web-Server
- state: present
- include_sub_features: yes
- include_management_tools: yes
- register: win_feature
-
-- name: Reboot if installing Web-Server feature requires it
- win_reboot:
- when: win_feature.reboot_required
-'''
-
-RETURN = r'''
-exitcode:
- description: The stringified exit code from the feature installation/removal command.
- returned: always
- type: str
- sample: Success
-feature_result:
- description: List of features that were installed or removed.
- returned: success
- type: complex
- sample:
- contains:
- display_name:
- description: Feature display name.
- returned: always
- type: str
- sample: "Telnet Client"
- id:
- description: A list of KB article IDs that apply to the update.
- returned: always
- type: int
- sample: 44
- message:
- description: Any messages returned from the feature subsystem that occurred during installation or removal of this feature.
- returned: always
- type: list
- elements: str
- sample: []
- reboot_required:
- description: True when the target server requires a reboot as a result of installing or removing this feature.
- returned: always
- type: bool
- sample: true
- restart_needed:
- description: DEPRECATED in Ansible 2.4 (refer to C(reboot_required) instead). True when the target server requires a reboot as a
- result of installing or removing this feature.
- returned: always
- type: bool
- sample: true
- skip_reason:
- description: The reason a feature installation or removal was skipped.
- returned: always
- type: str
- sample: NotSkipped
- success:
- description: If the feature installation or removal was successful.
- returned: always
- type: bool
- sample: true
-reboot_required:
- description: True when the target server requires a reboot to complete updates (no further updates can be installed until after a reboot).
- returned: success
- type: bool
- sample: true
-'''
diff --git a/test/support/windows-integration/plugins/modules/win_find.ps1 b/test/support/windows-integration/plugins/modules/win_find.ps1
deleted file mode 100644
index bc57c5ff..00000000
--- a/test/support/windows-integration/plugins/modules/win_find.ps1
+++ /dev/null
@@ -1,416 +0,0 @@
-#!powershell
-
-# Copyright: (c) 2016, Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-#AnsibleRequires -CSharpUtil Ansible.Basic
-#Requires -Module Ansible.ModuleUtils.LinkUtil
-
-$spec = @{
- options = @{
- paths = @{ type = "list"; elements = "str"; required = $true }
- age = @{ type = "str" }
- age_stamp = @{ type = "str"; default = "mtime"; choices = "mtime", "ctime", "atime" }
- file_type = @{ type = "str"; default = "file"; choices = "file", "directory" }
- follow = @{ type = "bool"; default = $false }
- hidden = @{ type = "bool"; default = $false }
- patterns = @{ type = "list"; elements = "str"; aliases = "regex", "regexp" }
- recurse = @{ type = "bool"; default = $false }
- size = @{ type = "str" }
- use_regex = @{ type = "bool"; default = $false }
- get_checksum = @{ type = "bool"; default = $true }
- checksum_algorithm = @{ type = "str"; default = "sha1"; choices = "md5", "sha1", "sha256", "sha384", "sha512" }
- }
- supports_check_mode = $true
-}
-
-$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
-
-$paths = $module.Params.paths
-$age = $module.Params.age
-$age_stamp = $module.Params.age_stamp
-$file_type = $module.Params.file_type
-$follow = $module.Params.follow
-$hidden = $module.Params.hidden
-$patterns = $module.Params.patterns
-$recurse = $module.Params.recurse
-$size = $module.Params.size
-$use_regex = $module.Params.use_regex
-$get_checksum = $module.Params.get_checksum
-$checksum_algorithm = $module.Params.checksum_algorithm
-
-$module.Result.examined = 0
-$module.Result.files = @()
-$module.Result.matched = 0
-
-Load-LinkUtils
-
-Function Assert-Age {
- Param (
- [System.IO.FileSystemInfo]$File,
- [System.Int64]$Age,
- [System.String]$AgeStamp
- )
-
- $actual_age = switch ($AgeStamp) {
- mtime { $File.LastWriteTime.Ticks }
- ctime { $File.CreationTime.Ticks }
- atime { $File.LastAccessTime.Ticks }
- }
-
- if ($Age -ge 0) {
- return $Age -ge $actual_age
- } else {
- return ($Age * -1) -le $actual_age
- }
-}
-
-Function Assert-FileType {
- Param (
- [System.IO.FileSystemInfo]$File,
- [System.String]$FileType
- )
-
- $is_dir = $File.Attributes.HasFlag([System.IO.FileAttributes]::Directory)
- return ($FileType -eq 'directory' -and $is_dir) -or ($FileType -eq 'file' -and -not $is_dir)
-}
-
-Function Assert-FileHidden {
- Param (
- [System.IO.FileSystemInfo]$File,
- [Switch]$IsHidden
- )
-
- $file_is_hidden = $File.Attributes.HasFlag([System.IO.FileAttributes]::Hidden)
- return $IsHidden.IsPresent -eq $file_is_hidden
-}
-
-
-Function Assert-FileNamePattern {
- Param (
- [System.IO.FileSystemInfo]$File,
- [System.String[]]$Patterns,
- [Switch]$UseRegex
- )
-
- $valid_match = $false
- foreach ($pattern in $Patterns) {
- if ($UseRegex) {
- if ($File.Name -match $pattern) {
- $valid_match = $true
- break
- }
- } else {
- if ($File.Name -like $pattern) {
- $valid_match = $true
- break
- }
- }
- }
- return $valid_match
-}
-
-Function Assert-FileSize {
- Param (
- [System.IO.FileSystemInfo]$File,
- [System.Int64]$Size
- )
-
- if ($Size -ge 0) {
- return $File.Length -ge $Size
- } else {
- return $File.Length -le ($Size * -1)
- }
-}
-
-Function Get-FileChecksum {
- Param (
- [System.String]$Path,
- [System.String]$Algorithm
- )
-
- $sp = switch ($algorithm) {
- 'md5' { New-Object -TypeName System.Security.Cryptography.MD5CryptoServiceProvider }
- 'sha1' { New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider }
- 'sha256' { New-Object -TypeName System.Security.Cryptography.SHA256CryptoServiceProvider }
- 'sha384' { New-Object -TypeName System.Security.Cryptography.SHA384CryptoServiceProvider }
- 'sha512' { New-Object -TypeName System.Security.Cryptography.SHA512CryptoServiceProvider }
- }
-
- $fp = [System.IO.File]::Open($Path, [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read, [System.IO.FileShare]::ReadWrite)
- try {
- $hash = [System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower()
- } finally {
- $fp.Dispose()
- }
-
- return $hash
-}
-
-Function Search-Path {
- [CmdletBinding()]
- Param (
- [Parameter(Mandatory=$true)]
- [System.String]
- $Path,
-
- [Parameter(Mandatory=$true)]
- [AllowEmptyCollection()]
- [System.Collections.Generic.HashSet`1[System.String]]
- $CheckedPaths,
-
- [Parameter(Mandatory=$true)]
- [Object]
- $Module,
-
- [System.Int64]
- $Age,
-
- [System.String]
- $AgeStamp,
-
- [System.String]
- $FileType,
-
- [Switch]
- $Follow,
-
- [Switch]
- $GetChecksum,
-
- [Switch]
- $IsHidden,
-
- [System.String[]]
- $Patterns,
-
- [Switch]
- $Recurse,
-
- [System.Int64]
- $Size,
-
- [Switch]
- $UseRegex
- )
-
- $dir_obj = New-Object -TypeName System.IO.DirectoryInfo -ArgumentList $Path
- if ([Int32]$dir_obj.Attributes -eq -1) {
- $Module.Warn("Argument path '$Path' does not exist, skipping")
- return
- } elseif (-not $dir_obj.Attributes.HasFlag([System.IO.FileAttributes]::Directory)) {
- $Module.Warn("Argument path '$Path' is a file not a directory, skipping")
- return
- }
-
- $dir_files = @()
- try {
- $dir_files = $dir_obj.EnumerateFileSystemInfos("*", [System.IO.SearchOption]::TopDirectoryOnly)
- } catch [System.IO.DirectoryNotFoundException] { # Broken ReparsePoint/Symlink, cannot enumerate
- } catch [System.UnauthorizedAccessException] {} # No ListDirectory permissions, Get-ChildItem ignored this
-
- foreach ($dir_child in $dir_files) {
- if ($dir_child.Attributes.HasFlag([System.IO.FileAttributes]::Directory) -and $Recurse) {
- if ($Follow -or -not $dir_child.Attributes.HasFlag([System.IO.FileAttributes]::ReparsePoint)) {
- $PSBoundParameters.Remove('Path') > $null
- Search-Path -Path $dir_child.FullName @PSBoundParameters
- }
- }
-
- # Check to see if we've already encountered this path and skip if we have.
- if (-not $CheckedPaths.Add($dir_child.FullName.ToLowerInvariant())) {
- continue
- }
-
- $Module.Result.examined++
-
- if ($PSBoundParameters.ContainsKey('Age')) {
- $age_match = Assert-Age -File $dir_child -Age $Age -AgeStamp $AgeStamp
- } else {
- $age_match = $true
- }
-
- $file_type_match = Assert-FileType -File $dir_child -FileType $FileType
- $hidden_match = Assert-FileHidden -File $dir_child -IsHidden:$IsHidden
-
- if ($PSBoundParameters.ContainsKey('Patterns')) {
- $pattern_match = Assert-FileNamePattern -File $dir_child -Patterns $Patterns -UseRegex:$UseRegex.IsPresent
- } else {
- $pattern_match = $true
- }
-
- if ($PSBoundParameters.ContainsKey('Size')) {
- $size_match = Assert-FileSize -File $dir_child -Size $Size
- } else {
- $size_match = $true
- }
-
- if (-not ($age_match -and $file_type_match -and $hidden_match -and $pattern_match -and $size_match)) {
- continue
- }
-
- # It passed all our filters so add it
- $module.Result.matched++
-
- # TODO: Make this generic so it can be shared with win_find and win_stat.
- $epoch = New-Object -Type System.DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, 0
- $file_info = @{
- attributes = $dir_child.Attributes.ToString()
- checksum = $null
- creationtime = (New-TimeSpan -Start $epoch -End $dir_child.CreationTime).TotalSeconds
- exists = $true
- extension = $null
- filename = $dir_child.Name
- isarchive = $dir_child.Attributes.HasFlag([System.IO.FileAttributes]::Archive)
- isdir = $dir_child.Attributes.HasFlag([System.IO.FileAttributes]::Directory)
- ishidden = $dir_child.Attributes.HasFlag([System.IO.FileAttributes]::Hidden)
- isreadonly = $dir_child.Attributes.HasFlag([System.IO.FileAttributes]::ReadOnly)
- isreg = $false
- isshared = $false
- lastaccesstime = (New-TimeSpan -Start $epoch -End $dir_child.LastAccessTime).TotalSeconds
- lastwritetime = (New-TimeSpan -Start $epoch -End $dir_child.LastWriteTime).TotalSeconds
- owner = $null
- path = $dir_child.FullName
- sharename = $null
- size = $null
- }
-
- try {
- $file_info.owner = $dir_child.GetAccessControl().Owner
- } catch {} # May not have rights to get the Owner, historical behaviour is to ignore.
-
- if ($dir_child.Attributes.HasFlag([System.IO.FileAttributes]::Directory)) {
- $share_info = Get-CimInstance -ClassName Win32_Share -Filter "Path='$($dir_child.FullName -replace '\\', '\\')'"
- if ($null -ne $share_info) {
- $file_info.isshared = $true
- $file_info.sharename = $share_info.Name
- }
- } else {
- $file_info.extension = $dir_child.Extension
- $file_info.isreg = $true
- $file_info.size = $dir_child.Length
-
- if ($GetChecksum) {
- try {
- $file_info.checksum = Get-FileChecksum -Path $dir_child.FullName -Algorithm $checksum_algorithm
- } catch {} # Just keep the checksum as $null in the case of a failure.
- }
- }
-
- # Append the link information if the path is a link
- $link_info = @{
- isjunction = $false
- islnk = $false
- nlink = 1
- lnk_source = $null
- lnk_target = $null
- hlnk_targets = @()
- }
- $link_stat = Get-Link -link_path $dir_child.FullName
- if ($null -ne $link_stat) {
- switch ($link_stat.Type) {
- "SymbolicLink" {
- $link_info.islnk = $true
- $link_info.isreg = $false
- $link_info.lnk_source = $link_stat.AbsolutePath
- $link_info.lnk_target = $link_stat.TargetPath
- break
- }
- "JunctionPoint" {
- $link_info.isjunction = $true
- $link_info.isreg = $false
- $link_info.lnk_source = $link_stat.AbsolutePath
- $link_info.lnk_target = $link_stat.TargetPath
- break
- }
- "HardLink" {
- $link_info.nlink = $link_stat.HardTargets.Count
-
- # remove current path from the targets
- $hlnk_targets = $link_info.HardTargets | Where-Object { $_ -ne $dir_child.FullName }
- $link_info.hlnk_targets = @($hlnk_targets)
- break
- }
- }
- }
- foreach ($kv in $link_info.GetEnumerator()) {
- $file_info.$($kv.Key) = $kv.Value
- }
-
- # Output the file_info object
- $file_info
- }
-}
-
-$search_params = @{
- CheckedPaths = [System.Collections.Generic.HashSet`1[System.String]]@()
- GetChecksum = $get_checksum
- Module = $module
- FileType = $file_type
- Follow = $follow
- IsHidden = $hidden
- Recurse = $recurse
-}
-
-if ($null -ne $age) {
- $seconds_per_unit = @{'s'=1; 'm'=60; 'h'=3600; 'd'=86400; 'w'=604800}
- $seconds_pattern = '^(-?\d+)(s|m|h|d|w)?$'
- $match = $age -match $seconds_pattern
- if ($Match) {
- $specified_seconds = [Int64]$Matches[1]
- if ($null -eq $Matches[2]) {
- $chosen_unit = 's'
- } else {
- $chosen_unit = $Matches[2]
- }
-
- $total_seconds = $specified_seconds * ($seconds_per_unit.$chosen_unit)
-
- if ($total_seconds -ge 0) {
- $search_params.Age = (Get-Date).AddSeconds($total_seconds * -1).Ticks
- } else {
- # Make sure we add the positive value of seconds to current time then make it negative for later comparisons.
- $age = (Get-Date).AddSeconds($total_seconds).Ticks
- $search_params.Age = $age * -1
- }
- $search_params.AgeStamp = $age_stamp
- } else {
- $module.FailJson("Invalid age pattern specified")
- }
-}
-
-if ($null -ne $patterns) {
- $search_params.Patterns = $patterns
- $search_params.UseRegex = $use_regex
-}
-
-if ($null -ne $size) {
- $bytes_per_unit = @{'b'=1; 'k'=1KB; 'm'=1MB; 'g'=1GB;'t'=1TB}
- $size_pattern = '^(-?\d+)(b|k|m|g|t)?$'
- $match = $size -match $size_pattern
- if ($Match) {
- $specified_size = [Int64]$Matches[1]
- if ($null -eq $Matches[2]) {
- $chosen_byte = 'b'
- } else {
- $chosen_byte = $Matches[2]
- }
-
- $search_params.Size = $specified_size * ($bytes_per_unit.$chosen_byte)
- } else {
- $module.FailJson("Invalid size pattern specified")
- }
-}
-
-$matched_files = foreach ($path in $paths) {
- # Ensure we pass in an absolute path. We use the ExecutionContext as this is based on the PSProvider path not the
- # process location which can be different.
- $abs_path = $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($path)
- Search-Path -Path $abs_path @search_params
-}
-
-# Make sure we sort the files in alphabetical order.
-$module.Result.files = @() + ($matched_files | Sort-Object -Property {$_.path})
-
-$module.ExitJson()
-
diff --git a/test/support/windows-integration/plugins/modules/win_find.py b/test/support/windows-integration/plugins/modules/win_find.py
deleted file mode 100644
index f506f956..00000000
--- a/test/support/windows-integration/plugins/modules/win_find.py
+++ /dev/null
@@ -1,345 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2016, Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# this is a windows documentation stub. actual code lives in the .ps1
-# file of the same name
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: win_find
-version_added: "2.3"
-short_description: Return a list of files based on specific criteria
-description:
- - Return a list of files based on specified criteria.
- - Multiple criteria are AND'd together.
- - For non-Windows targets, use the M(find) module instead.
-options:
- age:
- description:
- - Select files or folders whose age is equal to or greater than
- the specified time.
- - Use a negative age to find files equal to or less than
- the specified time.
- - You can choose seconds, minutes, hours, days or weeks
- by specifying the first letter of an of
- those words (e.g., "2s", "10d", 1w").
- type: str
- age_stamp:
- description:
- - Choose the file property against which we compare C(age).
- - The default attribute we compare with is the last modification time.
- type: str
- choices: [ atime, ctime, mtime ]
- default: mtime
- checksum_algorithm:
- description:
- - Algorithm to determine the checksum of a file.
- - Will throw an error if the host is unable to use specified algorithm.
- type: str
- choices: [ md5, sha1, sha256, sha384, sha512 ]
- default: sha1
- file_type:
- description: Type of file to search for.
- type: str
- choices: [ directory, file ]
- default: file
- follow:
- description:
- - Set this to C(yes) to follow symlinks in the path.
- - This needs to be used in conjunction with C(recurse).
- type: bool
- default: no
- get_checksum:
- description:
- - Whether to return a checksum of the file in the return info (default sha1),
- use C(checksum_algorithm) to change from the default.
- type: bool
- default: yes
- hidden:
- description: Set this to include hidden files or folders.
- type: bool
- default: no
- paths:
- description:
- - List of paths of directories to search for files or folders in.
- - This can be supplied as a single path or a list of paths.
- type: list
- required: yes
- patterns:
- description:
- - One or more (powershell or regex) patterns to compare filenames with.
- - The type of pattern matching is controlled by C(use_regex) option.
- - The patterns restrict the list of files or folders to be returned based on the filenames.
- - For a file to be matched it only has to match with one pattern in a list provided.
- type: list
- aliases: [ "regex", "regexp" ]
- recurse:
- description:
- - Will recursively descend into the directory looking for files or folders.
- type: bool
- default: no
- size:
- description:
- - Select files or folders whose size is equal to or greater than the specified size.
- - Use a negative value to find files equal to or less than the specified size.
- - You can specify the size with a suffix of the byte type i.e. kilo = k, mega = m...
- - Size is not evaluated for symbolic links.
- type: str
- use_regex:
- description:
- - Will set patterns to run as a regex check if set to C(yes).
- type: bool
- default: no
-author:
-- Jordan Borean (@jborean93)
-'''
-
-EXAMPLES = r'''
-- name: Find files in path
- win_find:
- paths: D:\Temp
-
-- name: Find hidden files in path
- win_find:
- paths: D:\Temp
- hidden: yes
-
-- name: Find files in multiple paths
- win_find:
- paths:
- - C:\Temp
- - D:\Temp
-
-- name: Find files in directory while searching recursively
- win_find:
- paths: D:\Temp
- recurse: yes
-
-- name: Find files in directory while following symlinks
- win_find:
- paths: D:\Temp
- recurse: yes
- follow: yes
-
-- name: Find files with .log and .out extension using powershell wildcards
- win_find:
- paths: D:\Temp
- patterns: [ '*.log', '*.out' ]
-
-- name: Find files in path based on regex pattern
- win_find:
- paths: D:\Temp
- patterns: out_\d{8}-\d{6}.log
-
-- name: Find files older than 1 day
- win_find:
- paths: D:\Temp
- age: 86400
-
-- name: Find files older than 1 day based on create time
- win_find:
- paths: D:\Temp
- age: 86400
- age_stamp: ctime
-
-- name: Find files older than 1 day with unit syntax
- win_find:
- paths: D:\Temp
- age: 1d
-
-- name: Find files newer than 1 hour
- win_find:
- paths: D:\Temp
- age: -3600
-
-- name: Find files newer than 1 hour with unit syntax
- win_find:
- paths: D:\Temp
- age: -1h
-
-- name: Find files larger than 1MB
- win_find:
- paths: D:\Temp
- size: 1048576
-
-- name: Find files larger than 1GB with unit syntax
- win_find:
- paths: D:\Temp
- size: 1g
-
-- name: Find files smaller than 1MB
- win_find:
- paths: D:\Temp
- size: -1048576
-
-- name: Find files smaller than 1GB with unit syntax
- win_find:
- paths: D:\Temp
- size: -1g
-
-- name: Find folders/symlinks in multiple paths
- win_find:
- paths:
- - C:\Temp
- - D:\Temp
- file_type: directory
-
-- name: Find files and return SHA256 checksum of files found
- win_find:
- paths: C:\Temp
- get_checksum: yes
- checksum_algorithm: sha256
-
-- name: Find files and do not return the checksum
- win_find:
- paths: C:\Temp
- get_checksum: no
-'''
-
-RETURN = r'''
-examined:
- description: The number of files/folders that was checked.
- returned: always
- type: int
- sample: 10
-matched:
- description: The number of files/folders that match the criteria.
- returned: always
- type: int
- sample: 2
-files:
- description: Information on the files/folders that match the criteria returned as a list of dictionary elements
- for each file matched. The entries are sorted by the path value alphabetically.
- returned: success
- type: complex
- contains:
- attributes:
- description: attributes of the file at path in raw form.
- returned: success, path exists
- type: str
- sample: "Archive, Hidden"
- checksum:
- description: The checksum of a file based on checksum_algorithm specified.
- returned: success, path exists, path is a file, get_checksum == True
- type: str
- sample: 09cb79e8fc7453c84a07f644e441fd81623b7f98
- creationtime:
- description: The create time of the file represented in seconds since epoch.
- returned: success, path exists
- type: float
- sample: 1477984205.15
- exists:
- description: Whether the file exists, will always be true for M(win_find).
- returned: success, path exists
- type: bool
- sample: true
- extension:
- description: The extension of the file at path.
- returned: success, path exists, path is a file
- type: str
- sample: ".ps1"
- filename:
- description: The name of the file.
- returned: success, path exists
- type: str
- sample: temp
- hlnk_targets:
- description: List of other files pointing to the same file (hard links), excludes the current file.
- returned: success, path exists
- type: list
- sample:
- - C:\temp\file.txt
- - C:\Windows\update.log
- isarchive:
- description: If the path is ready for archiving or not.
- returned: success, path exists
- type: bool
- sample: true
- isdir:
- description: If the path is a directory or not.
- returned: success, path exists
- type: bool
- sample: true
- ishidden:
- description: If the path is hidden or not.
- returned: success, path exists
- type: bool
- sample: true
- isjunction:
- description: If the path is a junction point.
- returned: success, path exists
- type: bool
- sample: true
- islnk:
- description: If the path is a symbolic link.
- returned: success, path exists
- type: bool
- sample: true
- isreadonly:
- description: If the path is read only or not.
- returned: success, path exists
- type: bool
- sample: true
- isreg:
- description: If the path is a regular file or not.
- returned: success, path exists
- type: bool
- sample: true
- isshared:
- description: If the path is shared or not.
- returned: success, path exists
- type: bool
- sample: true
- lastaccesstime:
- description: The last access time of the file represented in seconds since epoch.
- returned: success, path exists
- type: float
- sample: 1477984205.15
- lastwritetime:
- description: The last modification time of the file represented in seconds since epoch.
- returned: success, path exists
- type: float
- sample: 1477984205.15
- lnk_source:
- description: The target of the symlink normalized for the remote filesystem.
- returned: success, path exists, path is a symbolic link or junction point
- type: str
- sample: C:\temp
- lnk_target:
- description: The target of the symlink. Note that relative paths remain relative, will return null if not a link.
- returned: success, path exists, path is a symbolic link or junction point
- type: str
- sample: temp
- nlink:
- description: Number of links to the file (hard links)
- returned: success, path exists
- type: int
- sample: 1
- owner:
- description: The owner of the file.
- returned: success, path exists
- type: str
- sample: BUILTIN\Administrators
- path:
- description: The full absolute path to the file.
- returned: success, path exists
- type: str
- sample: BUILTIN\Administrators
- sharename:
- description: The name of share if folder is shared.
- returned: success, path exists, path is a directory and isshared == True
- type: str
- sample: file-share
- size:
- description: The size in bytes of the file.
- returned: success, path exists, path is a file
- type: int
- sample: 1024
-'''
diff --git a/test/support/windows-integration/plugins/modules/win_format.ps1 b/test/support/windows-integration/plugins/modules/win_format.ps1
deleted file mode 100644
index b5fd3ae0..00000000
--- a/test/support/windows-integration/plugins/modules/win_format.ps1
+++ /dev/null
@@ -1,200 +0,0 @@
-#!powershell
-
-# Copyright: (c) 2019, Varun Chopra (@chopraaa) <v@chopraaa.com>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-#AnsibleRequires -CSharpUtil Ansible.Basic
-#AnsibleRequires -OSVersion 6.2
-
-Set-StrictMode -Version 2
-
-$ErrorActionPreference = "Stop"
-
-$spec = @{
- options = @{
- drive_letter = @{ type = "str" }
- path = @{ type = "str" }
- label = @{ type = "str" }
- new_label = @{ type = "str" }
- file_system = @{ type = "str"; choices = "ntfs", "refs", "exfat", "fat32", "fat" }
- allocation_unit_size = @{ type = "int" }
- large_frs = @{ type = "bool" }
- full = @{ type = "bool"; default = $false }
- compress = @{ type = "bool" }
- integrity_streams = @{ type = "bool" }
- force = @{ type = "bool"; default = $false }
- }
- mutually_exclusive = @(
- ,@('drive_letter', 'path', 'label')
- )
- required_one_of = @(
- ,@('drive_letter', 'path', 'label')
- )
- supports_check_mode = $true
-}
-
-$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
-
-$drive_letter = $module.Params.drive_letter
-$path = $module.Params.path
-$label = $module.Params.label
-$new_label = $module.Params.new_label
-$file_system = $module.Params.file_system
-$allocation_unit_size = $module.Params.allocation_unit_size
-$large_frs = $module.Params.large_frs
-$full_format = $module.Params.full
-$compress_volume = $module.Params.compress
-$integrity_streams = $module.Params.integrity_streams
-$force_format = $module.Params.force
-
-# Some pre-checks
-if ($null -ne $drive_letter -and $drive_letter -notmatch "^[a-zA-Z]$") {
- $module.FailJson("The parameter drive_letter should be a single character A-Z")
-}
-if ($integrity_streams -eq $true -and $file_system -ne "refs") {
- $module.FailJson("Integrity streams can be enabled only on ReFS volumes. You specified: $($file_system)")
-}
-if ($compress_volume -eq $true) {
- if ($file_system -eq "ntfs") {
- if ($null -ne $allocation_unit_size -and $allocation_unit_size -gt 4096) {
- $module.FailJson("NTFS compression is not supported for allocation unit sizes above 4096")
- }
- }
- else {
- $module.FailJson("Compression can be enabled only on NTFS volumes. You specified: $($file_system)")
- }
-}
-
-function Get-AnsibleVolume {
- param(
- $DriveLetter,
- $Path,
- $Label
- )
-
- if ($null -ne $DriveLetter) {
- try {
- $volume = Get-Volume -DriveLetter $DriveLetter
- } catch {
- $module.FailJson("There was an error retrieving the volume using drive_letter $($DriveLetter): $($_.Exception.Message)", $_)
- }
- }
- elseif ($null -ne $Path) {
- try {
- $volume = Get-Volume -Path $Path
- } catch {
- $module.FailJson("There was an error retrieving the volume using path $($Path): $($_.Exception.Message)", $_)
- }
- }
- elseif ($null -ne $Label) {
- try {
- $volume = Get-Volume -FileSystemLabel $Label
- } catch {
- $module.FailJson("There was an error retrieving the volume using label $($Label): $($_.Exception.Message)", $_)
- }
- }
- else {
- $module.FailJson("Unable to locate volume: drive_letter, path and label were not specified")
- }
-
- return $volume
-}
-
-function Format-AnsibleVolume {
- param(
- $Path,
- $Label,
- $FileSystem,
- $Full,
- $UseLargeFRS,
- $Compress,
- $SetIntegrityStreams,
- $AllocationUnitSize
- )
- $parameters = @{
- Path = $Path
- Full = $Full
- }
- if ($null -ne $UseLargeFRS) {
- $parameters.Add("UseLargeFRS", $UseLargeFRS)
- }
- if ($null -ne $SetIntegrityStreams) {
- $parameters.Add("SetIntegrityStreams", $SetIntegrityStreams)
- }
- if ($null -ne $Compress){
- $parameters.Add("Compress", $Compress)
- }
- if ($null -ne $Label) {
- $parameters.Add("NewFileSystemLabel", $Label)
- }
- if ($null -ne $FileSystem) {
- $parameters.Add("FileSystem", $FileSystem)
- }
- if ($null -ne $AllocationUnitSize) {
- $parameters.Add("AllocationUnitSize", $AllocationUnitSize)
- }
-
- Format-Volume @parameters -Confirm:$false | Out-Null
-
-}
-
-$ansible_volume = Get-AnsibleVolume -DriveLetter $drive_letter -Path $path -Label $label
-$ansible_file_system = $ansible_volume.FileSystem
-$ansible_volume_size = $ansible_volume.Size
-$ansible_volume_alu = (Get-CimInstance -ClassName Win32_Volume -Filter "DeviceId = '$($ansible_volume.path.replace('\','\\'))'" -Property BlockSize).BlockSize
-
-$ansible_partition = Get-Partition -Volume $ansible_volume
-
-if (-not $force_format -and $null -ne $allocation_unit_size -and $ansible_volume_alu -ne 0 -and $null -ne $ansible_volume_alu -and $allocation_unit_size -ne $ansible_volume_alu) {
- $module.FailJson("Force format must be specified since target allocation unit size: $($allocation_unit_size) is different from the current allocation unit size of the volume: $($ansible_volume_alu)")
-}
-
-foreach ($access_path in $ansible_partition.AccessPaths) {
- if ($access_path -ne $Path) {
- if ($null -ne $file_system -and
- -not [string]::IsNullOrEmpty($ansible_file_system) -and
- $file_system -ne $ansible_file_system)
- {
- if (-not $force_format)
- {
- $no_files_in_volume = (Get-ChildItem -LiteralPath $access_path -ErrorAction SilentlyContinue | Measure-Object).Count -eq 0
- if($no_files_in_volume)
- {
- $module.FailJson("Force format must be specified since target file system: $($file_system) is different from the current file system of the volume: $($ansible_file_system.ToLower())")
- }
- else
- {
- $module.FailJson("Force format must be specified to format non-pristine volumes")
- }
- }
- }
- else
- {
- $pristine = -not $force_format
- }
- }
-}
-
-if ($force_format) {
- if (-not $module.CheckMode) {
- Format-AnsibleVolume -Path $ansible_volume.Path -Full $full_format -Label $new_label -FileSystem $file_system -SetIntegrityStreams $integrity_streams -UseLargeFRS $large_frs -Compress $compress_volume -AllocationUnitSize $allocation_unit_size
- }
- $module.Result.changed = $true
-}
-else {
- if ($pristine) {
- if ($null -eq $new_label) {
- $new_label = $ansible_volume.FileSystemLabel
- }
- # Conditions for formatting
- if ($ansible_volume_size -eq 0 -or
- $ansible_volume.FileSystemLabel -ne $new_label) {
- if (-not $module.CheckMode) {
- Format-AnsibleVolume -Path $ansible_volume.Path -Full $full_format -Label $new_label -FileSystem $file_system -SetIntegrityStreams $integrity_streams -UseLargeFRS $large_frs -Compress $compress_volume -AllocationUnitSize $allocation_unit_size
- }
- $module.Result.changed = $true
- }
- }
-}
-
-$module.ExitJson()
diff --git a/test/support/windows-integration/plugins/modules/win_format.py b/test/support/windows-integration/plugins/modules/win_format.py
deleted file mode 100644
index f8f18ed7..00000000
--- a/test/support/windows-integration/plugins/modules/win_format.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2019, Varun Chopra (@chopraaa) <v@chopraaa.com>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-ANSIBLE_METADATA = {
- 'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'
-}
-
-DOCUMENTATION = r'''
-module: win_format
-version_added: '2.8'
-short_description: Formats an existing volume or a new volume on an existing partition on Windows
-description:
- - The M(win_format) module formats an existing volume or a new volume on an existing partition on Windows
-options:
- drive_letter:
- description:
- - Used to specify the drive letter of the volume to be formatted.
- type: str
- path:
- description:
- - Used to specify the path to the volume to be formatted.
- type: str
- label:
- description:
- - Used to specify the label of the volume to be formatted.
- type: str
- new_label:
- description:
- - Used to specify the new file system label of the formatted volume.
- type: str
- file_system:
- description:
- - Used to specify the file system to be used when formatting the target volume.
- type: str
- choices: [ ntfs, refs, exfat, fat32, fat ]
- allocation_unit_size:
- description:
- - Specifies the cluster size to use when formatting the volume.
- - If no cluster size is specified when you format a partition, defaults are selected based on
- the size of the partition.
- - This value must be a multiple of the physical sector size of the disk.
- type: int
- large_frs:
- description:
- - Specifies that large File Record System (FRS) should be used.
- type: bool
- compress:
- description:
- - Enable compression on the resulting NTFS volume.
- - NTFS compression is not supported where I(allocation_unit_size) is more than 4096.
- type: bool
- integrity_streams:
- description:
- - Enable integrity streams on the resulting ReFS volume.
- type: bool
- full:
- description:
- - A full format writes to every sector of the disk, takes much longer to perform than the
- default (quick) format, and is not recommended on storage that is thinly provisioned.
- - Specify C(true) for full format.
- type: bool
- force:
- description:
- - Specify if formatting should be forced for volumes that are not created from new partitions
- or if the source and target file system are different.
- type: bool
-notes:
- - Microsoft Windows Server 2012 or Microsoft Windows 8 or newer is required to use this module. To check if your system is compatible, see
- U(https://docs.microsoft.com/en-us/windows/desktop/sysinfo/operating-system-version).
- - One of three parameters (I(drive_letter), I(path) and I(label)) are mandatory to identify the target
- volume but more than one cannot be specified at the same time.
- - This module is idempotent if I(force) is not specified and file system labels remain preserved.
- - For more information, see U(https://docs.microsoft.com/en-us/previous-versions/windows/desktop/stormgmt/format-msft-volume)
-seealso:
- - module: win_disk_facts
- - module: win_partition
-author:
- - Varun Chopra (@chopraaa) <v@chopraaa.com>
-'''
-
-EXAMPLES = r'''
-- name: Create a partition with drive letter D and size 5 GiB
- win_partition:
- drive_letter: D
- partition_size: 5 GiB
- disk_number: 1
-
-- name: Full format the newly created partition as NTFS and label it
- win_format:
- drive_letter: D
- file_system: NTFS
- new_label: Formatted
- full: True
-'''
-
-RETURN = r'''
-#
-'''
diff --git a/test/support/windows-integration/plugins/modules/win_path.ps1 b/test/support/windows-integration/plugins/modules/win_path.ps1
deleted file mode 100644
index 04eb41a3..00000000
--- a/test/support/windows-integration/plugins/modules/win_path.ps1
+++ /dev/null
@@ -1,145 +0,0 @@
-#!powershell
-
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-#Requires -Module Ansible.ModuleUtils.Legacy
-
-Set-StrictMode -Version 2
-$ErrorActionPreference = "Stop"
-
-$system_path = "System\CurrentControlSet\Control\Session Manager\Environment"
-$user_path = "Environment"
-
-# list/arraylist methods don't allow IEqualityComparer override for case/backslash/quote-insensitivity, roll our own search
-Function Get-IndexOfPathElement ($list, [string]$value) {
- $idx = 0
- $value = $value.Trim('"').Trim('\')
- ForEach($el in $list) {
- If ([string]$el.Trim('"').Trim('\') -ieq $value) {
- return $idx
- }
-
- $idx++
- }
-
- return -1
-}
-
-# alters list in place, returns true if at least one element was added
-Function Add-Elements ($existing_elements, $elements_to_add) {
- $last_idx = -1
- $changed = $false
-
- ForEach($el in $elements_to_add) {
- $idx = Get-IndexOfPathElement $existing_elements $el
-
- # add missing elements at the end
- If ($idx -eq -1) {
- $last_idx = $existing_elements.Add($el)
- $changed = $true
- }
- ElseIf ($idx -lt $last_idx) {
- $existing_elements.RemoveAt($idx) | Out-Null
- $existing_elements.Add($el) | Out-Null
- $last_idx = $existing_elements.Count - 1
- $changed = $true
- }
- Else {
- $last_idx = $idx
- }
- }
-
- return $changed
-}
-
-# alters list in place, returns true if at least one element was removed
-Function Remove-Elements ($existing_elements, $elements_to_remove) {
- $count = $existing_elements.Count
-
- ForEach($el in $elements_to_remove) {
- $idx = Get-IndexOfPathElement $existing_elements $el
- $result.removed_idx = $idx
- If ($idx -gt -1) {
- $existing_elements.RemoveAt($idx)
- }
- }
-
- return $count -ne $existing_elements.Count
-}
-
-# PS registry provider doesn't allow access to unexpanded REG_EXPAND_SZ; fall back to .NET
-Function Get-RawPathVar ($scope) {
- If ($scope -eq "user") {
- $env_key = [Microsoft.Win32.Registry]::CurrentUser.OpenSubKey($user_path)
- }
- ElseIf ($scope -eq "machine") {
- $env_key = [Microsoft.Win32.Registry]::LocalMachine.OpenSubKey($system_path)
- }
-
- return $env_key.GetValue($var_name, "", [Microsoft.Win32.RegistryValueOptions]::DoNotExpandEnvironmentNames)
-}
-
-Function Set-RawPathVar($path_value, $scope) {
- If ($scope -eq "user") {
- $var_path = "HKCU:\" + $user_path
- }
- ElseIf ($scope -eq "machine") {
- $var_path = "HKLM:\" + $system_path
- }
-
- Set-ItemProperty $var_path -Name $var_name -Value $path_value -Type ExpandString | Out-Null
-
- return $path_value
-}
-
-$parsed_args = Parse-Args $args -supports_check_mode $true
-
-$result = @{changed=$false}
-
-$var_name = Get-AnsibleParam $parsed_args "name" -Default "PATH"
-$elements = Get-AnsibleParam $parsed_args "elements" -FailIfEmpty $result
-$state = Get-AnsibleParam $parsed_args "state" -Default "present" -ValidateSet "present","absent"
-$scope = Get-AnsibleParam $parsed_args "scope" -Default "machine" -ValidateSet "machine","user"
-
-$check_mode = Get-AnsibleParam $parsed_args "_ansible_check_mode" -Default $false
-
-If ($elements -is [string]) {
- $elements = @($elements)
-}
-
-If ($elements -isnot [Array]) {
- Fail-Json $result "elements must be a string or list of path strings"
-}
-
-$current_value = Get-RawPathVar $scope
-$result.path_value = $current_value
-
-# TODO: test case-canonicalization on wacky unicode values (eg turkish i)
-# TODO: detect and warn/fail on unparseable path? (eg, unbalanced quotes, invalid path chars)
-# TODO: detect and warn/fail if system path and Powershell isn't on it?
-
-$existing_elements = New-Object System.Collections.ArrayList
-
-# split on semicolons, accounting for quoted values with embedded semicolons (which may or may not be wrapped in whitespace)
-$pathsplit_re = [regex] '((?<q>\s*"[^"]+"\s*)|(?<q>[^;]+))(;$|$|;)'
-
-ForEach ($m in $pathsplit_re.Matches($current_value)) {
- $existing_elements.Add($m.Groups['q'].Value) | Out-Null
-}
-
-If ($state -eq "absent") {
- $result.changed = Remove-Elements $existing_elements $elements
-}
-ElseIf ($state -eq "present") {
- $result.changed = Add-Elements $existing_elements $elements
-}
-
-# calculate the new path value from the existing elements
-$path_value = [String]::Join(";", $existing_elements.ToArray())
-$result.path_value = $path_value
-
-If ($result.changed -and -not $check_mode) {
- Set-RawPathVar $path_value $scope | Out-Null
-}
-
-Exit-Json $result
diff --git a/test/support/windows-integration/plugins/modules/win_path.py b/test/support/windows-integration/plugins/modules/win_path.py
deleted file mode 100644
index 6404504f..00000000
--- a/test/support/windows-integration/plugins/modules/win_path.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2016, Red Hat | Ansible
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# This is a windows documentation stub. Actual code lives in the .ps1
-# file of the same name
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'core'}
-
-DOCUMENTATION = r'''
----
-module: win_path
-version_added: "2.3"
-short_description: Manage Windows path environment variables
-description:
- - Allows element-based ordering, addition, and removal of Windows path environment variables.
-options:
- name:
- description:
- - Target path environment variable name.
- type: str
- default: PATH
- elements:
- description:
- - A single path element, or a list of path elements (ie, directories) to add or remove.
- - When multiple elements are included in the list (and C(state) is C(present)), the elements are guaranteed to appear in the same relative order
- in the resultant path value.
- - Variable expansions (eg, C(%VARNAME%)) are allowed, and are stored unexpanded in the target path element.
- - Any existing path elements not mentioned in C(elements) are always preserved in their current order.
- - New path elements are appended to the path, and existing path elements may be moved closer to the end to satisfy the requested ordering.
- - Paths are compared in a case-insensitive fashion, and trailing backslashes are ignored for comparison purposes. However, note that trailing
- backslashes in YAML require quotes.
- type: list
- required: yes
- state:
- description:
- - Whether the path elements specified in C(elements) should be present or absent.
- type: str
- choices: [ absent, present ]
- scope:
- description:
- - The level at which the environment variable specified by C(name) should be managed (either for the current user or global machine scope).
- type: str
- choices: [ machine, user ]
- default: machine
-notes:
- - This module is for modifying individual elements of path-like
- environment variables. For general-purpose management of other
- environment vars, use the M(win_environment) module.
- - This module does not broadcast change events.
- This means that the minority of windows applications which can have
- their environment changed without restarting will not be notified and
- therefore will need restarting to pick up new environment settings.
- - User level environment variables will require an interactive user to
- log out and in again before they become available.
-seealso:
-- module: win_environment
-author:
-- Matt Davis (@nitzmahone)
-'''
-
-EXAMPLES = r'''
-- name: Ensure that system32 and Powershell are present on the global system path, and in the specified order
- win_path:
- elements:
- - '%SystemRoot%\system32'
- - '%SystemRoot%\system32\WindowsPowerShell\v1.0'
-
-- name: Ensure that C:\Program Files\MyJavaThing is not on the current user's CLASSPATH
- win_path:
- name: CLASSPATH
- elements: C:\Program Files\MyJavaThing
- scope: user
- state: absent
-'''
diff --git a/test/support/windows-integration/plugins/modules/win_tempfile.py b/test/support/windows-integration/plugins/modules/win_tempfile.py
deleted file mode 100644
index 58dd6501..00000000
--- a/test/support/windows-integration/plugins/modules/win_tempfile.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/python
-# coding: utf-8 -*-
-
-# Copyright: (c) 2017, Dag Wieers <dag@wieers.com>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: win_tempfile
-version_added: "2.3"
-short_description: Creates temporary files and directories
-description:
- - Creates temporary files and directories.
- - For non-Windows targets, please use the M(tempfile) module instead.
-options:
- state:
- description:
- - Whether to create file or directory.
- type: str
- choices: [ directory, file ]
- default: file
- path:
- description:
- - Location where temporary file or directory should be created.
- - If path is not specified default system temporary directory (%TEMP%) will be used.
- type: path
- default: '%TEMP%'
- aliases: [ dest ]
- prefix:
- description:
- - Prefix of file/directory name created by module.
- type: str
- default: ansible.
- suffix:
- description:
- - Suffix of file/directory name created by module.
- type: str
- default: ''
-seealso:
-- module: tempfile
-author:
-- Dag Wieers (@dagwieers)
-'''
-
-EXAMPLES = r"""
-- name: Create temporary build directory
- win_tempfile:
- state: directory
- suffix: build
-
-- name: Create temporary file
- win_tempfile:
- state: file
- suffix: temp
-"""
-
-RETURN = r'''
-path:
- description: The absolute path to the created file or directory.
- returned: success
- type: str
- sample: C:\Users\Administrator\AppData\Local\Temp\ansible.bMlvdk
-'''
diff --git a/test/support/windows-integration/plugins/modules/win_template.py b/test/support/windows-integration/plugins/modules/win_template.py
deleted file mode 100644
index bd8b2492..00000000
--- a/test/support/windows-integration/plugins/modules/win_template.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# this is a virtual module that is entirely implemented server side
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['stableinterface'],
- 'supported_by': 'core'}
-
-DOCUMENTATION = r'''
----
-module: win_template
-version_added: "1.9.2"
-short_description: Template a file out to a remote server
-options:
- backup:
- description:
- - Determine whether a backup should be created.
- - When set to C(yes), create a backup file including the timestamp information
- so you can get the original file back if you somehow clobbered it incorrectly.
- type: bool
- default: no
- version_added: '2.8'
- newline_sequence:
- default: '\r\n'
- force:
- version_added: '2.4'
-notes:
-- Beware fetching files from windows machines when creating templates because certain tools, such as Powershell ISE,
- and regedit's export facility add a Byte Order Mark as the first character of the file, which can cause tracebacks.
-- You can use the M(win_copy) module with the C(content:) option if you prefer the template inline, as part of the
- playbook.
-- For Linux you can use M(template) which uses '\\n' as C(newline_sequence) by default.
-seealso:
-- module: win_copy
-- module: copy
-- module: template
-author:
-- Jon Hawkesworth (@jhawkesworth)
-extends_documentation_fragment:
-- template_common
-'''
-
-EXAMPLES = r'''
-- name: Create a file from a Jinja2 template
- win_template:
- src: /mytemplates/file.conf.j2
- dest: C:\Temp\file.conf
-
-- name: Create a Unix-style file from a Jinja2 template
- win_template:
- src: unix/config.conf.j2
- dest: C:\share\unix\config.conf
- newline_sequence: '\n'
- backup: yes
-'''
-
-RETURN = r'''
-backup_file:
- description: Name of the backup file that was created.
- returned: if backup=yes
- type: str
- sample: C:\Path\To\File.txt.11540.20150212-220915.bak
-'''
diff --git a/test/units/_vendor/test_vendor.py b/test/units/_vendor/test_vendor.py
index fa9fdb25..cda0279d 100644
--- a/test/units/_vendor/test_vendor.py
+++ b/test/units/_vendor/test_vendor.py
@@ -9,7 +9,7 @@ import pkgutil
import pytest
import sys
-from units.compat.mock import MagicMock, NonCallableMagicMock, patch
+from mock import MagicMock, NonCallableMagicMock, patch
def reset_internal_vendor_package():
diff --git a/test/units/cli/galaxy/test_display_collection.py b/test/units/cli/galaxy/test_display_collection.py
index b1266124..c86227b0 100644
--- a/test/units/cli/galaxy/test_display_collection.py
+++ b/test/units/cli/galaxy/test_display_collection.py
@@ -14,7 +14,7 @@ from ansible.galaxy.dependency_resolution.dataclasses import Requirement
@pytest.fixture
def collection_object():
def _cobj(fqcn='sandwiches.ham'):
- return Requirement(fqcn, '1.5.0', None, 'galaxy')
+ return Requirement(fqcn, '1.5.0', None, 'galaxy', None)
return _cobj
diff --git a/test/units/cli/galaxy/test_execute_list_collection.py b/test/units/cli/galaxy/test_execute_list_collection.py
index acd865b5..e8a834d9 100644
--- a/test/units/cli/galaxy/test_execute_list_collection.py
+++ b/test/units/cli/galaxy/test_execute_list_collection.py
@@ -57,12 +57,14 @@ def mock_collection_objects(mocker):
'1.5.0',
None,
'dir',
+ None,
),
(
'sandwiches.reuben',
'2.5.0',
None,
'dir',
+ None,
),
)
@@ -72,12 +74,14 @@ def mock_collection_objects(mocker):
'1.0.0',
None,
'dir',
+ None,
),
(
'sandwiches.ham',
'1.0.0',
None,
'dir',
+ None,
),
)
@@ -97,12 +101,14 @@ def mock_from_path(mocker):
'1.5.0',
None,
'dir',
+ None,
),
(
'sandwiches.pbj',
'1.0.0',
None,
'dir',
+ None,
),
),
'sandwiches.ham': (
@@ -111,6 +117,7 @@ def mock_from_path(mocker):
'1.0.0',
None,
'dir',
+ None,
),
),
}
diff --git a/test/units/cli/galaxy/test_get_collection_widths.py b/test/units/cli/galaxy/test_get_collection_widths.py
index 67b20926..6e1cbf5e 100644
--- a/test/units/cli/galaxy/test_get_collection_widths.py
+++ b/test/units/cli/galaxy/test_get_collection_widths.py
@@ -13,11 +13,11 @@ from ansible.galaxy.dependency_resolution.dataclasses import Requirement
@pytest.fixture
def collection_objects():
- collection_ham = Requirement('sandwiches.ham', '1.5.0', None, 'galaxy')
+ collection_ham = Requirement('sandwiches.ham', '1.5.0', None, 'galaxy', None)
- collection_pbj = Requirement('sandwiches.pbj', '2.5', None, 'galaxy')
+ collection_pbj = Requirement('sandwiches.pbj', '2.5', None, 'galaxy', None)
- collection_reuben = Requirement('sandwiches.reuben', '4', None, 'galaxy')
+ collection_reuben = Requirement('sandwiches.reuben', '4', None, 'galaxy', None)
return [collection_ham, collection_pbj, collection_reuben]
@@ -27,7 +27,7 @@ def test_get_collection_widths(collection_objects):
def test_get_collection_widths_single_collection(mocker):
- mocked_collection = Requirement('sandwiches.club', '3.0.0', None, 'galaxy')
+ mocked_collection = Requirement('sandwiches.club', '3.0.0', None, 'galaxy', None)
# Make this look like it is not iterable
mocker.patch('ansible.cli.galaxy.is_iterable', return_value=False)
diff --git a/test/units/cli/test_cli.py b/test/units/cli/test_cli.py
index 09445a25..26285955 100644
--- a/test/units/cli/test_cli.py
+++ b/test/units/cli/test_cli.py
@@ -20,7 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from units.mock.loader import DictDataLoader
diff --git a/test/units/cli/test_console.py b/test/units/cli/test_console.py
index 3acc4faa..fb477bf3 100644
--- a/test/units/cli/test_console.py
+++ b/test/units/cli/test_console.py
@@ -20,7 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import patch
+from mock import patch
from ansible.cli.console import ConsoleCLI
diff --git a/test/units/cli/test_doc.py b/test/units/cli/test_doc.py
index 5cdf974b..576bdb28 100644
--- a/test/units/cli/test_doc.py
+++ b/test/units/cli/test_doc.py
@@ -5,6 +5,7 @@ __metaclass__ = type
import pytest
from ansible.cli.doc import DocCLI, RoleMixin
+from ansible.plugins.loader import module_loader
TTY_IFY_DATA = {
@@ -111,3 +112,19 @@ def test_rolemixin__build_doc_no_filter_match():
fqcn, doc = obj._build_doc(role_name, path, collection_name, argspec, entrypoint_filter)
assert fqcn == '.'.join([collection_name, role_name])
assert doc is None
+
+
+def test_builtin_modules_list():
+ args = ['ansible-doc', '-l', 'ansible.builtin', '-t', 'module']
+ obj = DocCLI(args=args)
+ obj.parse()
+ result = obj._list_plugins('module', module_loader)
+ assert len(result) > 0
+
+
+def test_legacy_modules_list():
+ args = ['ansible-doc', '-l', 'ansible.legacy', '-t', 'module']
+ obj = DocCLI(args=args)
+ obj.parse()
+ result = obj._list_plugins('module', module_loader)
+ assert len(result) > 0
diff --git a/test/units/cli/test_galaxy.py b/test/units/cli/test_galaxy.py
index 804e1345..1a6bfe04 100644
--- a/test/units/cli/test_galaxy.py
+++ b/test/units/cli/test_galaxy.py
@@ -41,7 +41,7 @@ from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.utils import context_objects as co
from ansible.utils.display import Display
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
@pytest.fixture(autouse='function')
@@ -462,13 +462,7 @@ class TestGalaxyInitSkeleton(unittest.TestCase, ValidRoleTests):
@pytest.mark.parametrize('cli_args, expected', [
(['ansible-galaxy', 'collection', 'init', 'abc._def'], 0),
(['ansible-galaxy', 'collection', 'init', 'abc._def', '-vvv'], 3),
- (['ansible-galaxy', '-vv', 'collection', 'init', 'abc._def'], 2),
- # Due to our manual parsing we want to verify that -v set in the sub parser takes precedence. This behaviour is
- # deprecated and tests should be removed when the code that handles it is removed
- (['ansible-galaxy', '-vv', 'collection', 'init', 'abc._def', '-v'], 1),
- (['ansible-galaxy', '-vv', 'collection', 'init', 'abc._def', '-vvvv'], 4),
- (['ansible-galaxy', '-vvv', 'init', 'name'], 3),
- (['ansible-galaxy', '-vvvvv', 'init', '-v', 'name'], 1),
+ (['ansible-galaxy', 'collection', 'init', 'abc._def', '-vv'], 2),
])
def test_verbosity_arguments(cli_args, expected, monkeypatch):
# Mock out the functions so we don't actually execute anything
diff --git a/test/units/cli/test_vault.py b/test/units/cli/test_vault.py
index bb244a5a..76ffba2f 100644
--- a/test/units/cli/test_vault.py
+++ b/test/units/cli/test_vault.py
@@ -24,7 +24,7 @@ import os
import pytest
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from units.mock.vault_helper import TextVaultSecret
from ansible import context, errors
@@ -70,18 +70,18 @@ class TestVaultCli(unittest.TestCase):
mock_setup_vault_secrets.return_value = []
cli = VaultCLI(args=['ansible-vault', 'view', '/dev/null/foo'])
cli.parse()
- self.assertRaisesRegexp(errors.AnsibleOptionsError,
- "A vault password is required to use Ansible's Vault",
- cli.run)
+ self.assertRaisesRegex(errors.AnsibleOptionsError,
+ "A vault password is required to use Ansible's Vault",
+ cli.run)
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
def test_encrypt_missing_file_no_secret(self, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = []
cli = VaultCLI(args=['ansible-vault', 'encrypt', '/dev/null/foo'])
cli.parse()
- self.assertRaisesRegexp(errors.AnsibleOptionsError,
- "A vault password is required to use Ansible's Vault",
- cli.run)
+ self.assertRaisesRegex(errors.AnsibleOptionsError,
+ "A vault password is required to use Ansible's Vault",
+ cli.run)
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
@@ -209,11 +209,7 @@ class TestVaultCli(unittest.TestCase):
@pytest.mark.parametrize('cli_args, expected', [
(['ansible-vault', 'view', 'vault.txt'], 0),
(['ansible-vault', 'view', 'vault.txt', '-vvv'], 3),
- (['ansible-vault', '-vv', 'view', 'vault.txt'], 2),
- # Due to our manual parsing we want to verify that -v set in the sub parser takes precedence. This behaviour is
- # deprecated and tests should be removed when the code that handles it is removed
- (['ansible-vault', '-vv', 'view', 'vault.txt', '-v'], 1),
- (['ansible-vault', '-vv', 'view', 'vault.txt', '-vvvv'], 4),
+ (['ansible-vault', 'view', 'vault.txt', '-vv'], 2),
])
def test_verbosity_arguments(cli_args, expected, tmp_path_factory, monkeypatch):
# Add a password file so we don't get a prompt in the test
diff --git a/test/units/compat/builtins.py b/test/units/compat/builtins.py
deleted file mode 100644
index f60ee678..00000000
--- a/test/units/compat/builtins.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-#
-# Compat for python2.7
-#
-
-# One unittest needs to import builtins via __import__() so we need to have
-# the string that represents it
-try:
- import __builtin__
-except ImportError:
- BUILTINS = 'builtins'
-else:
- BUILTINS = '__builtin__'
diff --git a/test/units/compat/mock.py b/test/units/compat/mock.py
deleted file mode 100644
index 0972cd2e..00000000
--- a/test/units/compat/mock.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-'''
-Compat module for Python3.x's unittest.mock module
-'''
-import sys
-
-# Python 2.7
-
-# Note: Could use the pypi mock library on python3.x as well as python2.x. It
-# is the same as the python3 stdlib mock library
-
-try:
- # Allow wildcard import because we really do want to import all of mock's
- # symbols into this compat shim
- # pylint: disable=wildcard-import,unused-wildcard-import
- from unittest.mock import *
-except ImportError:
- # Python 2
- # pylint: disable=wildcard-import,unused-wildcard-import
- try:
- from mock import *
- except ImportError:
- print('You need the mock library installed on python2.x to run tests')
-
-
-# Prior to 3.4.4, mock_open cannot handle binary read_data
-if sys.version_info >= (3,) and sys.version_info < (3, 4, 4):
- file_spec = None
-
- def _iterate_read_data(read_data):
- # Helper for mock_open:
- # Retrieve lines from read_data via a generator so that separate calls to
- # readline, read, and readlines are properly interleaved
- sep = b'\n' if isinstance(read_data, bytes) else '\n'
- data_as_list = [l + sep for l in read_data.split(sep)]
-
- if data_as_list[-1] == sep:
- # If the last line ended in a newline, the list comprehension will have an
- # extra entry that's just a newline. Remove this.
- data_as_list = data_as_list[:-1]
- else:
- # If there wasn't an extra newline by itself, then the file being
- # emulated doesn't have a newline to end the last line remove the
- # newline that our naive format() added
- data_as_list[-1] = data_as_list[-1][:-1]
-
- for line in data_as_list:
- yield line
-
- def mock_open(mock=None, read_data=''):
- """
- A helper function to create a mock to replace the use of `open`. It works
- for `open` called directly or used as a context manager.
-
- The `mock` argument is the mock object to configure. If `None` (the
- default) then a `MagicMock` will be created for you, with the API limited
- to methods or attributes available on standard file handles.
-
- `read_data` is a string for the `read` methoddline`, and `readlines` of the
- file handle to return. This is an empty string by default.
- """
- def _readlines_side_effect(*args, **kwargs):
- if handle.readlines.return_value is not None:
- return handle.readlines.return_value
- return list(_data)
-
- def _read_side_effect(*args, **kwargs):
- if handle.read.return_value is not None:
- return handle.read.return_value
- return type(read_data)().join(_data)
-
- def _readline_side_effect():
- if handle.readline.return_value is not None:
- while True:
- yield handle.readline.return_value
- for line in _data:
- yield line
-
- global file_spec
- if file_spec is None:
- import _io
- file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
-
- if mock is None:
- mock = MagicMock(name='open', spec=open)
-
- handle = MagicMock(spec=file_spec)
- handle.__enter__.return_value = handle
-
- _data = _iterate_read_data(read_data)
-
- handle.write.return_value = None
- handle.read.return_value = None
- handle.readline.return_value = None
- handle.readlines.return_value = None
-
- handle.read.side_effect = _read_side_effect
- handle.readline.side_effect = _readline_side_effect()
- handle.readlines.side_effect = _readlines_side_effect
-
- mock.return_value = handle
- return mock
diff --git a/test/units/compat/unittest.py b/test/units/compat/unittest.py
index 98f08ad6..77bb4c05 100644
--- a/test/units/compat/unittest.py
+++ b/test/units/compat/unittest.py
@@ -36,3 +36,7 @@ if sys.version_info < (2, 7):
print('You need unittest2 installed on python2.6.x to run tests')
else:
from unittest import *
+
+ if not hasattr(TestCase, 'assertRaisesRegex'):
+ # added in Python 3.2
+ TestCase.assertRaisesRegex = TestCase.assertRaisesRegexp
diff --git a/test/units/errors/test_errors.py b/test/units/errors/test_errors.py
index 136a2695..deb3dc0b 100644
--- a/test/units/errors/test_errors.py
+++ b/test/units/errors/test_errors.py
@@ -21,8 +21,7 @@ __metaclass__ = type
from units.compat import unittest
-from units.compat.builtins import BUILTINS
-from units.compat.mock import mock_open, patch
+from mock import mock_open, patch
from ansible.errors import AnsibleError
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject
@@ -87,7 +86,7 @@ class TestErrors(unittest.TestCase):
m = mock_open()
m.return_value.readlines.return_value = ['this is line 1\n']
- with patch('{0}.open'.format(BUILTINS), m):
+ with patch('builtins.open', m):
# this line will be found in the file
self.obj.ansible_pos = ('foo.yml', 1, 1)
e = AnsibleError(self.message, self.obj)
@@ -110,7 +109,7 @@ class TestErrors(unittest.TestCase):
m = mock_open()
m.return_value.readlines.return_value = ['this line has unicode \xf0\x9f\x98\xa8 in it!\n']
- with patch('{0}.open'.format(BUILTINS), m):
+ with patch('builtins.open', m):
# this line will be found in the file
self.obj.ansible_pos = ('foo.yml', 1, 1)
e = AnsibleError(self.unicode_message, self.obj)
@@ -125,7 +124,7 @@ class TestErrors(unittest.TestCase):
m = mock_open()
m.return_value.readlines.return_value = ['this is line 1\n', 'this is line 2\n', 'this is line 3\n']
- with patch('{0}.open'.format(BUILTINS), m):
+ with patch('builtins.open', m):
# If the error occurs in the last line of the file, use the correct index to get the line
# and avoid the IndexError
self.obj.ansible_pos = ('foo.yml', 4, 1)
@@ -141,7 +140,7 @@ class TestErrors(unittest.TestCase):
m = mock_open()
m.return_value.readlines.return_value = ['this is line 1\n', 'this is line 2\n', 'this is line 3\n', ' \n', ' \n', ' ']
- with patch('{0}.open'.format(BUILTINS), m):
+ with patch('builtins.open', m):
self.obj.ansible_pos = ('foo.yml', 5, 1)
e = AnsibleError(self.message, self.obj)
self.assertEqual(
diff --git a/test/units/executor/module_common/test_module_common.py b/test/units/executor/module_common/test_module_common.py
index 04bae85d..fa6add8c 100644
--- a/test/units/executor/module_common/test_module_common.py
+++ b/test/units/executor/module_common/test_module_common.py
@@ -113,8 +113,11 @@ class TestGetShebang:
with pytest.raises(InterpreterDiscoveryRequiredError):
amc._get_shebang(u'/usr/bin/python', {}, templar)
+ def test_python_interpreter(self, templar):
+ assert amc._get_shebang(u'/usr/bin/python3.8', {}, templar) == ('#!/usr/bin/python3.8', u'/usr/bin/python3.8')
+
def test_non_python_interpreter(self, templar):
- assert amc._get_shebang(u'/usr/bin/ruby', {}, templar) == (None, u'/usr/bin/ruby')
+ assert amc._get_shebang(u'/usr/bin/ruby', {}, templar) == ('#!/usr/bin/ruby', u'/usr/bin/ruby')
def test_interpreter_set_in_task_vars(self, templar):
assert amc._get_shebang(u'/usr/bin/python', {u'ansible_python_interpreter': u'/usr/bin/pypy'}, templar) == \
diff --git a/test/units/executor/test_interpreter_discovery.py b/test/units/executor/test_interpreter_discovery.py
index 5919d39f..5efdd378 100644
--- a/test/units/executor/test_interpreter_discovery.py
+++ b/test/units/executor/test_interpreter_discovery.py
@@ -6,7 +6,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.executor.interpreter_discovery import discover_interpreter
from ansible.module_utils._text import to_text
diff --git a/test/units/executor/test_play_iterator.py b/test/units/executor/test_play_iterator.py
index 395ab686..3ced9e3c 100644
--- a/test/units/executor/test_play_iterator.py
+++ b/test/units/executor/test_play_iterator.py
@@ -20,9 +20,9 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
-from ansible.executor.play_iterator import HostState, PlayIterator
+from ansible.executor.play_iterator import HostState, PlayIterator, IteratingStates, FailedStates
from ansible.playbook import Playbook
from ansible.playbook.play_context import PlayContext
@@ -51,7 +51,6 @@ class TestPlayIterator(unittest.TestCase):
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_play_iterator(self):
- # import epdb; epdb.st()
fake_loader = DictDataLoader({
"test_play.yml": """
- hosts: all
@@ -429,7 +428,7 @@ class TestPlayIterator(unittest.TestCase):
# iterate past first task
_, task = itr.get_next_task_for_host(hosts[0])
- while(task and task.action != 'debug'):
+ while (task and task.action != 'debug'):
_, task = itr.get_next_task_for_host(hosts[0])
if task is None:
@@ -443,21 +442,51 @@ class TestPlayIterator(unittest.TestCase):
res_state = itr._insert_tasks_into_state(s_copy, task_list=[])
self.assertEqual(res_state, s_copy)
- s_copy.fail_state = itr.FAILED_TASKS
+ s_copy.fail_state = FailedStates.TASKS
res_state = itr._insert_tasks_into_state(s_copy, task_list=[MagicMock()])
self.assertEqual(res_state, s_copy)
# but if we've failed with a rescue/always block
mock_task = MagicMock()
- s_copy.run_state = itr.ITERATING_RESCUE
+ s_copy.run_state = IteratingStates.RESCUE
res_state = itr._insert_tasks_into_state(s_copy, task_list=[mock_task])
self.assertEqual(res_state, s_copy)
self.assertIn(mock_task, res_state._blocks[res_state.cur_block].rescue)
- itr._host_states[hosts[0].name] = res_state
+ itr.set_state_for_host(hosts[0].name, res_state)
(next_state, next_task) = itr.get_next_task_for_host(hosts[0], peek=True)
self.assertEqual(next_task, mock_task)
- itr._host_states[hosts[0].name] = s
+ itr.set_state_for_host(hosts[0].name, s)
# test a regular insertion
s_copy = s.copy()
res_state = itr._insert_tasks_into_state(s_copy, task_list=[MagicMock()])
+
+ def test_iterating_states_deprecation_class_attr(self):
+ assert PlayIterator.ITERATING_SETUP == IteratingStates.SETUP
+ assert PlayIterator.ITERATING_TASKS == IteratingStates.TASKS
+ assert PlayIterator.ITERATING_RESCUE == IteratingStates.RESCUE
+ assert PlayIterator.ITERATING_ALWAYS == IteratingStates.ALWAYS
+ assert PlayIterator.ITERATING_COMPLETE == IteratingStates.COMPLETE
+
+ def test_failed_states_deprecation_class_attr(self):
+ assert PlayIterator.FAILED_NONE == FailedStates.NONE
+ assert PlayIterator.FAILED_SETUP == FailedStates.SETUP
+ assert PlayIterator.FAILED_TASKS == FailedStates.TASKS
+ assert PlayIterator.FAILED_RESCUE == FailedStates.RESCUE
+ assert PlayIterator.FAILED_ALWAYS == FailedStates.ALWAYS
+
+ def test_iterating_states_deprecation_instance_attr(self):
+ iterator = PlayIterator(MagicMock(), MagicMock(), MagicMock(), MagicMock(), MagicMock())
+ assert iterator.ITERATING_SETUP == IteratingStates.SETUP
+ assert iterator.ITERATING_TASKS == IteratingStates.TASKS
+ assert iterator.ITERATING_RESCUE == IteratingStates.RESCUE
+ assert iterator.ITERATING_ALWAYS == IteratingStates.ALWAYS
+ assert iterator.ITERATING_COMPLETE == IteratingStates.COMPLETE
+
+ def test_failed_states_deprecation_instance_attr(self):
+ iterator = PlayIterator(MagicMock(), MagicMock(), MagicMock(), MagicMock(), MagicMock())
+ assert iterator.FAILED_NONE == FailedStates.NONE
+ assert iterator.FAILED_SETUP == FailedStates.SETUP
+ assert iterator.FAILED_TASKS == FailedStates.TASKS
+ assert iterator.FAILED_RESCUE == FailedStates.RESCUE
+ assert iterator.FAILED_ALWAYS == FailedStates.ALWAYS
diff --git a/test/units/executor/test_playbook_executor.py b/test/units/executor/test_playbook_executor.py
index 529eda36..350f7c2d 100644
--- a/test/units/executor/test_playbook_executor.py
+++ b/test/units/executor/test_playbook_executor.py
@@ -20,7 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.playbook import Playbook
diff --git a/test/units/executor/test_task_executor.py b/test/units/executor/test_task_executor.py
index 8c01b339..30d609a1 100644
--- a/test/units/executor/test_task_executor.py
+++ b/test/units/executor/test_task_executor.py
@@ -22,7 +22,7 @@ __metaclass__ = type
import mock
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.errors import AnsibleError
from ansible.executor.task_executor import TaskExecutor, remove_omit
from ansible.plugins.loader import action_loader, lookup_loader
diff --git a/test/units/executor/test_task_queue_manager_callbacks.py b/test/units/executor/test_task_queue_manager_callbacks.py
index 6c0ceee0..b6b1159d 100644
--- a/test/units/executor/test_task_queue_manager_callbacks.py
+++ b/test/units/executor/test_task_queue_manager_callbacks.py
@@ -19,7 +19,7 @@
from __future__ import (absolute_import, division, print_function)
from units.compat import unittest
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.playbook import Playbook
diff --git a/test/units/executor/test_task_result.py b/test/units/executor/test_task_result.py
index 3ce210de..ee5c7198 100644
--- a/test/units/executor/test_task_result.py
+++ b/test/units/executor/test_task_result.py
@@ -20,7 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.executor.task_result import TaskResult
diff --git a/test/units/galaxy/test_api.py b/test/units/galaxy/test_api.py
index 8081c792..733f99b5 100644
--- a/test/units/galaxy/test_api.py
+++ b/test/units/galaxy/test_api.py
@@ -16,7 +16,7 @@ import tempfile
import time
from io import BytesIO, StringIO
-from units.compat.mock import MagicMock
+from mock import MagicMock
import ansible.constants as C
from ansible import context
@@ -509,15 +509,15 @@ def test_wait_import_task_with_failure(server_url, api_version, token_type, toke
},
'messages': [
{
- 'level': 'error',
+ 'level': 'ERrOR',
'message': u'Somé error',
},
{
- 'level': 'warning',
+ 'level': 'WARNiNG',
'message': u'Some wärning',
},
{
- 'level': 'info',
+ 'level': 'INFO',
'message': u'Somé info',
},
],
@@ -549,7 +549,7 @@ def test_wait_import_task_with_failure(server_url, api_version, token_type, toke
assert mock_display.mock_calls[0][1][0] == 'Waiting until Galaxy import task %s has completed' % full_import_uri
assert mock_vvv.call_count == 1
- assert mock_vvv.mock_calls[0][1][0] == u'Galaxy import message: info - Somé info'
+ assert mock_vvv.mock_calls[0][1][0] == u'Galaxy import message: INFO - Somé info'
assert mock_warn.call_count == 1
assert mock_warn.mock_calls[0][1][0] == u'Galaxy import warning message: Some wärning'
@@ -582,15 +582,15 @@ def test_wait_import_task_with_failure_no_error(server_url, api_version, token_t
'error': {},
'messages': [
{
- 'level': 'error',
+ 'level': 'ERROR',
'message': u'Somé error',
},
{
- 'level': 'warning',
+ 'level': 'WARNING',
'message': u'Some wärning',
},
{
- 'level': 'info',
+ 'level': 'INFO',
'message': u'Somé info',
},
],
@@ -622,7 +622,7 @@ def test_wait_import_task_with_failure_no_error(server_url, api_version, token_t
assert mock_display.mock_calls[0][1][0] == 'Waiting until Galaxy import task %s has completed' % full_import_uri
assert mock_vvv.call_count == 1
- assert mock_vvv.mock_calls[0][1][0] == u'Galaxy import message: info - Somé info'
+ assert mock_vvv.mock_calls[0][1][0] == u'Galaxy import message: INFO - Somé info'
assert mock_warn.call_count == 1
assert mock_warn.mock_calls[0][1][0] == u'Galaxy import warning message: Some wärning'
@@ -704,6 +704,7 @@ def test_get_collection_version_metadata_no_version(api_version, token_type, ver
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(to_text(json.dumps({
+ 'href': 'https://galaxy.server.com/api/{api}/namespace/name/versions/{version}/'.format(api=api_version, version=version),
'download_url': 'https://downloadme.com',
'artifact': {
'sha256': 'ac47b6fac117d7c171812750dacda655b04533cf56b31080b82d1c0db3c9d80f',
@@ -741,6 +742,85 @@ def test_get_collection_version_metadata_no_version(api_version, token_type, ver
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
+@pytest.mark.parametrize('api_version, token_type, token_ins, version', [
+ ('v2', None, None, '2.1.13'),
+ ('v3', 'Bearer', KeycloakToken(auth_url='https://api.test/api/automation-hub/'), '1.0.0'),
+])
+def test_get_collection_signatures_backwards_compat(api_version, token_type, token_ins, version, monkeypatch):
+ api = get_test_galaxy_api('https://galaxy.server.com/api/', api_version, token_ins=token_ins)
+
+ if token_ins:
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
+
+ mock_open = MagicMock()
+ mock_open.side_effect = [
+ StringIO("{}")
+ ]
+ monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
+
+ actual = api.get_collection_signatures('namespace', 'collection', version)
+ assert actual == []
+
+ assert mock_open.call_count == 1
+ assert mock_open.mock_calls[0][1][0] == '%s%s/collections/namespace/collection/versions/%s/' \
+ % (api.api_server, api_version, version)
+
+    # v2 calls don't need auth, so no authz header or token_type
+ if token_type:
+ assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
+
+
+@pytest.mark.parametrize('api_version, token_type, token_ins, version', [
+ ('v2', None, None, '2.1.13'),
+ ('v3', 'Bearer', KeycloakToken(auth_url='https://api.test/api/automation-hub/'), '1.0.0'),
+])
+def test_get_collection_signatures(api_version, token_type, token_ins, version, monkeypatch):
+ api = get_test_galaxy_api('https://galaxy.server.com/api/', api_version, token_ins=token_ins)
+
+ if token_ins:
+ mock_token_get = MagicMock()
+ mock_token_get.return_value = 'my token'
+ monkeypatch.setattr(token_ins, 'get', mock_token_get)
+
+ mock_open = MagicMock()
+ mock_open.side_effect = [
+ StringIO(to_text(json.dumps({
+ 'signatures': [
+ {
+ "signature": "-----BEGIN PGP SIGNATURE-----\nSIGNATURE1\n-----END PGP SIGNATURE-----\n",
+ "pubkey_fingerprint": "FINGERPRINT",
+ "signing_service": "ansible-default",
+ "pulp_created": "2022-01-14T14:05:53.835605Z",
+ },
+ {
+ "signature": "-----BEGIN PGP SIGNATURE-----\nSIGNATURE2\n-----END PGP SIGNATURE-----\n",
+ "pubkey_fingerprint": "FINGERPRINT",
+ "signing_service": "ansible-default",
+ "pulp_created": "2022-01-14T14:05:53.835605Z",
+ },
+ ],
+ }))),
+ ]
+ monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
+
+ actual = api.get_collection_signatures('namespace', 'collection', version)
+
+ assert actual == [
+ "-----BEGIN PGP SIGNATURE-----\nSIGNATURE1\n-----END PGP SIGNATURE-----\n",
+ "-----BEGIN PGP SIGNATURE-----\nSIGNATURE2\n-----END PGP SIGNATURE-----\n"
+ ]
+
+ assert mock_open.call_count == 1
+ assert mock_open.mock_calls[0][1][0] == '%s%s/collections/namespace/collection/versions/%s/' \
+ % (api.api_server, api_version, version)
+
+    # v2 calls don't need auth, so no authz header or token_type
+ if token_type:
+ assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
+
+
@pytest.mark.parametrize('api_version, token_type, token_ins, response', [
('v2', None, None, {
'count': 2,
diff --git a/test/units/galaxy/test_collection.py b/test/units/galaxy/test_collection.py
index 65243df1..53d042fe 100644
--- a/test/units/galaxy/test_collection.py
+++ b/test/units/galaxy/test_collection.py
@@ -15,7 +15,7 @@ import uuid
from hashlib import sha256
from io import BytesIO
-from units.compat.mock import MagicMock, mock_open, patch
+from mock import MagicMock, mock_open, patch
import ansible.constants as C
from ansible import context
@@ -217,6 +217,42 @@ def server_config(monkeypatch):
return server1, server2, server3
+@pytest.mark.parametrize(
+ 'required_signature_count,valid',
+ [
+ ("1", True),
+ ("+1", True),
+ ("all", True),
+ ("+all", True),
+ ("-1", False),
+ ("invalid", False),
+ ("1.5", False),
+ ("+", False),
+ ]
+)
+def test_cli_options(required_signature_count, valid, monkeypatch):
+ cli_args = [
+ 'ansible-galaxy',
+ 'collection',
+ 'install',
+ 'namespace.collection:1.0.0',
+ '--keyring',
+ '~/.ansible/pubring.kbx',
+ '--required-valid-signature-count',
+ required_signature_count
+ ]
+
+ galaxy_cli = GalaxyCLI(args=cli_args)
+ mock_execute_install = MagicMock()
+ monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
+
+ if valid:
+ galaxy_cli.run()
+ else:
+ with pytest.raises(SystemExit, match='2') as error:
+ galaxy_cli.run()
+
+
@pytest.mark.parametrize('global_ignore_certs', [True, False])
def test_validate_certs(global_ignore_certs, monkeypatch):
cli_args = [
diff --git a/test/units/galaxy/test_collection_install.py b/test/units/galaxy/test_collection_install.py
index d4565fd5..e34472f2 100644
--- a/test/units/galaxy/test_collection_install.py
+++ b/test/units/galaxy/test_collection_install.py
@@ -17,7 +17,8 @@ import tarfile
import yaml
from io import BytesIO, StringIO
-from units.compat.mock import MagicMock
+from mock import MagicMock, patch
+from unittest import mock
import ansible.module_utils.six.moves.urllib.error as urllib_error
@@ -27,6 +28,7 @@ from ansible.errors import AnsibleError
from ansible.galaxy import collection, api, dependency_resolution
from ansible.galaxy.dependency_resolution.dataclasses import Candidate, Requirement
from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.common.process import get_bin_path
from ansible.utils import context_objects as co
from ansible.utils.display import Display
@@ -168,9 +170,26 @@ def collection_artifact(request, tmp_path_factory):
def galaxy_server():
context.CLIARGS._store = {'ignore_certs': False}
galaxy_api = api.GalaxyAPI(None, 'test_server', 'https://galaxy.ansible.com')
+ galaxy_api.get_collection_signatures = MagicMock(return_value=[])
return galaxy_api
+def test_concrete_artifact_manager_scm_no_executable(monkeypatch):
+ url = 'https://github.com/org/repo'
+ version = 'commitish'
+ mock_subprocess_check_call = MagicMock()
+ monkeypatch.setattr(collection.concrete_artifact_manager.subprocess, 'check_call', mock_subprocess_check_call)
+ mock_mkdtemp = MagicMock(return_value='')
+ monkeypatch.setattr(collection.concrete_artifact_manager, 'mkdtemp', mock_mkdtemp)
+
+ error = re.escape(
+ "Could not find git executable to extract the collection from the Git repository `https://github.com/org/repo`"
+ )
+ with mock.patch.dict(os.environ, {"PATH": ""}):
+ with pytest.raises(AnsibleError, match=error):
+ collection.concrete_artifact_manager._extract_collection_from_git(url, version, b'path')
+
+
@pytest.mark.parametrize(
'url,version,trailing_slash',
[
@@ -193,10 +212,12 @@ def test_concrete_artifact_manager_scm_cmd(url, version, trailing_slash, monkeyp
repo = 'https://github.com/org/repo'
if trailing_slash:
repo += '/'
- clone_cmd = ('git', 'clone', repo, '')
+
+ git_executable = get_bin_path('git')
+ clone_cmd = (git_executable, 'clone', repo, '')
assert mock_subprocess_check_call.call_args_list[0].args[0] == clone_cmd
- assert mock_subprocess_check_call.call_args_list[1].args[0] == ('git', 'checkout', 'commitish')
+ assert mock_subprocess_check_call.call_args_list[1].args[0] == (git_executable, 'checkout', 'commitish')
@pytest.mark.parametrize(
@@ -222,10 +243,11 @@ def test_concrete_artifact_manager_scm_cmd_shallow(url, version, trailing_slash,
repo = 'https://github.com/org/repo'
if trailing_slash:
repo += '/'
- shallow_clone_cmd = ('git', 'clone', '--depth=1', repo, '')
+ git_executable = get_bin_path('git')
+ shallow_clone_cmd = (git_executable, 'clone', '--depth=1', repo, '')
assert mock_subprocess_check_call.call_args_list[0].args[0] == shallow_clone_cmd
- assert mock_subprocess_check_call.call_args_list[1].args[0] == ('git', 'checkout', 'HEAD')
+ assert mock_subprocess_check_call.call_args_list[1].args[0] == (git_executable, 'checkout', 'HEAD')
def test_build_requirement_from_path(collection_artifact):
@@ -449,7 +471,9 @@ def test_build_requirement_from_name(galaxy_server, monkeypatch, tmp_path_factor
requirements = cli._require_one_of_collections_requirements(
collections, requirements_file, artifacts_manager=concrete_artifact_cm
)['collections']
- actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False)['namespace.collection']
+ actual = collection._resolve_depenency_map(
+ requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False, False
+ )['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
@@ -466,7 +490,7 @@ def test_build_requirement_from_name_with_prerelease(galaxy_server, monkeypatch,
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
mock_get_info = MagicMock()
- mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1', None, None, {})
+ mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
@@ -476,7 +500,9 @@ def test_build_requirement_from_name_with_prerelease(galaxy_server, monkeypatch,
requirements = cli._require_one_of_collections_requirements(
['namespace.collection'], None, artifacts_manager=concrete_artifact_cm
)['collections']
- actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False)['namespace.collection']
+ actual = collection._resolve_depenency_map(
+ requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False, False
+ )['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
@@ -494,7 +520,7 @@ def test_build_requirment_from_name_with_prerelease_explicit(galaxy_server, monk
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1-beta.1', None, None,
- {})
+ {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
@@ -504,7 +530,9 @@ def test_build_requirment_from_name_with_prerelease_explicit(galaxy_server, monk
requirements = cli._require_one_of_collections_requirements(
['namespace.collection:2.0.1-beta.1'], None, artifacts_manager=concrete_artifact_cm
)['collections']
- actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False)['namespace.collection']
+ actual = collection._resolve_depenency_map(
+ requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False, False
+ )['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
@@ -521,7 +549,7 @@ def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch, t
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
mock_get_info = MagicMock()
- mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.0.3', None, None, {})
+ mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.0.3', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
broken_server = copy.copy(galaxy_server)
@@ -538,7 +566,7 @@ def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch, t
['namespace.collection:>1.0.1'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
- requirements, [broken_server, galaxy_server], concrete_artifact_cm, None, True, False, False
+ requirements, [broken_server, galaxy_server], concrete_artifact_cm, None, True, False, False, False
)['namespace.collection']
assert actual.namespace == u'namespace'
@@ -569,7 +597,7 @@ def test_build_requirement_from_name_missing(galaxy_server, monkeypatch, tmp_pat
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n* namespace.collection:* (direct request)"
with pytest.raises(AnsibleError, match=re.escape(expected)):
- collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, True, False)
+ collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, True, False, False)
def test_build_requirement_from_name_401_unauthorized(galaxy_server, monkeypatch, tmp_path_factory):
@@ -589,7 +617,7 @@ def test_build_requirement_from_name_401_unauthorized(galaxy_server, monkeypatch
expected = "error (HTTP Code: 401, Message: msg)"
with pytest.raises(api.GalaxyError, match=re.escape(expected)):
- collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, False, False)
+ collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, False, False, False)
def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch, tmp_path_factory):
@@ -608,7 +636,7 @@ def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch,
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.0', None, None,
- {})
+ {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:==2.0.0'])
@@ -616,7 +644,7 @@ def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch,
['namespace.collection:==2.0.0'], None, artifacts_manager=concrete_artifact_cm
)['collections']
- actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False)['namespace.collection']
+ actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
@@ -644,7 +672,7 @@ def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server,
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1', None, None,
- {})
+ {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>=2.0.1,<2.0.2'])
@@ -652,7 +680,7 @@ def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server,
['namespace.collection:>=2.0.1,<2.0.2'], None, artifacts_manager=concrete_artifact_cm
)['collections']
- actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False)['namespace.collection']
+ actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
@@ -678,7 +706,7 @@ def test_build_requirement_from_name_multiple_version_results(galaxy_server, mon
monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)
mock_get_info = MagicMock()
- mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {})
+ mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
mock_get_versions = MagicMock()
@@ -693,7 +721,7 @@ def test_build_requirement_from_name_multiple_version_results(galaxy_server, mon
['namespace.collection:!=2.0.2'], None, artifacts_manager=concrete_artifact_cm
)['collections']
- actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False)['namespace.collection']
+ actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
@@ -712,7 +740,7 @@ def test_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
mock_get_info = MagicMock()
- mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {})
+ mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
mock_get_versions = MagicMock()
@@ -727,7 +755,7 @@ def test_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
expected += "* namespace.collection:!=2.0.5 (direct request)"
with pytest.raises(AnsibleError, match=re.escape(expected)):
- collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False)
+ collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False)
def test_dep_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
@@ -735,8 +763,8 @@ def test_dep_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_serve
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
mock_get_info_return = [
- api.CollectionVersionMetadata('parent', 'collection', '2.0.5', None, None, {'namespace.collection': '!=1.0.0'}),
- api.CollectionVersionMetadata('namespace', 'collection', '1.0.0', None, None, {}),
+ api.CollectionVersionMetadata('parent', 'collection', '2.0.5', None, None, {'namespace.collection': '!=1.0.0'}, None, None),
+ api.CollectionVersionMetadata('namespace', 'collection', '1.0.0', None, None, {}, None, None),
]
mock_get_info = MagicMock(side_effect=mock_get_info_return)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
@@ -752,12 +780,12 @@ def test_dep_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_serve
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
expected += "* namespace.collection:!=1.0.0 (dependency of parent.collection:2.0.5)"
with pytest.raises(AnsibleError, match=re.escape(expected)):
- collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False)
+ collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False)
def test_install_installed_collection(monkeypatch, tmp_path_factory, galaxy_server):
- mock_installed_collections = MagicMock(return_value=[Candidate('namespace.collection', '1.2.3', None, 'dir')])
+ mock_installed_collections = MagicMock(return_value=[Candidate('namespace.collection', '1.2.3', None, 'dir', None)])
monkeypatch.setattr(collection, 'find_existing_collections', mock_installed_collections)
@@ -768,7 +796,7 @@ def test_install_installed_collection(monkeypatch, tmp_path_factory, galaxy_serv
monkeypatch.setattr(Display, 'display', mock_display)
mock_get_info = MagicMock()
- mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.2.3', None, None, {})
+ mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.2.3', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
mock_get_versions = MagicMock(return_value=['1.2.3', '1.3.0'])
@@ -795,7 +823,7 @@ def test_install_collection(collection_artifact, monkeypatch):
collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
os.makedirs(os.path.join(collection_path, b'delete_me')) # Create a folder to verify the install cleans out the dir
- candidate = Candidate('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')
+ candidate = Candidate('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)
collection.install(candidate, to_text(output_path), concrete_artifact_cm)
# Ensure the temp directory is empty, nothing is left behind
@@ -834,7 +862,7 @@ def test_install_collection_with_download(galaxy_server, collection_artifact, mo
mock_download.return_value = collection_tar
monkeypatch.setattr(concrete_artifact_cm, 'get_galaxy_artifact_path', mock_download)
- req = Requirement('ansible_namespace.collection', '0.1.0', 'https://downloadme.com', 'galaxy')
+ req = Candidate('ansible_namespace.collection', '0.1.0', 'https://downloadme.com', 'galaxy', None)
collection.install(req, to_text(collections_dir), concrete_artifact_cm)
actual_files = os.listdir(collection_path)
@@ -862,8 +890,8 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch):
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
- requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
- collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm)
+ requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
+ collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True)
assert os.path.isdir(collection_path)
@@ -898,8 +926,8 @@ def test_install_collections_existing_without_force(collection_artifact, monkeyp
assert os.path.isdir(collection_path)
- requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
- collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm)
+ requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
+ collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True)
assert os.path.isdir(collection_path)
@@ -930,8 +958,8 @@ def test_install_missing_metadata_warning(collection_artifact, monkeypatch):
os.unlink(b_path)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
- requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
- collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm)
+ requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
+ collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True)
display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
@@ -951,8 +979,8 @@ def test_install_collection_with_circular_dependency(collection_artifact, monkey
monkeypatch.setattr(Display, 'display', mock_display)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
- requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
- collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm)
+ requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
+ collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True)
assert os.path.isdir(collection_path)
@@ -975,3 +1003,50 @@ def test_install_collection_with_circular_dependency(collection_artifact, monkey
assert display_msgs[1] == "Starting collection install process"
assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
assert display_msgs[3] == "ansible_namespace.collection:0.1.0 was installed successfully"
+
+
+@pytest.mark.parametrize(
+ "signatures,required_successful_count,ignore_errors,expected_success",
+ [
+ ([], 'all', [], True),
+ (["good_signature"], 'all', [], True),
+ (["good_signature", collection.gpg.GpgBadArmor(status='failed')], 'all', [], False),
+ ([collection.gpg.GpgBadArmor(status='failed')], 'all', [], False),
+ # This is expected to succeed because ignored does not increment failed signatures.
+ # "all" signatures is not a specific number, so all == no (non-ignored) signatures in this case.
+ ([collection.gpg.GpgBadArmor(status='failed')], 'all', ["BADARMOR"], True),
+ ([collection.gpg.GpgBadArmor(status='failed'), "good_signature"], 'all', ["BADARMOR"], True),
+ ([], '+all', [], False),
+ ([collection.gpg.GpgBadArmor(status='failed')], '+all', ["BADARMOR"], False),
+ ([], '1', [], True),
+ ([], '+1', [], False),
+ (["good_signature"], '2', [], False),
+ (["good_signature", collection.gpg.GpgBadArmor(status='failed')], '2', [], False),
+ # This is expected to fail because ignored does not increment successful signatures.
+ # 2 signatures are required, but only 1 is successful.
+ (["good_signature", collection.gpg.GpgBadArmor(status='failed')], '2', ["BADARMOR"], False),
+ (["good_signature", "good_signature"], '2', [], True),
+ ]
+)
+def test_verify_file_signatures(signatures, required_successful_count, ignore_errors, expected_success):
+ # type: (List[bool], int, bool, bool) -> None
+
+ def gpg_error_generator(results):
+ for result in results:
+ if isinstance(result, collection.gpg.GpgBaseError):
+ yield result
+
+ fqcn = 'ns.coll'
+ manifest_file = 'MANIFEST.json'
+ keyring = '~/.ansible/pubring.kbx'
+
+ with patch.object(collection, 'run_gpg_verify', MagicMock(return_value=("somestdout", 0,))):
+ with patch.object(collection, 'parse_gpg_errors', MagicMock(return_value=gpg_error_generator(signatures))):
+ assert collection.verify_file_signatures(
+ fqcn,
+ manifest_file,
+ signatures,
+ keyring,
+ required_successful_count,
+ ignore_errors
+ ) == expected_success
diff --git a/test/units/galaxy/test_role_install.py b/test/units/galaxy/test_role_install.py
index cf990b55..687fcac1 100644
--- a/test/units/galaxy/test_role_install.py
+++ b/test/units/galaxy/test_role_install.py
@@ -8,6 +8,7 @@ __metaclass__ = type
import os
+import functools
import pytest
import tempfile
@@ -59,9 +60,9 @@ def mock_NamedTemporaryFile(mocker, **args):
return mock_ntf
-@pytest.fixture(autouse=True)
-def init_test(monkeypatch):
- monkeypatch.setattr(tempfile, 'NamedTemporaryFile', mock_NamedTemporaryFile)
+@pytest.fixture
+def init_mock_temp_file(mocker, monkeypatch):
+ monkeypatch.setattr(tempfile, 'NamedTemporaryFile', functools.partial(mock_NamedTemporaryFile, mocker))
@pytest.fixture(autouse=True)
@@ -74,7 +75,7 @@ def mock_role_download_api(mocker, monkeypatch):
return mock_role_api
-def test_role_download_github(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+def test_role_download_github(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
@@ -89,7 +90,7 @@ def test_role_download_github(mocker, galaxy_server, mock_role_download_api, mon
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
-def test_role_download_github_default_version(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+def test_role_download_github_default_version(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
@@ -104,7 +105,7 @@ def test_role_download_github_default_version(mocker, galaxy_server, mock_role_d
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.2.tar.gz'
-def test_role_download_github_no_download_url_for_version(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+def test_role_download_github_no_download_url_for_version(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
@@ -119,7 +120,7 @@ def test_role_download_github_no_download_url_for_version(mocker, galaxy_server,
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
-def test_role_download_url(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+def test_role_download_url(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
@@ -135,7 +136,7 @@ def test_role_download_url(mocker, galaxy_server, mock_role_download_api, monkey
assert mock_role_download_api.mock_calls[0][1][0] == 'http://localhost:8080/test_owner/test_role/0.0.1.tar.gz'
-def test_role_download_url_default_version(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+def test_role_download_url_default_version(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
diff --git a/test/units/galaxy/test_token.py b/test/units/galaxy/test_token.py
index 13426688..98dec5bf 100644
--- a/test/units/galaxy/test_token.py
+++ b/test/units/galaxy/test_token.py
@@ -8,7 +8,7 @@ __metaclass__ = type
import os
import pytest
-from units.compat.mock import MagicMock
+from mock import MagicMock
import ansible.constants as C
from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF
diff --git a/test/units/mock/path.py b/test/units/mock/path.py
index 721dc293..dc51a143 100644
--- a/test/units/mock/path.py
+++ b/test/units/mock/path.py
@@ -1,7 +1,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.utils.path import unfrackpath
diff --git a/test/units/module_utils/basic/test_argument_spec.py b/test/units/module_utils/basic/test_argument_spec.py
index 24bbe2e9..20bfb01e 100644
--- a/test/units/module_utils/basic/test_argument_spec.py
+++ b/test/units/module_utils/basic/test_argument_spec.py
@@ -12,7 +12,7 @@ import os
import pytest
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.module_utils import basic
from ansible.module_utils.api import basic_auth_argument_spec, rate_limit_argument_spec, retry_argument_spec
from ansible.module_utils.common import warnings
@@ -709,3 +709,16 @@ def test_no_log_none(stdin, capfd):
# makes it into am.no_log_values. Instead we can check for the warning
# emitted by am._log_invocation.
assert len(get_warning_messages()) > 0
+
+
+@pytest.mark.parametrize("stdin", [{"pass": "testing"}], indirect=["stdin"])
+def test_no_log_alias(stdin, capfd):
+ """Given module parameters that use an alias for a parameter that matches
+ PASSWORD_MATCH and has no_log=True set, a warning should not be issued.
+ """
+ arg_spec = {
+ "other_pass": {"no_log": True, "aliases": ["pass"]},
+ }
+ am = basic.AnsibleModule(arg_spec)
+
+ assert len(get_warning_messages()) == 0
diff --git a/test/units/module_utils/basic/test_deprecate_warn.py b/test/units/module_utils/basic/test_deprecate_warn.py
index 7fd54ce0..581ba6da 100644
--- a/test/units/module_utils/basic/test_deprecate_warn.py
+++ b/test/units/module_utils/basic/test_deprecate_warn.py
@@ -71,7 +71,7 @@ def test_deprecate_without_list(am, capfd):
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
-def test_deprecate_without_list(am, capfd):
+def test_deprecate_without_list_version_date_not_set(am, capfd):
with pytest.raises(AssertionError) as ctx:
am.deprecate('Simple deprecation warning', date='', version='')
assert ctx.value.args[0] == "implementation error -- version and date must not both be set"
diff --git a/test/units/module_utils/basic/test_filesystem.py b/test/units/module_utils/basic/test_filesystem.py
index 37d1c553..92e2c46e 100644
--- a/test/units/module_utils/basic/test_filesystem.py
+++ b/test/units/module_utils/basic/test_filesystem.py
@@ -9,7 +9,7 @@ __metaclass__ = type
from units.mock.procenv import ModuleTestCase
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.module_utils.six.moves import builtins
realimport = builtins.__import__
@@ -134,3 +134,27 @@ class TestOtherFilesystem(ModuleTestCase):
with patch('os.lchown', side_effect=OSError) as m:
self.assertRaises(SystemExit, am.set_group_if_different, '/path/to/file', 'root', False)
+
+ def test_module_utils_basic_ansible_module_set_directory_attributes_if_different(self):
+ from ansible.module_utils import basic
+ basic._ANSIBLE_ARGS = None
+
+ am = basic.AnsibleModule(
+ argument_spec=dict(),
+ )
+
+ file_args = {
+ 'path': '/path/to/file',
+ 'mode': None,
+ 'owner': None,
+ 'group': None,
+ 'seuser': None,
+ 'serole': None,
+ 'setype': None,
+ 'selevel': None,
+ 'secontext': [None, None, None],
+ 'attributes': None,
+ }
+
+ self.assertEqual(am.set_directory_attributes_if_different(file_args, True), True)
+ self.assertEqual(am.set_directory_attributes_if_different(file_args, False), False)
diff --git a/test/units/module_utils/basic/test_get_module_path.py b/test/units/module_utils/basic/test_get_module_path.py
index 6ff4a3bc..2d0b8dd0 100644
--- a/test/units/module_utils/basic/test_get_module_path.py
+++ b/test/units/module_utils/basic/test_get_module_path.py
@@ -9,7 +9,7 @@ __metaclass__ = type
from units.mock.procenv import ModuleTestCase
-from units.compat.mock import patch
+from mock import patch
from ansible.module_utils.six.moves import builtins
realimport = builtins.__import__
diff --git a/test/units/module_utils/basic/test_imports.py b/test/units/module_utils/basic/test_imports.py
index d1a5f379..79ab971f 100644
--- a/test/units/module_utils/basic/test_imports.py
+++ b/test/units/module_utils/basic/test_imports.py
@@ -12,7 +12,7 @@ import sys
from units.mock.procenv import ModuleTestCase
from units.compat import unittest
-from units.compat.mock import patch
+from mock import patch
from ansible.module_utils.six.moves import builtins
realimport = builtins.__import__
diff --git a/test/units/module_utils/basic/test_platform_distribution.py b/test/units/module_utils/basic/test_platform_distribution.py
index 3c1afb7d..6579bee9 100644
--- a/test/units/module_utils/basic/test_platform_distribution.py
+++ b/test/units/module_utils/basic/test_platform_distribution.py
@@ -9,7 +9,7 @@ __metaclass__ = type
import pytest
-from units.compat.mock import patch
+from mock import patch
from ansible.module_utils.six.moves import builtins
diff --git a/test/units/module_utils/basic/test_selinux.py b/test/units/module_utils/basic/test_selinux.py
index 3a34d314..600ff6b3 100644
--- a/test/units/module_utils/basic/test_selinux.py
+++ b/test/units/module_utils/basic/test_selinux.py
@@ -11,7 +11,8 @@ import errno
import json
import pytest
-from ...compat.mock import mock_open, patch
+from mock import mock_open, patch
+
from ansible.module_utils import basic
from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils.six.moves import builtins
diff --git a/test/units/module_utils/basic/test_set_cwd.py b/test/units/module_utils/basic/test_set_cwd.py
index 159236b7..77418601 100644
--- a/test/units/module_utils/basic/test_set_cwd.py
+++ b/test/units/module_utils/basic/test_set_cwd.py
@@ -13,7 +13,7 @@ import tempfile
import pytest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.module_utils._text import to_bytes
from ansible.module_utils import basic
diff --git a/test/units/module_utils/basic/test_tmpdir.py b/test/units/module_utils/basic/test_tmpdir.py
index 818cb9b1..eec8f62c 100644
--- a/test/units/module_utils/basic/test_tmpdir.py
+++ b/test/units/module_utils/basic/test_tmpdir.py
@@ -13,7 +13,7 @@ import tempfile
import pytest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.module_utils._text import to_bytes
from ansible.module_utils import basic
diff --git a/test/units/module_utils/common/arg_spec/test_aliases.py b/test/units/module_utils/common/arg_spec/test_aliases.py
index f4c96c74..1c1e243a 100644
--- a/test/units/module_utils/common/arg_spec/test_aliases.py
+++ b/test/units/module_utils/common/arg_spec/test_aliases.py
@@ -95,6 +95,11 @@ def test_aliases(arg_spec, parameters, expected, deprecation, warning):
assert isinstance(result, ValidationResult)
assert result.validated_parameters == expected
assert result.error_messages == []
+ assert result._aliases == {
+ alias: param
+ for param, value in arg_spec.items()
+ for alias in value.get("aliases", [])
+ }
if deprecation:
assert deprecation == result._deprecations[0]
diff --git a/test/units/module_utils/common/arg_spec/test_validate_invalid.py b/test/units/module_utils/common/arg_spec/test_validate_invalid.py
index 5384ee22..7302e8aa 100644
--- a/test/units/module_utils/common/arg_spec/test_validate_invalid.py
+++ b/test/units/module_utils/common/arg_spec/test_validate_invalid.py
@@ -100,6 +100,14 @@ INVALID_SPECS = [
{'req': None},
set(),
"missing required arguments: req"
+ ),
+ (
+ 'blank_values',
+ {'ch_param': {'elements': 'str', 'type': 'list', 'choices': ['a', 'b']}},
+ {'ch_param': ['']},
+ {'ch_param': ['']},
+ set(),
+ "value of ch_param must be one or more of"
)
]
diff --git a/test/units/module_utils/common/test_locale.py b/test/units/module_utils/common/test_locale.py
index 9d959860..f8fea476 100644
--- a/test/units/module_utils/common/test_locale.py
+++ b/test/units/module_utils/common/test_locale.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.module_utils.common.locale import get_best_parsable_locale
diff --git a/test/units/module_utils/common/test_sys_info.py b/test/units/module_utils/common/test_sys_info.py
index 18aafe53..63101a81 100644
--- a/test/units/module_utils/common/test_sys_info.py
+++ b/test/units/module_utils/common/test_sys_info.py
@@ -9,7 +9,7 @@ __metaclass__ = type
import pytest
-from units.compat.mock import patch
+from mock import patch
from ansible.module_utils.six.moves import builtins
diff --git a/test/units/module_utils/common/validation/test_check_required_if.py b/test/units/module_utils/common/validation/test_check_required_if.py
index 5b4b7983..4189164a 100644
--- a/test/units/module_utils/common/validation/test_check_required_if.py
+++ b/test/units/module_utils/common/validation/test_check_required_if.py
@@ -53,7 +53,7 @@ def test_check_required_if_missing_multiple():
assert to_native(e.value) == expected
-def test_check_required_if_missing_multiple():
+def test_check_required_if_missing_multiple_with_context():
arguments_terms = [["state", "present", ("path", "owner")]]
params = {
"state": "present",
diff --git a/test/units/module_utils/facts/base.py b/test/units/module_utils/facts/base.py
index 33d3087b..23e620cb 100644
--- a/test/units/module_utils/facts/base.py
+++ b/test/units/module_utils/facts/base.py
@@ -20,7 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import Mock, patch
+from mock import Mock, patch
class BaseFactsTest(unittest.TestCase):
diff --git a/test/units/module_utils/facts/hardware/linux_data.py b/test/units/module_utils/facts/hardware/linux_data.py
index 8e056769..3879188d 100644
--- a/test/units/module_utils/facts/hardware/linux_data.py
+++ b/test/units/module_utils/facts/hardware/linux_data.py
@@ -583,3 +583,51 @@ CPU_INFO_TEST_SCENARIOS = [
},
},
]
+
+SG_INQ_OUTPUTS = ["""
+Identify controller for /dev/nvme0n1:
+ Model number: Amazon Elastic Block Store
+ Serial number: vol0123456789
+ Firmware revision: 1.0
+ Version: 0.0
+ No optional admin command support
+ No optional NVM command support
+ PCI vendor ID VID/SSVID: 0x1d0f/0x1d0f
+ IEEE OUI Identifier: 0xa002dc
+ Controller ID: 0x0
+ Number of namespaces: 1
+ Maximum data transfer size: 64 pages
+ Namespace 1 (deduced from device name):
+ Namespace size/capacity: 62914560/62914560 blocks
+ Namespace utilization: 0 blocks
+ Number of LBA formats: 1
+ Index LBA size: 0
+ LBA format 0 support: <-- active
+ Logical block size: 512 bytes
+ Approximate namespace size: 32 GB
+ Metadata size: 0 bytes
+ Relative performance: Best [0x0]
+""", """
+Identify controller for /dev/nvme0n1:
+ Model number: Amazon Elastic Block Store
+ Unit serial number: vol0123456789
+ Firmware revision: 1.0
+ Version: 0.0
+ No optional admin command support
+ No optional NVM command support
+ PCI vendor ID VID/SSVID: 0x1d0f/0x1d0f
+ IEEE OUI Identifier: 0xa002dc
+ Controller ID: 0x0
+ Number of namespaces: 1
+ Maximum data transfer size: 64 pages
+ Namespace 1 (deduced from device name):
+ Namespace size/capacity: 62914560/62914560 blocks
+ Namespace utilization: 0 blocks
+ Number of LBA formats: 1
+ Index LBA size: 0
+ LBA format 0 support: <-- active
+ Logical block size: 512 bytes
+ Approximate namespace size: 32 GB
+ Metadata size: 0 bytes
+ Relative performance: Best [0x0]
+"""]
diff --git a/test/units/module_utils/facts/hardware/test_linux.py b/test/units/module_utils/facts/hardware/test_linux.py
index 6e77683a..1d584593 100644
--- a/test/units/module_utils/facts/hardware/test_linux.py
+++ b/test/units/module_utils/facts/hardware/test_linux.py
@@ -19,13 +19,13 @@ __metaclass__ = type
import os
from units.compat import unittest
-from units.compat.mock import Mock, patch
+from mock import Mock, patch
from ansible.module_utils.facts import timeout
from ansible.module_utils.facts.hardware import linux
-from . linux_data import LSBLK_OUTPUT, LSBLK_OUTPUT_2, LSBLK_UUIDS, MTAB, MTAB_ENTRIES, BIND_MOUNTS, STATVFS_INFO, UDEVADM_UUID, UDEVADM_OUTPUT
+from . linux_data import LSBLK_OUTPUT, LSBLK_OUTPUT_2, LSBLK_UUIDS, MTAB, MTAB_ENTRIES, BIND_MOUNTS, STATVFS_INFO, UDEVADM_UUID, UDEVADM_OUTPUT, SG_INQ_OUTPUTS
with open(os.path.join(os.path.dirname(__file__), '../fixtures/findmount_output.txt')) as f:
FINDMNT_OUTPUT = f.read()
@@ -173,3 +173,26 @@ class TestFactsLinuxHardwareGetMountFacts(unittest.TestCase):
udevadm_uuid = lh._udevadm_uuid('mock_device')
self.assertEqual(udevadm_uuid, '57b1a3e7-9019-4747-9809-7ec52bba9179')
+
+ def test_get_sg_inq_serial(self):
+ # Valid outputs
+ for sq_inq_output in SG_INQ_OUTPUTS:
+ module = Mock()
+ module.run_command = Mock(return_value=(0, sq_inq_output, '')) # (rc, out, err)
+ lh = linux.LinuxHardware(module=module, load_on_init=False)
+ sg_inq_serial = lh._get_sg_inq_serial('/usr/bin/sg_inq', 'nvme0n1')
+ self.assertEqual(sg_inq_serial, 'vol0123456789')
+
+ # Invalid output
+ module = Mock()
+ module.run_command = Mock(return_value=(0, '', '')) # (rc, out, err)
+ lh = linux.LinuxHardware(module=module, load_on_init=False)
+ sg_inq_serial = lh._get_sg_inq_serial('/usr/bin/sg_inq', 'nvme0n1')
+ self.assertEqual(sg_inq_serial, None)
+
+ # Non zero rc
+ module = Mock()
+ module.run_command = Mock(return_value=(42, '', 'Error 42')) # (rc, out, err)
+ lh = linux.LinuxHardware(module=module, load_on_init=False)
+ sg_inq_serial = lh._get_sg_inq_serial('/usr/bin/sg_inq', 'nvme0n1')
+ self.assertEqual(sg_inq_serial, None)
diff --git a/test/units/module_utils/facts/network/test_fc_wwn.py b/test/units/module_utils/facts/network/test_fc_wwn.py
index 32a3a43d..27d45234 100644
--- a/test/units/module_utils/facts/network/test_fc_wwn.py
+++ b/test/units/module_utils/facts/network/test_fc_wwn.py
@@ -6,7 +6,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.facts.network import fc_wwn
-from units.compat.mock import Mock
+from mock import Mock
# AIX lsdev
diff --git a/test/units/module_utils/facts/network/test_generic_bsd.py b/test/units/module_utils/facts/network/test_generic_bsd.py
index afb698c5..79cc4815 100644
--- a/test/units/module_utils/facts/network/test_generic_bsd.py
+++ b/test/units/module_utils/facts/network/test_generic_bsd.py
@@ -18,7 +18,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat.mock import Mock
+from mock import Mock
from units.compat import unittest
from ansible.module_utils.facts.network import generic_bsd
diff --git a/test/units/module_utils/facts/network/test_iscsi_get_initiator.py b/test/units/module_utils/facts/network/test_iscsi_get_initiator.py
index 2048ba2a..78e5c960 100644
--- a/test/units/module_utils/facts/network/test_iscsi_get_initiator.py
+++ b/test/units/module_utils/facts/network/test_iscsi_get_initiator.py
@@ -6,7 +6,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.facts.network import iscsi
-from units.compat.mock import Mock
+from mock import Mock
# AIX # lsattr -E -l iscsi0
diff --git a/test/units/module_utils/facts/other/test_facter.py b/test/units/module_utils/facts/other/test_facter.py
index 7466338e..517265d3 100644
--- a/test/units/module_utils/facts/other/test_facter.py
+++ b/test/units/module_utils/facts/other/test_facter.py
@@ -19,7 +19,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat.mock import Mock, patch
+from mock import Mock, patch
from .. base import BaseFactsTest
diff --git a/test/units/module_utils/facts/other/test_ohai.py b/test/units/module_utils/facts/other/test_ohai.py
index 42a72d97..38fb67f4 100644
--- a/test/units/module_utils/facts/other/test_ohai.py
+++ b/test/units/module_utils/facts/other/test_ohai.py
@@ -19,7 +19,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat.mock import Mock, patch
+from mock import Mock, patch
from .. base import BaseFactsTest
diff --git a/test/units/module_utils/facts/system/distribution/conftest.py b/test/units/module_utils/facts/system/distribution/conftest.py
index d27b97f0..0282a7fc 100644
--- a/test/units/module_utils/facts/system/distribution/conftest.py
+++ b/test/units/module_utils/facts/system/distribution/conftest.py
@@ -8,7 +8,7 @@ __metaclass__ = type
import pytest
-from units.compat.mock import Mock
+from mock import Mock
@pytest.fixture
diff --git a/test/units/module_utils/facts/system/distribution/fixtures/deepin_20.4.json b/test/units/module_utils/facts/system/distribution/fixtures/deepin_20.4.json
new file mode 100644
index 00000000..ca5d50dd
--- /dev/null
+++ b/test/units/module_utils/facts/system/distribution/fixtures/deepin_20.4.json
@@ -0,0 +1,29 @@
+{
+ "name": "Deepin 20.4",
+ "distro": {
+ "codename": "apricot",
+ "id": "Deepin",
+ "name": "Deepin",
+ "version": "20.4",
+ "version_best": "20.4",
+ "os_release_info": {},
+ "lsb_release_info": {}
+ },
+ "input": {
+ "/etc/os-release": "PRETTY_NAME=\"Deepin 20.4\"\nNAME=\"Deepin\"\nVERSION_ID=\"20.4\"\nVERSION=\"20.4\"\nVERSION_CODENAME=\"apricot\"\nID=Deepin\nHOME_URL=\"https://www.deepin.org/\"\nBUG_REPORT_URL=\"https://bbs.deepin.org/\"\n",
+ "/etc/lsb-release": "DISTRIB_ID=Deepin\nDISTRIB_RELEASE=20.4\nDISTRIB_DESCRIPTION=\"Deepin 20.4\"\nDISTRIB_CODENAME=apricot\n",
+ "/usr/lib/os-release": "PRETTY_NAME=\"Deepin 20.4\"\nNAME=\"Deepin\"\nVERSION_ID=\"20.4\"\nVERSION=\"20.4\"\nVERSION_CODENAME=\"apricot\"\nID=Deepin\nHOME_URL=\"https://www.deepin.org/\"\nBUG_REPORT_URL=\"https://bbs.deepin.org/\"\n"
+ },
+ "platform.dist": [
+ "Deepin",
+ "20.4",
+ "apricot"
+ ],
+ "result": {
+ "distribution": "Deepin",
+ "distribution_version": "20.4",
+ "distribution_release": "apricot",
+ "distribution_major_version": "20",
+ "os_family": "Debian"
+ }
+}
diff --git a/test/units/module_utils/facts/system/distribution/fixtures/eurolinux_8.5.json b/test/units/module_utils/facts/system/distribution/fixtures/eurolinux_8.5.json
new file mode 100644
index 00000000..add1b731
--- /dev/null
+++ b/test/units/module_utils/facts/system/distribution/fixtures/eurolinux_8.5.json
@@ -0,0 +1,46 @@
+{
+ "name": "EuroLinux 8.5",
+ "distro": {
+ "codename": "Tirana",
+ "id": "eurolinux",
+ "name": "EuroLinux",
+ "version": "8.5",
+ "version_best": "8.5",
+ "lsb_release_info": {},
+ "os_release_info": {
+ "name": "EuroLinux",
+ "version": "8.5 (Tirana)",
+ "id": "eurolinux",
+ "id_like": "rhel fedora centos",
+ "version_id": "8.5",
+ "platform_id": "platform:el8",
+ "pretty_name": "EuroLinux 8.5 (Tirana)",
+ "ansi_color": "0;34",
+ "cpe_name": "cpe:/o:eurolinux:eurolinux:8",
+ "home_url": "https://www.euro-linux.com/",
+ "bug_report_url": "https://github.com/EuroLinux/eurolinux-distro-bugs-and-rfc/",
+ "redhat_support_product": "EuroLinux",
+ "redhat_support_product_version": "8",
+ "codename": "Tirana"
+ }
+ },
+ "input": {
+ "/etc/redhat-release": "EuroLinux release 8.5 (Tirana) \n",
+ "/etc/system-release": "EuroLinux release 8.5 (Tirana) \n",
+ "/etc/os-release": "NAME=\"EuroLinux\"\nVERSION=\"8.5 (Tirana)\"\nID=\"eurolinux\"\nID_LIKE=\"rhel fedora centos\"\nVERSION_ID=\"8.5\"\nPLATFORM_ID=\"platform:el8\"\nPRETTY_NAME=\"EuroLinux 8.5 (Tirana)\"\nANSI_COLOR=\"0;34\"\nCPE_NAME=\"cpe:/o:eurolinux:eurolinux:8\"\nHOME_URL=\"https://www.euro-linux.com/\"\nBUG_REPORT_URL=\"https://github.com/EuroLinux/eurolinux-distro-bugs-and-rfc/\"\nREDHAT_SUPPORT_PRODUCT=\"EuroLinux\"\nREDHAT_SUPPORT_PRODUCT_VERSION=\"8\"\n",
+ "/usr/lib/os-release": "NAME=\"EuroLinux\"\nVERSION=\"8.5 (Tirana)\"\nID=\"eurolinux\"\nID_LIKE=\"rhel fedora centos\"\nVERSION_ID=\"8.5\"\nPLATFORM_ID=\"platform:el8\"\nPRETTY_NAME=\"EuroLinux 8.5 (Tirana)\"\nANSI_COLOR=\"0;34\"\nCPE_NAME=\"cpe:/o:eurolinux:eurolinux:8\"\nHOME_URL=\"https://www.euro-linux.com/\"\nBUG_REPORT_URL=\"https://github.com/EuroLinux/eurolinux-distro-bugs-and-rfc/\"\nREDHAT_SUPPORT_PRODUCT=\"EuroLinux\"\nREDHAT_SUPPORT_PRODUCT_VERSION=\"8\"\n"
+ },
+ "platform.dist": [
+ "eurolinux",
+ "8.5",
+ "Tirana"
+ ],
+ "result": {
+ "distribution": "EuroLinux",
+ "distribution_version": "8.5",
+ "distribution_release": "Tirana",
+ "distribution_major_version": "8",
+ "os_family": "RedHat"
+ },
+ "platform.release": "4.18.0-348.2.1.el8_5.x86_64"
+}
diff --git a/test/units/module_utils/facts/system/distribution/fixtures/tencentos_3_1.json b/test/units/module_utils/facts/system/distribution/fixtures/tencentos_3_1.json
new file mode 100644
index 00000000..f1051dd6
--- /dev/null
+++ b/test/units/module_utils/facts/system/distribution/fixtures/tencentos_3_1.json
@@ -0,0 +1,50 @@
+{
+ "name": "TencentOS 3.1",
+ "distro": {
+ "codename": "Final",
+ "id": "tencentos",
+ "name": "TencentOS Server",
+ "version": "3.1",
+ "version_best": "3.1",
+ "lsb_release_info": {},
+ "os_release_info": {
+ "name": "TencentOS Server",
+ "version": "3.1 (Final)",
+ "id": "tencentos",
+ "id_like": "rhel fedora centos",
+ "version_id": "3.1",
+ "platform_id": "platform:el8",
+ "pretty_name": "TencentOS Server 3.1 (Final)",
+ "ansi_color": "0;31",
+ "cpe_name": "cpe:/o:tencentos:tencentos:3",
+ "home_url": "https://tlinux.qq.com/",
+ "bug_report_url": "https://tlinux.qq.com/",
+ "centos_mantisbt_project": "CentOS-8",
+ "centos_mantisbt_project_version": "8",
+ "redhat_support_product": "centos",
+ "redhat_support_product_version": "8",
+ "name_orig": "CentOS Linux",
+ "codename": "Final"
+ }
+ },
+ "input": {
+ "/etc/centos-release": "NAME=\"TencentOS Server\"\nVERSION=\"3.1 (Final)\"\nID=\"tencentos\"\nID_LIKE=\"rhel fedora centos\"\nVERSION_ID=\"3.1\"\nPLATFORM_ID=\"platform:el8\"\nPRETTY_NAME=\"TencentOS Server 3.1 (Final)\"\nANSI_COLOR=\"0;31\"\nCPE_NAME=\"cpe:/o:tencentos:tencentos:3\"\nHOME_URL=\"https://tlinux.qq.com/\"\nBUG_REPORT_URL=\"https://tlinux.qq.com/\"\n\nCENTOS_MANTISBT_PROJECT=\"CentOS-8\"\nCENTOS_MANTISBT_PROJECT_VERSION=\"8\"\nREDHAT_SUPPORT_PRODUCT=\"centos\"\nREDHAT_SUPPORT_PRODUCT_VERSION=\"8\"\nNAME_ORIG=\"CentOS Linux\"\n",
+ "/etc/redhat-release": "CentOS Linux release 8.4.2105 (Core)\n",
+ "/etc/system-release": "NAME=\"TencentOS Server\"\nVERSION=\"3.1 (Final)\"\nID=\"tencentos\"\nID_LIKE=\"rhel fedora centos\"\nVERSION_ID=\"3.1\"\nPLATFORM_ID=\"platform:el8\"\nPRETTY_NAME=\"TencentOS Server 3.1 (Final)\"\nANSI_COLOR=\"0;31\"\nCPE_NAME=\"cpe:/o:tencentos:tencentos:3\"\nHOME_URL=\"https://tlinux.qq.com/\"\nBUG_REPORT_URL=\"https://tlinux.qq.com/\"\n\nCENTOS_MANTISBT_PROJECT=\"CentOS-8\"\nCENTOS_MANTISBT_PROJECT_VERSION=\"8\"\nREDHAT_SUPPORT_PRODUCT=\"centos\"\nREDHAT_SUPPORT_PRODUCT_VERSION=\"8\"\nNAME_ORIG=\"CentOS Linux\"\n",
+ "/etc/os-release": "NAME=\"TencentOS Server\"\nVERSION=\"3.1 (Final)\"\nID=\"tencentos\"\nID_LIKE=\"rhel fedora centos\"\nVERSION_ID=\"3.1\"\nPLATFORM_ID=\"platform:el8\"\nPRETTY_NAME=\"TencentOS Server 3.1 (Final)\"\nANSI_COLOR=\"0;31\"\nCPE_NAME=\"cpe:/o:tencentos:tencentos:3\"\nHOME_URL=\"https://tlinux.qq.com/\"\nBUG_REPORT_URL=\"https://tlinux.qq.com/\"\n\nCENTOS_MANTISBT_PROJECT=\"CentOS-8\"\nCENTOS_MANTISBT_PROJECT_VERSION=\"8\"\nREDHAT_SUPPORT_PRODUCT=\"centos\"\nREDHAT_SUPPORT_PRODUCT_VERSION=\"8\"\nNAME_ORIG=\"CentOS Linux\"\n",
+ "/usr/lib/os-release": "NAME=\"CentOS Linux\"\nVERSION=\"8\"\nID=\"centos\"\nID_LIKE=\"rhel fedora\"\nVERSION_ID=\"8\"\nPLATFORM_ID=\"platform:el8\"\nPRETTY_NAME=\"CentOS Linux 8\"\nANSI_COLOR=\"0;31\"\nCPE_NAME=\"cpe:/o:centos:centos:8\"\nHOME_URL=\"https://centos.org/\"\nBUG_REPORT_URL=\"https://bugs.centos.org/\"\nCENTOS_MANTISBT_PROJECT=\"CentOS-8\"\nCENTOS_MANTISBT_PROJECT_VERSION=\"8\"\n"
+ },
+ "platform.dist": [
+ "tencentos",
+ "3.1",
+ "Final"
+ ],
+ "result": {
+ "distribution": "TencentOS",
+ "distribution_version": "3.1",
+ "distribution_release": "Final",
+ "distribution_major_version": "3",
+ "os_family": "RedHat"
+ },
+ "platform.release": "5.4.32-19-0001"
+} \ No newline at end of file
diff --git a/test/units/module_utils/facts/system/distribution/fixtures/uos_20.json b/test/units/module_utils/facts/system/distribution/fixtures/uos_20.json
new file mode 100644
index 00000000..d51f62de
--- /dev/null
+++ b/test/units/module_utils/facts/system/distribution/fixtures/uos_20.json
@@ -0,0 +1,29 @@
+{
+ "name": "Uos 20",
+ "distro": {
+ "codename": "fou",
+ "id": "Uos",
+ "name": "Uos",
+ "version": "20",
+ "version_best": "20",
+ "os_release_info": {},
+ "lsb_release_info": {}
+ },
+ "input": {
+ "/etc/os-release": "PRETTY_NAME=\"UnionTech OS Server 20\"\nNAME=\"UnionTech OS Server 20\"\nVERSION_ID=\"20\"\nVERSION=\"20\"\nID=UOS\nHOME_URL=\"https://www.chinauos.com/\"\nBUG_REPORT_URL=\"https://bbs.chinauos.com/\"\nVERSION_CODENAME=fou",
+ "/etc/lsb-release": "DISTRIB_ID=uos\nDISTRIB_RELEASE=20\nDISTRIB_DESCRIPTION=\"UnionTech OS Server 20\"\nDISTRIB_CODENAME=fou\n",
+ "/usr/lib/os-release": "PRETTY_NAME=\"UnionTech OS Server 20\"\nNAME=\"UnionTech OS Server 20\"\nVERSION_ID=\"20\"\nVERSION=\"20\"\nID=UOS\nHOME_URL=\"https://www.chinauos.com/\"\nBUG_REPORT_URL=\"https://bbs.chinauos.com/\"\nVERSION_CODENAME=fou"
+ },
+ "platform.dist": [
+ "uos",
+ "20",
+ "fou"
+ ],
+ "result": {
+ "distribution": "Uos",
+ "distribution_version": "20",
+ "distribution_release": "fou",
+ "distribution_major_version": "20",
+ "os_family": "Debian"
+ }
+}
diff --git a/test/units/module_utils/facts/system/test_lsb.py b/test/units/module_utils/facts/system/test_lsb.py
index e2ed2ec0..890bddb6 100644
--- a/test/units/module_utils/facts/system/test_lsb.py
+++ b/test/units/module_utils/facts/system/test_lsb.py
@@ -19,7 +19,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat.mock import Mock, patch
+from mock import Mock, patch
from .. base import BaseFactsTest
diff --git a/test/units/module_utils/facts/test_ansible_collector.py b/test/units/module_utils/facts/test_ansible_collector.py
index 47d88df9..e1d60c3d 100644
--- a/test/units/module_utils/facts/test_ansible_collector.py
+++ b/test/units/module_utils/facts/test_ansible_collector.py
@@ -21,7 +21,7 @@ __metaclass__ = type
# for testing
from units.compat import unittest
-from units.compat.mock import Mock, patch
+from mock import Mock, patch
from ansible.module_utils.facts import collector
from ansible.module_utils.facts import ansible_collector
diff --git a/test/units/module_utils/facts/test_collector.py b/test/units/module_utils/facts/test_collector.py
index 9eab89f7..4fc4bc5f 100644
--- a/test/units/module_utils/facts/test_collector.py
+++ b/test/units/module_utils/facts/test_collector.py
@@ -265,12 +265,12 @@ class TestGetCollectorNames(unittest.TestCase):
valid_subsets = frozenset(['my_fact', 'something_else'])
minimal_gather_subset = frozenset(['my_fact'])
- self.assertRaisesRegexp(TypeError,
- r'Bad subset .* given to Ansible.*allowed\:.*all,.*my_fact.*',
- collector.get_collector_names,
- valid_subsets=valid_subsets,
- minimal_gather_subset=minimal_gather_subset,
- gather_subset=['my_fact', 'not_a_valid_gather_subset'])
+ self.assertRaisesRegex(TypeError,
+ r'Bad subset .* given to Ansible.*allowed\:.*all,.*my_fact.*',
+ collector.get_collector_names,
+ valid_subsets=valid_subsets,
+ minimal_gather_subset=minimal_gather_subset,
+ gather_subset=['my_fact', 'not_a_valid_gather_subset'])
class TestFindUnresolvedRequires(unittest.TestCase):
@@ -349,10 +349,10 @@ class TestResolveRequires(unittest.TestCase):
all_fact_subsets = {'env': [default_collectors.EnvFactCollector],
'network': [default_collectors.LinuxNetworkCollector],
'virtual': [default_collectors.LinuxVirtualCollector]}
- self.assertRaisesRegexp(collector.UnresolvedFactDep,
- 'unresolved fact dep.*required_thing2',
- collector.resolve_requires,
- unresolved, all_fact_subsets)
+ self.assertRaisesRegex(collector.UnresolvedFactDep,
+ 'unresolved fact dep.*required_thing2',
+ collector.resolve_requires,
+ unresolved, all_fact_subsets)
def test(self):
unresolved = ['env', 'network']
@@ -556,8 +556,8 @@ class TestCollectorClassesFromGatherSubset(unittest.TestCase):
def test_unknown_collector(self):
# something claims 'unknown_collector' is a valid gather_subset, but there is
# no FactCollector mapped to 'unknown_collector'
- self.assertRaisesRegexp(TypeError,
- r'Bad subset.*unknown_collector.*given to Ansible.*allowed\:.*all,.*env.*',
- self._classes,
- all_collector_classes=default_collectors.collectors,
- gather_subset=['env', 'unknown_collector'])
+ self.assertRaisesRegex(TypeError,
+ r'Bad subset.*unknown_collector.*given to Ansible.*allowed\:.*all,.*env.*',
+ self._classes,
+ all_collector_classes=default_collectors.collectors,
+ gather_subset=['env', 'unknown_collector'])
diff --git a/test/units/module_utils/facts/test_collectors.py b/test/units/module_utils/facts/test_collectors.py
index 5492582b..a6f12b56 100644
--- a/test/units/module_utils/facts/test_collectors.py
+++ b/test/units/module_utils/facts/test_collectors.py
@@ -19,7 +19,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat.mock import Mock, patch
+import pytest
+
+from mock import Mock, patch
from . base import BaseFactsTest
@@ -369,7 +371,8 @@ class TestServiceMgrFacts(BaseFactsTest):
@patch('ansible.module_utils.facts.system.service_mgr.ServiceMgrFactCollector.is_systemd_managed', return_value=False)
@patch('ansible.module_utils.facts.system.service_mgr.ServiceMgrFactCollector.is_systemd_managed_offline', return_value=False)
@patch('ansible.module_utils.facts.system.service_mgr.os.path.exists', return_value=False)
- def test_service_mgr_runit(self, mock_gfc, mock_ism, mock_ismo, mock_ope):
+ @pytest.mark.skip(reason='faulty test')
+ def test_service_mgr_runit_one(self, mock_gfc, mock_ism, mock_ismo, mock_ope):
# no /proc/1/comm, ps returns non-0
# should fallback to 'service'
module = self._mock_module()
@@ -394,7 +397,8 @@ class TestServiceMgrFacts(BaseFactsTest):
@patch('ansible.module_utils.facts.system.service_mgr.ServiceMgrFactCollector.is_systemd_managed', return_value=False)
@patch('ansible.module_utils.facts.system.service_mgr.ServiceMgrFactCollector.is_systemd_managed_offline', return_value=False)
@patch('ansible.module_utils.facts.system.service_mgr.os.path.exists', return_value=False)
- def test_service_mgr_runit(self, mock_gfc, mock_ism, mock_ismo, mock_ope):
+ @pytest.mark.skip(reason='faulty test')
+ def test_service_mgr_runit_two(self, mock_gfc, mock_ism, mock_ismo, mock_ope):
# no /proc/1/comm, ps fails, distro and system are clowncar
# should end up return 'sys11'
module = self._mock_module()
diff --git a/test/units/module_utils/facts/test_facts.py b/test/units/module_utils/facts/test_facts.py
index c794f031..a49616fc 100644
--- a/test/units/module_utils/facts/test_facts.py
+++ b/test/units/module_utils/facts/test_facts.py
@@ -26,7 +26,7 @@ import pytest
# for testing
from units.compat import unittest
-from units.compat.mock import Mock, patch
+from mock import Mock, patch
from ansible.module_utils import facts
from ansible.module_utils.facts import hardware
diff --git a/test/units/module_utils/facts/test_sysctl.py b/test/units/module_utils/facts/test_sysctl.py
index c369b610..66336925 100644
--- a/test/units/module_utils/facts/test_sysctl.py
+++ b/test/units/module_utils/facts/test_sysctl.py
@@ -26,7 +26,7 @@ import pytest
# for testing
from units.compat import unittest
-from units.compat.mock import patch, MagicMock, mock_open, Mock
+from mock import patch, MagicMock, mock_open, Mock
from ansible.module_utils.facts.sysctl import get_sysctl
diff --git a/test/units/module_utils/facts/test_utils.py b/test/units/module_utils/facts/test_utils.py
index 28cb5d31..70db0475 100644
--- a/test/units/module_utils/facts/test_utils.py
+++ b/test/units/module_utils/facts/test_utils.py
@@ -18,7 +18,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import patch
+from mock import patch
from ansible.module_utils.facts import utils
diff --git a/test/units/module_utils/urls/test_RedirectHandlerFactory.py b/test/units/module_utils/urls/test_RedirectHandlerFactory.py
index aa3500a1..7bbe4b5b 100644
--- a/test/units/module_utils/urls/test_RedirectHandlerFactory.py
+++ b/test/units/module_utils/urls/test_RedirectHandlerFactory.py
@@ -130,9 +130,11 @@ def test_redir_validate_certs(urllib_req, request_body, mocker):
assert opener_mock.add_handler.call_count == int(not HAS_SSLCONTEXT)
-def test_redir_http_error_308_urllib2(urllib_req, request_body):
+def test_redir_http_error_308_urllib2(urllib_req, request_body, mocker):
+ redir_mock = mocker.patch.object(urllib_request.HTTPRedirectHandler, 'redirect_request')
handler = RedirectHandlerFactory('urllib2', False)
inst = handler()
- with pytest.raises(urllib_error.HTTPError):
- inst.redirect_request(urllib_req, request_body, 308, '308 Permanent Redirect', {}, 'https://docs.ansible.com/')
+ inst.redirect_request(urllib_req, request_body, 308, '308 Permanent Redirect', {}, 'https://docs.ansible.com/')
+
+ assert redir_mock.call_count == 1
diff --git a/test/units/modules/test_apt.py b/test/units/modules/test_apt.py
index 3daf3c11..78dbbade 100644
--- a/test/units/modules/test_apt.py
+++ b/test/units/modules/test_apt.py
@@ -4,7 +4,8 @@ __metaclass__ = type
import collections
import sys
-from units.compat import mock
+import mock
+
from units.compat import unittest
try:
diff --git a/test/units/modules/test_apt_key.py b/test/units/modules/test_apt_key.py
index e348db0c..39339d76 100644
--- a/test/units/modules/test_apt_key.py
+++ b/test/units/modules/test_apt_key.py
@@ -3,7 +3,8 @@ __metaclass__ = type
import os
-from units.compat import mock
+import mock
+
from units.compat import unittest
from ansible.modules import apt_key
diff --git a/test/units/modules/test_async_wrapper.py b/test/units/modules/test_async_wrapper.py
index 37b1fda3..eacb9361 100644
--- a/test/units/modules/test_async_wrapper.py
+++ b/test/units/modules/test_async_wrapper.py
@@ -11,7 +11,7 @@ import tempfile
import pytest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.modules import async_wrapper
from pprint import pprint
diff --git a/test/units/modules/test_hostname.py b/test/units/modules/test_hostname.py
index 2771293e..804ecf74 100644
--- a/test/units/modules/test_hostname.py
+++ b/test/units/modules/test_hostname.py
@@ -1,7 +1,11 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat.mock import patch, MagicMock, mock_open
+import os
+import shutil
+import tempfile
+
+from mock import patch, MagicMock, mock_open
from ansible.module_utils import basic
from ansible.module_utils.common._utils import get_all_subclasses
from ansible.modules import hostname
@@ -33,3 +37,111 @@ class TestHostname(ModuleTestCase):
self.assertFalse(
m.return_value.write.called,
msg='%s called write, should not have' % str(cls))
+
+ def test_all_named_strategies_exist(self):
+ """Loop through the STRATS and see if anything is missing."""
+ for _name, prefix in hostname.STRATS.items():
+ classname = "%sStrategy" % prefix
+ cls = getattr(hostname, classname, None)
+
+ if cls is None:
+ self.assertFalse(
+ cls is None, "%s is None, should be a subclass" % classname
+ )
+ else:
+ self.assertTrue(issubclass(cls, hostname.BaseStrategy))
+
+
+class TestRedhatStrategy(ModuleTestCase):
+ def setUp(self):
+ super(TestRedhatStrategy, self).setUp()
+ self.testdir = tempfile.mkdtemp(prefix='ansible-test-hostname-')
+ self.network_file = os.path.join(self.testdir, "network")
+
+ def tearDown(self):
+ super(TestRedhatStrategy, self).tearDown()
+ shutil.rmtree(self.testdir, ignore_errors=True)
+
+ @property
+ def instance(self):
+ self.module = MagicMock()
+ instance = hostname.RedHatStrategy(self.module)
+ instance.NETWORK_FILE = self.network_file
+ return instance
+
+ def test_get_permanent_hostname_missing(self):
+ self.assertIsNone(self.instance.get_permanent_hostname())
+ self.assertTrue(self.module.fail_json.called)
+ self.module.fail_json.assert_called_with(
+ "Unable to locate HOSTNAME entry in %s" % self.network_file
+ )
+
+ def test_get_permanent_hostname_line_missing(self):
+ with open(self.network_file, "w") as f:
+ f.write("# some other content\n")
+ self.assertIsNone(self.instance.get_permanent_hostname())
+ self.module.fail_json.assert_called_with(
+ "Unable to locate HOSTNAME entry in %s" % self.network_file
+ )
+
+ def test_get_permanent_hostname_existing(self):
+ with open(self.network_file, "w") as f:
+ f.write(
+ "some other content\n"
+ "HOSTNAME=foobar\n"
+ "more content\n"
+ )
+ self.assertEqual(self.instance.get_permanent_hostname(), "foobar")
+
+ def test_get_permanent_hostname_existing_whitespace(self):
+ with open(self.network_file, "w") as f:
+ f.write(
+ "some other content\n"
+ " HOSTNAME=foobar \n"
+ "more content\n"
+ )
+ self.assertEqual(self.instance.get_permanent_hostname(), "foobar")
+
+ def test_set_permanent_hostname_missing(self):
+ self.instance.set_permanent_hostname("foobar")
+ with open(self.network_file) as f:
+ self.assertEqual(f.read(), "HOSTNAME=foobar\n")
+
+ def test_set_permanent_hostname_line_missing(self):
+ with open(self.network_file, "w") as f:
+ f.write("# some other content\n")
+ self.instance.set_permanent_hostname("foobar")
+ with open(self.network_file) as f:
+ self.assertEqual(f.read(), "# some other content\nHOSTNAME=foobar\n")
+
+ def test_set_permanent_hostname_existing(self):
+ with open(self.network_file, "w") as f:
+ f.write(
+ "some other content\n"
+ "HOSTNAME=spam\n"
+ "more content\n"
+ )
+ self.instance.set_permanent_hostname("foobar")
+ with open(self.network_file) as f:
+ self.assertEqual(
+ f.read(),
+ "some other content\n"
+ "HOSTNAME=foobar\n"
+ "more content\n"
+ )
+
+ def test_set_permanent_hostname_existing_whitespace(self):
+ with open(self.network_file, "w") as f:
+ f.write(
+ "some other content\n"
+ " HOSTNAME=spam \n"
+ "more content\n"
+ )
+ self.instance.set_permanent_hostname("foobar")
+ with open(self.network_file) as f:
+ self.assertEqual(
+ f.read(),
+ "some other content\n"
+ "HOSTNAME=foobar\n"
+ "more content\n"
+ )
diff --git a/test/units/modules/test_iptables.py b/test/units/modules/test_iptables.py
index 5a55434f..5953334b 100644
--- a/test/units/modules/test_iptables.py
+++ b/test/units/modules/test_iptables.py
@@ -1,7 +1,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat.mock import patch
+from mock import patch
from ansible.module_utils import basic
from ansible.modules import iptables
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
@@ -171,8 +171,8 @@ class TestIptables(ModuleTestCase):
})
commands_results = [
- (1, '', ''),
- (0, '', '')
+ (1, '', ''), # check_rule_present
+ (0, '', ''), # check_chain_present
]
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
@@ -181,7 +181,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 1)
+ self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -207,8 +207,9 @@ class TestIptables(ModuleTestCase):
})
commands_results = [
- (1, '', ''),
- (0, '', '')
+ (1, '', ''), # check_rule_present
+ (0, '', ''), # check_chain_present
+ (0, '', ''),
]
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
@@ -217,7 +218,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 2)
+ self.assertEqual(run_command.call_count, 3)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -231,7 +232,7 @@ class TestIptables(ModuleTestCase):
'-j',
'ACCEPT'
])
- self.assertEqual(run_command.call_args_list[1][0][0], [
+ self.assertEqual(run_command.call_args_list[2][0][0], [
'/sbin/iptables',
'-t',
'filter',
@@ -261,7 +262,8 @@ class TestIptables(ModuleTestCase):
})
commands_results = [
- (1, '', ''),
+ (1, '', ''), # check_rule_present
+ (0, '', ''), # check_chain_present
]
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
@@ -270,7 +272,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 1)
+ self.assertEqual(run_command.call_count, 2)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -308,8 +310,9 @@ class TestIptables(ModuleTestCase):
})
commands_results = [
- (1, '', ''),
- (0, '', '')
+ (1, '', ''), # check_rule_present
+ (0, '', ''), # check_chain_present
+ (0, '', ''),
]
with patch.object(basic.AnsibleModule, 'run_command') as run_command:
@@ -318,7 +321,7 @@ class TestIptables(ModuleTestCase):
iptables.main()
self.assertTrue(result.exception.args[0]['changed'])
- self.assertEqual(run_command.call_count, 2)
+ self.assertEqual(run_command.call_count, 3)
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -340,7 +343,7 @@ class TestIptables(ModuleTestCase):
'--to-ports',
'8600'
])
- self.assertEqual(run_command.call_args_list[1][0][0], [
+ self.assertEqual(run_command.call_args_list[2][0][0], [
'/sbin/iptables',
'-t',
'nat',
@@ -1006,3 +1009,184 @@ class TestIptables(ModuleTestCase):
'-m', 'set',
'--match-set', 'banned_hosts', 'src,dst'
])
+
+ def test_chain_creation(self):
+ """Test chain creation when absent"""
+ set_module_args({
+ 'chain': 'FOOBAR',
+ 'state': 'present',
+ 'chain_management': True,
+ })
+
+ commands_results = [
+ (1, '', ''), # check_rule_present
+ (1, '', ''), # check_chain_present
+ (0, '', ''), # create_chain
+ (0, '', ''), # append_rule
+ ]
+
+ with patch.object(basic.AnsibleModule, 'run_command') as run_command:
+ run_command.side_effect = commands_results
+ with self.assertRaises(AnsibleExitJson) as result:
+ iptables.main()
+ self.assertTrue(result.exception.args[0]['changed'])
+
+ self.assertEqual(run_command.call_count, 4)
+
+ self.assertEqual(run_command.call_args_list[0][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-C', 'FOOBAR',
+ ])
+
+ self.assertEqual(run_command.call_args_list[1][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-L', 'FOOBAR',
+ ])
+
+ self.assertEqual(run_command.call_args_list[2][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-N', 'FOOBAR',
+ ])
+
+ self.assertEqual(run_command.call_args_list[3][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-A', 'FOOBAR',
+ ])
+
+ commands_results = [
+ (0, '', ''), # check_rule_present
+ ]
+
+ with patch.object(basic.AnsibleModule, 'run_command') as run_command:
+ run_command.side_effect = commands_results
+ with self.assertRaises(AnsibleExitJson) as result:
+ iptables.main()
+ self.assertFalse(result.exception.args[0]['changed'])
+
+ def test_chain_creation_check_mode(self):
+ """Test chain creation when absent"""
+ set_module_args({
+ 'chain': 'FOOBAR',
+ 'state': 'present',
+ 'chain_management': True,
+ '_ansible_check_mode': True,
+ })
+
+ commands_results = [
+ (1, '', ''), # check_rule_present
+ (1, '', ''), # check_chain_present
+ ]
+
+ with patch.object(basic.AnsibleModule, 'run_command') as run_command:
+ run_command.side_effect = commands_results
+ with self.assertRaises(AnsibleExitJson) as result:
+ iptables.main()
+ self.assertTrue(result.exception.args[0]['changed'])
+
+ self.assertEqual(run_command.call_count, 2)
+
+ self.assertEqual(run_command.call_args_list[0][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-C', 'FOOBAR',
+ ])
+
+ self.assertEqual(run_command.call_args_list[1][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-L', 'FOOBAR',
+ ])
+
+ commands_results = [
+ (0, '', ''), # check_rule_present
+ ]
+
+ with patch.object(basic.AnsibleModule, 'run_command') as run_command:
+ run_command.side_effect = commands_results
+ with self.assertRaises(AnsibleExitJson) as result:
+ iptables.main()
+ self.assertFalse(result.exception.args[0]['changed'])
+
+ def test_chain_deletion(self):
+ """Test chain deletion when present"""
+ set_module_args({
+ 'chain': 'FOOBAR',
+ 'state': 'absent',
+ 'chain_management': True,
+ })
+
+ commands_results = [
+ (0, '', ''), # check_chain_present
+ (0, '', ''), # delete_chain
+ ]
+
+ with patch.object(basic.AnsibleModule, 'run_command') as run_command:
+ run_command.side_effect = commands_results
+ with self.assertRaises(AnsibleExitJson) as result:
+ iptables.main()
+ self.assertTrue(result.exception.args[0]['changed'])
+
+ self.assertEqual(run_command.call_count, 2)
+
+ self.assertEqual(run_command.call_args_list[0][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-L', 'FOOBAR',
+ ])
+
+ self.assertEqual(run_command.call_args_list[1][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-X', 'FOOBAR',
+ ])
+
+ commands_results = [
+            (1, '', ''),  # check_chain_present
+ ]
+
+ with patch.object(basic.AnsibleModule, 'run_command') as run_command:
+ run_command.side_effect = commands_results
+ with self.assertRaises(AnsibleExitJson) as result:
+ iptables.main()
+ self.assertFalse(result.exception.args[0]['changed'])
+
+ def test_chain_deletion_check_mode(self):
+ """Test chain deletion when present"""
+ set_module_args({
+ 'chain': 'FOOBAR',
+ 'state': 'absent',
+ 'chain_management': True,
+ '_ansible_check_mode': True,
+ })
+
+ commands_results = [
+ (0, '', ''), # check_chain_present
+ ]
+
+ with patch.object(basic.AnsibleModule, 'run_command') as run_command:
+ run_command.side_effect = commands_results
+ with self.assertRaises(AnsibleExitJson) as result:
+ iptables.main()
+ self.assertTrue(result.exception.args[0]['changed'])
+
+ self.assertEqual(run_command.call_count, 1)
+
+ self.assertEqual(run_command.call_args_list[0][0][0], [
+ '/sbin/iptables',
+ '-t', 'filter',
+ '-L', 'FOOBAR',
+ ])
+
+ commands_results = [
+            (1, '', ''),  # check_chain_present
+ ]
+
+ with patch.object(basic.AnsibleModule, 'run_command') as run_command:
+ run_command.side_effect = commands_results
+ with self.assertRaises(AnsibleExitJson) as result:
+ iptables.main()
+ self.assertFalse(result.exception.args[0]['changed'])
diff --git a/test/units/modules/test_service_facts.py b/test/units/modules/test_service_facts.py
index 07f6827e..3a180dc9 100644
--- a/test/units/modules/test_service_facts.py
+++ b/test/units/modules/test_service_facts.py
@@ -6,7 +6,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import patch
+from mock import patch
from ansible.module_utils import basic
from ansible.modules.service_facts import AIXScanService
diff --git a/test/units/modules/utils.py b/test/units/modules/utils.py
index 6d169e36..92f4ceab 100644
--- a/test/units/modules/utils.py
+++ b/test/units/modules/utils.py
@@ -4,7 +4,7 @@ __metaclass__ = type
import json
from units.compat import unittest
-from units.compat.mock import patch
+from mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
diff --git a/test/units/parsing/test_ajson.py b/test/units/parsing/test_ajson.py
index c38f43ea..55e758e2 100644
--- a/test/units/parsing/test_ajson.py
+++ b/test/units/parsing/test_ajson.py
@@ -10,10 +10,10 @@ import json
import pytest
+from collections.abc import Mapping
from datetime import date, datetime
from pytz import timezone as tz
-from ansible.module_utils.common._collections_compat import Mapping
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
from ansible.utils.unsafe_proxy import AnsibleUnsafeText
diff --git a/test/units/parsing/test_dataloader.py b/test/units/parsing/test_dataloader.py
index 3cc8d451..ed365b13 100644
--- a/test/units/parsing/test_dataloader.py
+++ b/test/units/parsing/test_dataloader.py
@@ -22,7 +22,7 @@ __metaclass__ = type
import os
from units.compat import unittest
-from units.compat.mock import patch, mock_open
+from mock import patch, mock_open
from ansible.errors import AnsibleParserError, yaml_strings, AnsibleFileNotFound
from ansible.parsing.vault import AnsibleVaultError
from ansible.module_utils._text import to_text
@@ -138,8 +138,8 @@ class TestDataLoader(unittest.TestCase):
self.assertTrue(self._loader.is_directory(os.path.dirname(__file__)))
def test_get_file_contents_none_path(self):
- self.assertRaisesRegexp(AnsibleParserError, 'Invalid filename',
- self._loader._get_file_contents, None)
+ self.assertRaisesRegex(AnsibleParserError, 'Invalid filename',
+ self._loader._get_file_contents, None)
def test_get_file_contents_non_existent_path(self):
self.assertRaises(AnsibleFileNotFound, self._loader._get_file_contents, '/non_existent_file')
@@ -169,7 +169,7 @@ class TestPathDwimRelativeStackDataLoader(unittest.TestCase):
self._loader = DataLoader()
def test_none(self):
- self.assertRaisesRegexp(AnsibleFileNotFound, 'on the Ansible Controller', self._loader.path_dwim_relative_stack, None, None, None)
+ self.assertRaisesRegex(AnsibleFileNotFound, 'on the Ansible Controller', self._loader.path_dwim_relative_stack, None, None, None)
def test_empty_strings(self):
self.assertEqual(self._loader.path_dwim_relative_stack('', '', ''), './')
@@ -218,7 +218,7 @@ class TestDataLoaderWithVault(unittest.TestCase):
self.assertRaises(AnsibleVaultError, self._loader.get_real_file, self.test_vault_data_path)
def test_get_real_file_not_a_path(self):
- self.assertRaisesRegexp(AnsibleParserError, 'Invalid filename', self._loader.get_real_file, None)
+ self.assertRaisesRegex(AnsibleParserError, 'Invalid filename', self._loader.get_real_file, None)
@patch.multiple(DataLoader, path_exists=lambda s, x: True, is_file=lambda s, x: True)
def test_parse_from_vault_1_1_file(self):
diff --git a/test/units/parsing/test_mod_args.py b/test/units/parsing/test_mod_args.py
index 50c3b331..5d3f5d25 100644
--- a/test/units/parsing/test_mod_args.py
+++ b/test/units/parsing/test_mod_args.py
@@ -118,7 +118,7 @@ class TestModArgsDwim:
assert err.value.args[0] == msg
- def test_multiple_actions(self):
+ def test_multiple_actions_ping_shell(self):
args_dict = {'ping': 'data=hi', 'shell': 'echo hi'}
m = ModuleArgsParser(args_dict)
with pytest.raises(AnsibleParserError) as err:
diff --git a/test/units/parsing/vault/test_vault.py b/test/units/parsing/vault/test_vault.py
index 0a9e395b..f92d451c 100644
--- a/test/units/parsing/vault/test_vault.py
+++ b/test/units/parsing/vault/test_vault.py
@@ -30,7 +30,7 @@ from binascii import hexlify
import pytest
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible import errors
from ansible.module_utils import six
@@ -51,18 +51,18 @@ class TestUnhexlify(unittest.TestCase):
def test_odd_length(self):
b_data = b'123456789abcdefghijklmnopqrstuvwxyz'
- self.assertRaisesRegexp(vault.AnsibleVaultFormatError,
- '.*Vault format unhexlify error.*',
- vault._unhexlify,
- b_data)
+ self.assertRaisesRegex(vault.AnsibleVaultFormatError,
+ '.*Vault format unhexlify error.*',
+ vault._unhexlify,
+ b_data)
def test_nonhex(self):
b_data = b'6z36316566653264333665333637623064303639353237620a636366633565663263336335656532'
- self.assertRaisesRegexp(vault.AnsibleVaultFormatError,
- '.*Vault format unhexlify error.*Non-hexadecimal digit found',
- vault._unhexlify,
- b_data)
+ self.assertRaisesRegex(vault.AnsibleVaultFormatError,
+ '.*Vault format unhexlify error.*Non-hexadecimal digit found',
+ vault._unhexlify,
+ b_data)
class TestParseVaulttext(unittest.TestCase):
@@ -91,10 +91,10 @@ class TestParseVaulttext(unittest.TestCase):
b_vaulttext_envelope = to_bytes(vaulttext_envelope, errors='strict', encoding='utf-8')
b_vaulttext, b_version, cipher_name, vault_id = vault.parse_vaulttext_envelope(b_vaulttext_envelope)
- self.assertRaisesRegexp(vault.AnsibleVaultFormatError,
- '.*Vault format unhexlify error.*Non-hexadecimal digit found',
- vault.parse_vaulttext,
- b_vaulttext_envelope)
+ self.assertRaisesRegex(vault.AnsibleVaultFormatError,
+ '.*Vault format unhexlify error.*Non-hexadecimal digit found',
+ vault.parse_vaulttext,
+ b_vaulttext_envelope)
class TestVaultSecret(unittest.TestCase):
@@ -133,18 +133,18 @@ class TestPromptVaultSecret(unittest.TestCase):
@patch('ansible.parsing.vault.display.prompt', side_effect=EOFError)
def test_prompt_eoferror(self, mock_display_prompt):
secret = vault.PromptVaultSecret(vault_id='test_id')
- self.assertRaisesRegexp(vault.AnsibleVaultError,
- 'EOFError.*test_id',
- secret.load)
+ self.assertRaisesRegex(vault.AnsibleVaultError,
+ 'EOFError.*test_id',
+ secret.load)
@patch('ansible.parsing.vault.display.prompt', side_effect=['first_password', 'second_password'])
def test_prompt_passwords_dont_match(self, mock_display_prompt):
secret = vault.PromptVaultSecret(vault_id='test_id',
prompt_formats=['Vault password: ',
'Confirm Vault password: '])
- self.assertRaisesRegexp(errors.AnsibleError,
- 'Passwords do not match',
- secret.load)
+ self.assertRaisesRegex(errors.AnsibleError,
+ 'Passwords do not match',
+ secret.load)
class TestFileVaultSecret(unittest.TestCase):
@@ -200,9 +200,9 @@ class TestFileVaultSecret(unittest.TestCase):
fake_loader = DictDataLoader({tmp_file.name: ''})
secret = vault.FileVaultSecret(loader=fake_loader, filename=tmp_file.name)
- self.assertRaisesRegexp(vault.AnsibleVaultPasswordError,
- 'Invalid vault password was provided from file.*%s' % tmp_file.name,
- secret.load)
+ self.assertRaisesRegex(vault.AnsibleVaultPasswordError,
+ 'Invalid vault password was provided from file.*%s' % tmp_file.name,
+ secret.load)
os.unlink(tmp_file.name)
@@ -241,9 +241,9 @@ class TestFileVaultSecret(unittest.TestCase):
fake_loader = DictDataLoader({filename: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=filename)
- self.assertRaisesRegexp(errors.AnsibleError,
- '.*Could not read vault password file.*/dev/null/foobar.*Not a directory',
- secret.load)
+ self.assertRaisesRegex(errors.AnsibleError,
+ '.*Could not read vault password file.*/dev/null/foobar.*Not a directory',
+ secret.load)
def test_file_not_found(self):
tmp_file = tempfile.NamedTemporaryFile()
@@ -253,9 +253,9 @@ class TestFileVaultSecret(unittest.TestCase):
fake_loader = DictDataLoader({filename: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=filename)
- self.assertRaisesRegexp(errors.AnsibleError,
- '.*Could not read vault password file.*%s.*' % filename,
- secret.load)
+ self.assertRaisesRegex(errors.AnsibleError,
+ '.*Could not read vault password file.*%s.*' % filename,
+ secret.load)
class TestScriptVaultSecret(unittest.TestCase):
@@ -285,9 +285,9 @@ class TestScriptVaultSecret(unittest.TestCase):
secret = vault.ScriptVaultSecret()
with patch.object(secret, 'loader') as mock_loader:
mock_loader.is_executable = MagicMock(return_value=True)
- self.assertRaisesRegexp(vault.AnsibleVaultPasswordError,
- 'Invalid vault password was provided from script',
- secret.load)
+ self.assertRaisesRegex(vault.AnsibleVaultPasswordError,
+ 'Invalid vault password was provided from script',
+ secret.load)
@patch('ansible.parsing.vault.subprocess.Popen')
def test_read_file_os_error(self, mock_popen):
@@ -296,9 +296,9 @@ class TestScriptVaultSecret(unittest.TestCase):
secret = vault.ScriptVaultSecret()
with patch.object(secret, 'loader') as mock_loader:
mock_loader.is_executable = MagicMock(return_value=True)
- self.assertRaisesRegexp(errors.AnsibleError,
- 'Problem running vault password script.*',
- secret.load)
+ self.assertRaisesRegex(errors.AnsibleError,
+ 'Problem running vault password script.*',
+ secret.load)
@patch('ansible.parsing.vault.subprocess.Popen')
def test_read_file_not_executable(self, mock_popen):
@@ -306,9 +306,9 @@ class TestScriptVaultSecret(unittest.TestCase):
secret = vault.ScriptVaultSecret()
with patch.object(secret, 'loader') as mock_loader:
mock_loader.is_executable = MagicMock(return_value=False)
- self.assertRaisesRegexp(vault.AnsibleVaultError,
- 'The vault password script .* was not executable',
- secret.load)
+ self.assertRaisesRegex(vault.AnsibleVaultError,
+ 'The vault password script .* was not executable',
+ secret.load)
@patch('ansible.parsing.vault.subprocess.Popen')
def test_read_file_non_zero_return_code(self, mock_popen):
@@ -319,9 +319,9 @@ class TestScriptVaultSecret(unittest.TestCase):
secret = vault.ScriptVaultSecret(filename='/dev/null/some_vault_secret')
with patch.object(secret, 'loader') as mock_loader:
mock_loader.is_executable = MagicMock(return_value=True)
- self.assertRaisesRegexp(errors.AnsibleError,
- r'Vault password script.*returned non-zero \(%s\): %s' % (rc, stderr),
- secret.load)
+ self.assertRaisesRegex(errors.AnsibleError,
+ r'Vault password script.*returned non-zero \(%s\): %s' % (rc, stderr),
+ secret.load)
class TestScriptIsClient(unittest.TestCase):
@@ -382,11 +382,11 @@ class TestGetFileVaultSecret(unittest.TestCase):
filename = '/dev/null/foobar'
fake_loader = DictDataLoader({filename: 'sdfadf'})
- self.assertRaisesRegexp(errors.AnsibleError,
- '.*The vault password file %s was not found.*' % filename,
- vault.get_file_vault_secret,
- filename=filename,
- loader=fake_loader)
+ self.assertRaisesRegex(errors.AnsibleError,
+ '.*The vault password file %s was not found.*' % filename,
+ vault.get_file_vault_secret,
+ filename=filename,
+ loader=fake_loader)
def test_file_not_found(self):
tmp_file = tempfile.NamedTemporaryFile()
@@ -395,11 +395,11 @@ class TestGetFileVaultSecret(unittest.TestCase):
fake_loader = DictDataLoader({filename: 'sdfadf'})
- self.assertRaisesRegexp(errors.AnsibleError,
- '.*The vault password file %s was not found.*' % filename,
- vault.get_file_vault_secret,
- filename=filename,
- loader=fake_loader)
+ self.assertRaisesRegex(errors.AnsibleError,
+ '.*The vault password file %s was not found.*' % filename,
+ vault.get_file_vault_secret,
+ filename=filename,
+ loader=fake_loader)
class TestVaultIsEncrypted(unittest.TestCase):
@@ -645,10 +645,10 @@ class TestVaultLib(unittest.TestCase):
v = vault.VaultLib(vault_secrets)
plaintext = u'Some text to encrypt in a café'
- self.assertRaisesRegexp(vault.AnsibleVaultError,
- '.*A vault password must be specified to encrypt data.*',
- v.encrypt,
- plaintext)
+ self.assertRaisesRegex(vault.AnsibleVaultError,
+ '.*A vault password must be specified to encrypt data.*',
+ v.encrypt,
+ plaintext)
def test_format_vaulttext_envelope(self):
cipher_name = "TEST"
@@ -712,10 +712,10 @@ class TestVaultLib(unittest.TestCase):
v_none = vault.VaultLib(None)
# so set secrets None explicitly
v_none.secrets = None
- self.assertRaisesRegexp(vault.AnsibleVaultError,
- '.*A vault password must be specified to decrypt data.*',
- v_none.decrypt,
- b_vaulttext)
+ self.assertRaisesRegex(vault.AnsibleVaultError,
+ '.*A vault password must be specified to decrypt data.*',
+ v_none.decrypt,
+ b_vaulttext)
def test_encrypt_decrypt_aes256_empty_secrets(self):
vault_secrets = self._vault_secrets_from_password('default', 'ansible')
@@ -727,10 +727,10 @@ class TestVaultLib(unittest.TestCase):
vault_secrets_empty = []
v_none = vault.VaultLib(vault_secrets_empty)
- self.assertRaisesRegexp(vault.AnsibleVaultError,
- '.*Attempting to decrypt but no vault secrets found.*',
- v_none.decrypt,
- b_vaulttext)
+ self.assertRaisesRegex(vault.AnsibleVaultError,
+ '.*Attempting to decrypt but no vault secrets found.*',
+ v_none.decrypt,
+ b_vaulttext)
def test_encrypt_decrypt_aes256_multiple_secrets_all_wrong(self):
plaintext = u'Some text to encrypt in a café'
@@ -740,11 +740,11 @@ class TestVaultLib(unittest.TestCase):
('wrong-password', TextVaultSecret('wrong-password'))]
v_multi = vault.VaultLib(vault_secrets)
- self.assertRaisesRegexp(errors.AnsibleError,
- '.*Decryption failed.*',
- v_multi.decrypt,
- b_vaulttext,
- filename='/dev/null/fake/filename')
+ self.assertRaisesRegex(errors.AnsibleError,
+ '.*Decryption failed.*',
+ v_multi.decrypt,
+ b_vaulttext,
+ filename='/dev/null/fake/filename')
def test_encrypt_decrypt_aes256_multiple_secrets_one_valid(self):
plaintext = u'Some text to encrypt in a café'
diff --git a/test/units/parsing/vault/test_vault_editor.py b/test/units/parsing/vault/test_vault_editor.py
index 1483bb52..3f19b893 100644
--- a/test/units/parsing/vault/test_vault_editor.py
+++ b/test/units/parsing/vault/test_vault_editor.py
@@ -27,7 +27,7 @@ from io import BytesIO, StringIO
import pytest
from units.compat import unittest
-from units.compat.mock import patch
+from mock import patch
from ansible import errors
from ansible.parsing import vault
@@ -142,11 +142,11 @@ class TestVaultEditor(unittest.TestCase):
ve = self._vault_editor()
- self.assertRaisesRegexp(errors.AnsibleError,
- error_txt,
- ve._edit_file_helper,
- src_file_path,
- self.vault_secret)
+ self.assertRaisesRegex(errors.AnsibleError,
+ error_txt,
+ ve._edit_file_helper,
+ src_file_path,
+ self.vault_secret)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_symlink_target(self, mock_sp_call):
@@ -249,11 +249,11 @@ class TestVaultEditor(unittest.TestCase):
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
- self.assertRaisesRegexp(errors.AnsibleError,
- 'The value for the new_password to rekey',
- ve.rekey_file,
- src_file_path,
- None)
+ self.assertRaisesRegex(errors.AnsibleError,
+ 'The value for the new_password to rekey',
+ ve.rekey_file,
+ src_file_path,
+ None)
def test_rekey_file_not_encrypted(self):
self._test_dir = self._create_test_dir()
@@ -264,10 +264,10 @@ class TestVaultEditor(unittest.TestCase):
ve = self._vault_editor()
new_password = 'password2:electricbugaloo'
- self.assertRaisesRegexp(errors.AnsibleError,
- 'input is not vault encrypted data',
- ve.rekey_file,
- src_file_path, new_password)
+ self.assertRaisesRegex(errors.AnsibleError,
+ 'input is not vault encrypted data',
+ ve.rekey_file,
+ src_file_path, new_password)
def test_plaintext(self):
self._test_dir = self._create_test_dir()
@@ -288,10 +288,10 @@ class TestVaultEditor(unittest.TestCase):
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
- self.assertRaisesRegexp(errors.AnsibleError,
- 'input is not vault encrypted data',
- ve.plaintext,
- src_file_path)
+ self.assertRaisesRegex(errors.AnsibleError,
+ 'input is not vault encrypted data',
+ ve.plaintext,
+ src_file_path)
def test_encrypt_file(self):
self._test_dir = self._create_test_dir()
@@ -426,10 +426,10 @@ class TestVaultEditor(unittest.TestCase):
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
- self.assertRaisesRegexp(errors.AnsibleError,
- 'input is not vault encrypted data',
- ve.edit_file,
- src_file_path)
+ self.assertRaisesRegex(errors.AnsibleError,
+ 'input is not vault encrypted data',
+ ve.edit_file,
+ src_file_path)
def test_create_file_exists(self):
self._test_dir = self._create_test_dir()
@@ -437,11 +437,11 @@ class TestVaultEditor(unittest.TestCase):
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
ve = self._vault_editor()
- self.assertRaisesRegexp(errors.AnsibleError,
- 'please use .edit. instead',
- ve.create_file,
- src_file_path,
- self.vault_secret)
+ self.assertRaisesRegex(errors.AnsibleError,
+ 'please use .edit. instead',
+ ve.create_file,
+ src_file_path,
+ self.vault_secret)
def test_decrypt_file_exception(self):
self._test_dir = self._create_test_dir()
@@ -449,10 +449,10 @@ class TestVaultEditor(unittest.TestCase):
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
ve = self._vault_editor()
- self.assertRaisesRegexp(errors.AnsibleError,
- 'input is not vault encrypted data',
- ve.decrypt_file,
- src_file_path)
+ self.assertRaisesRegex(errors.AnsibleError,
+ 'input is not vault encrypted data',
+ ve.decrypt_file,
+ src_file_path)
@patch.object(vault.VaultEditor, '_editor_shell_command')
def test_create_file(self, mock_editor_shell_command):
diff --git a/test/units/parsing/yaml/test_loader.py b/test/units/parsing/yaml/test_loader.py
index fbe69a97..117f80a7 100644
--- a/test/units/parsing/yaml/test_loader.py
+++ b/test/units/parsing/yaml/test_loader.py
@@ -20,13 +20,13 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+from collections.abc import Sequence, Set, Mapping
from io import StringIO
from units.compat import unittest
from ansible import errors
from ansible.module_utils.six import text_type, binary_type
-from ansible.module_utils.common._collections_compat import Sequence, Set, Mapping
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing import vault
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
@@ -280,11 +280,11 @@ class TestAnsibleLoaderVault(unittest.TestCase, YamlTestUtils):
different_vault_string = data_from_yaml['different_secret']
self.assertEqual(vault_string, another_vault_string)
- self.assertNotEquals(vault_string, different_vault_string)
+ self.assertNotEqual(vault_string, different_vault_string)
# More testing of __eq__/__ne__
self.assertTrue('some string' != vault_string)
- self.assertNotEquals('some string', vault_string)
+ self.assertNotEqual('some string', vault_string)
# Note this is a compare of the str/unicode of these, they are different types
# so we want to test self == other, and other == self etc
diff --git a/test/units/parsing/yaml/test_objects.py b/test/units/parsing/yaml/test_objects.py
index d4529eed..f64b708f 100644
--- a/test/units/parsing/yaml/test_objects.py
+++ b/test/units/parsing/yaml/test_objects.py
@@ -52,7 +52,7 @@ class TestAnsibleVaultUnicodeNoVault(unittest.TestCase, YamlTestUtils):
self.assertIsInstance(avu, objects.AnsibleVaultEncryptedUnicode)
self.assertTrue(avu.vault is None)
# AnsibleVaultEncryptedUnicode without a vault should never == any string
- self.assertNotEquals(avu, seq)
+ self.assertNotEqual(avu, seq)
def assert_values(self, seq):
avu = objects.AnsibleVaultEncryptedUnicode(seq)
diff --git a/test/units/playbook/role/test_include_role.py b/test/units/playbook/role/test_include_role.py
index 7a04b35f..79821b40 100644
--- a/test/units/playbook/role/test_include_role.py
+++ b/test/units/playbook/role/test_include_role.py
@@ -20,7 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import patch
+from mock import patch
from ansible.playbook import Play
from ansible.playbook.role_include import IncludeRole
diff --git a/test/units/playbook/role/test_role.py b/test/units/playbook/role/test_role.py
index 3aa30b8b..dacbc79c 100644
--- a/test/units/playbook/role/test_role.py
+++ b/test/units/playbook/role/test_role.py
@@ -19,11 +19,12 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+from collections.abc import Container
+
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
-from ansible.module_utils.common._collections_compat import Container
from ansible.playbook.block import Block
from units.mock.loader import DictDataLoader
diff --git a/test/units/playbook/test_base.py b/test/units/playbook/test_base.py
index 648200af..de3412e5 100644
--- a/test/units/playbook/test_base.py
+++ b/test/units/playbook/test_base.py
@@ -480,7 +480,7 @@ class TestBaseSubClass(TestBase):
def test_attr_dict_string(self):
test_value = 'just_some_random_string'
ds = {'test_attr_dict': test_value}
- self.assertRaisesRegexp(AnsibleParserError, 'is not a dictionary', self._base_validate, ds)
+ self.assertRaisesRegex(AnsibleParserError, 'is not a dictionary', self._base_validate, ds)
def test_attr_class(self):
esc = ExampleSubClass()
@@ -503,14 +503,14 @@ class TestBaseSubClass(TestBase):
def test_attr_class_post_validate_class_not_instance(self):
not_a_esc = ExampleSubClass
ds = {'test_attr_class_post_validate': not_a_esc}
- self.assertRaisesRegexp(AnsibleParserError, 'is not a valid.*got a.*Meta.*instead',
- self._base_validate, ds)
+ self.assertRaisesRegex(AnsibleParserError, 'is not a valid.*got a.*Meta.*instead',
+ self._base_validate, ds)
def test_attr_class_post_validate_wrong_class(self):
not_a_esc = 37
ds = {'test_attr_class_post_validate': not_a_esc}
- self.assertRaisesRegexp(AnsibleParserError, 'is not a valid.*got a.*int.*instead',
- self._base_validate, ds)
+ self.assertRaisesRegex(AnsibleParserError, 'is not a valid.*got a.*int.*instead',
+ self._base_validate, ds)
def test_attr_remote_user(self):
ds = {'remote_user': 'testuser'}
@@ -599,8 +599,8 @@ class TestBaseSubClass(TestBase):
bsc.load_data(ds)
fake_loader = DictDataLoader({})
templar = Templar(loader=fake_loader)
- self.assertRaisesRegexp(AnsibleParserError, 'cannot have empty values',
- bsc.post_validate, templar)
+ self.assertRaisesRegex(AnsibleParserError, 'cannot have empty values',
+ bsc.post_validate, templar)
def test_attr_unknown(self):
a_list = ['some string']
diff --git a/test/units/playbook/test_conditional.py b/test/units/playbook/test_conditional.py
index 53811b6b..17284ca2 100644
--- a/test/units/playbook/test_conditional.py
+++ b/test/units/playbook/test_conditional.py
@@ -3,7 +3,7 @@ __metaclass__ = type
from units.compat import unittest
from units.mock.loader import DictDataLoader
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.template import Templar
from ansible import errors
@@ -50,8 +50,8 @@ class TestConditional(unittest.TestCase):
def test_undefined(self):
when = [u"{{ some_undefined_thing }}"]
- self.assertRaisesRegexp(errors.AnsibleError, "The conditional check '{{ some_undefined_thing }}' failed",
- self._eval_con, when, {})
+ self.assertRaisesRegex(errors.AnsibleError, "The conditional check '{{ some_undefined_thing }}' failed",
+ self._eval_con, when, {})
def test_defined(self):
variables = {'some_defined_thing': True}
@@ -100,12 +100,12 @@ class TestConditional(unittest.TestCase):
when = [u"some_dict.some_dict_key1 == hostvars['host3']"]
# self._eval_con(when, variables)
- self.assertRaisesRegexp(errors.AnsibleError,
- r"The conditional check 'some_dict.some_dict_key1 == hostvars\['host3'\]' failed",
- # "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed",
- # "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed.",
- self._eval_con,
- when, variables)
+ self.assertRaisesRegex(errors.AnsibleError,
+ r"The conditional check 'some_dict.some_dict_key1 == hostvars\['host3'\]' failed",
+ # "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed",
+ # "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed.",
+ self._eval_con,
+ when, variables)
def test_dict_undefined_values_bare(self):
variables = {'dict_value': 1,
@@ -116,10 +116,10 @@ class TestConditional(unittest.TestCase):
# raises an exception when a non-string conditional is passed to extract_defined_undefined()
when = [u"some_defined_dict_with_undefined_values"]
- self.assertRaisesRegexp(errors.AnsibleError,
- "The conditional check 'some_defined_dict_with_undefined_values' failed.",
- self._eval_con,
- when, variables)
+ self.assertRaisesRegex(errors.AnsibleError,
+ "The conditional check 'some_defined_dict_with_undefined_values' failed.",
+ self._eval_con,
+ when, variables)
def test_dict_undefined_values_is_defined(self):
variables = {'dict_value': 1,
@@ -129,10 +129,10 @@ class TestConditional(unittest.TestCase):
}}
when = [u"some_defined_dict_with_undefined_values is defined"]
- self.assertRaisesRegexp(errors.AnsibleError,
- "The conditional check 'some_defined_dict_with_undefined_values is defined' failed.",
- self._eval_con,
- when, variables)
+ self.assertRaisesRegex(errors.AnsibleError,
+ "The conditional check 'some_defined_dict_with_undefined_values is defined' failed.",
+ self._eval_con,
+ when, variables)
def test_is_defined(self):
variables = {'some_defined_thing': True}
@@ -195,10 +195,10 @@ class TestConditional(unittest.TestCase):
u'hostvars["some_host"] is defined',
u"{{ compare_targets.triple }} is defined",
u"{{ compare_targets.quadruple }} is defined"]
- self.assertRaisesRegexp(errors.AnsibleError,
- "The conditional check '{{ compare_targets.triple }} is defined' failed",
- self._eval_con,
- when, variables)
+ self.assertRaisesRegex(errors.AnsibleError,
+ "The conditional check '{{ compare_targets.triple }} is defined' failed",
+ self._eval_con,
+ when, variables)
def test_is_hostvars_host_is_defined(self):
variables = {'hostvars': {'some_host': {}, }}
diff --git a/test/units/playbook/test_helpers.py b/test/units/playbook/test_helpers.py
index 8574cb4c..a921a727 100644
--- a/test/units/playbook/test_helpers.py
+++ b/test/units/playbook/test_helpers.py
@@ -22,7 +22,7 @@ __metaclass__ = type
import os
from units.compat import unittest
-from units.compat.mock import MagicMock
+from mock import MagicMock
from units.mock.loader import DictDataLoader
from ansible import errors
@@ -107,30 +107,30 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
def test_empty_task(self):
ds = [{}]
- self.assertRaisesRegexp(errors.AnsibleParserError,
- "no module/action detected in task",
- helpers.load_list_of_tasks,
- ds, play=self.mock_play,
- variable_manager=self.mock_variable_manager, loader=self.fake_loader)
+ self.assertRaisesRegex(errors.AnsibleParserError,
+ "no module/action detected in task",
+ helpers.load_list_of_tasks,
+ ds, play=self.mock_play,
+ variable_manager=self.mock_variable_manager, loader=self.fake_loader)
def test_empty_task_use_handlers(self):
ds = [{}]
- self.assertRaisesRegexp(errors.AnsibleParserError,
- "no module/action detected in task.",
- helpers.load_list_of_tasks,
- ds,
- use_handlers=True,
- play=self.mock_play,
- variable_manager=self.mock_variable_manager,
- loader=self.fake_loader)
+ self.assertRaisesRegex(errors.AnsibleParserError,
+ "no module/action detected in task.",
+ helpers.load_list_of_tasks,
+ ds,
+ use_handlers=True,
+ play=self.mock_play,
+ variable_manager=self.mock_variable_manager,
+ loader=self.fake_loader)
def test_one_bogus_block(self):
ds = [{'block': None}]
- self.assertRaisesRegexp(errors.AnsibleParserError,
- "A malformed block was encountered",
- helpers.load_list_of_tasks,
- ds, play=self.mock_play,
- variable_manager=self.mock_variable_manager, loader=self.fake_loader)
+ self.assertRaisesRegex(errors.AnsibleParserError,
+ "A malformed block was encountered",
+ helpers.load_list_of_tasks,
+ ds, play=self.mock_play,
+ variable_manager=self.mock_variable_manager, loader=self.fake_loader)
def test_unknown_action(self):
action_name = 'foo_test_unknown_action'
@@ -172,11 +172,11 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
def test_one_bogus_block_use_handlers(self):
ds = [{'block': True}]
- self.assertRaisesRegexp(errors.AnsibleParserError,
- "A malformed block was encountered",
- helpers.load_list_of_tasks,
- ds, play=self.mock_play, use_handlers=True,
- variable_manager=self.mock_variable_manager, loader=self.fake_loader)
+ self.assertRaisesRegex(errors.AnsibleParserError,
+ "A malformed block was encountered",
+ helpers.load_list_of_tasks,
+ ds, play=self.mock_play, use_handlers=True,
+ variable_manager=self.mock_variable_manager, loader=self.fake_loader)
def test_one_bogus_include(self):
ds = [{'include': 'somefile.yml'}]
@@ -320,11 +320,11 @@ class TestLoadListOfRoles(unittest.TestCase, MixinForMocks):
def test_empty_role(self):
ds = [{}]
- self.assertRaisesRegexp(errors.AnsibleError,
- "role definitions must contain a role name",
- helpers.load_list_of_roles,
- ds, self.mock_play,
- variable_manager=self.mock_variable_manager, loader=self.fake_role_loader)
+ self.assertRaisesRegex(errors.AnsibleError,
+ "role definitions must contain a role name",
+ helpers.load_list_of_roles,
+ ds, self.mock_play,
+ variable_manager=self.mock_variable_manager, loader=self.fake_role_loader)
def test_empty_role_just_name(self):
ds = [{'name': 'bogus_role'}]
@@ -359,16 +359,16 @@ class TestLoadListOfBlocks(unittest.TestCase, MixinForMocks):
def test_empty_block(self):
ds = [{}]
mock_play = MagicMock(name='MockPlay')
- self.assertRaisesRegexp(errors.AnsibleParserError,
- "no module/action detected in task",
- helpers.load_list_of_blocks,
- ds, mock_play,
- parent_block=None,
- role=None,
- task_include=None,
- use_handlers=False,
- variable_manager=None,
- loader=None)
+ self.assertRaisesRegex(errors.AnsibleParserError,
+ "no module/action detected in task",
+ helpers.load_list_of_blocks,
+ ds, mock_play,
+ parent_block=None,
+ role=None,
+ task_include=None,
+ use_handlers=False,
+ variable_manager=None,
+ loader=None)
def test_block_unknown_action(self):
ds = [{'action': 'foo', 'collections': []}]
diff --git a/test/units/playbook/test_included_file.py b/test/units/playbook/test_included_file.py
index f143acb9..bf79b927 100644
--- a/test/units/playbook/test_included_file.py
+++ b/test/units/playbook/test_included_file.py
@@ -23,7 +23,7 @@ import os
import pytest
-from units.compat.mock import MagicMock
+from mock import MagicMock
from units.mock.loader import DictDataLoader
from ansible.playbook.block import Block
diff --git a/test/units/playbook/test_task.py b/test/units/playbook/test_task.py
index cc053885..53a66705 100644
--- a/test/units/playbook/test_task.py
+++ b/test/units/playbook/test_task.py
@@ -20,7 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import patch
+from mock import patch
from ansible.playbook.task import Task
from ansible.parsing.yaml import objects
from ansible import errors
diff --git a/test/units/plugins/action/test_action.py b/test/units/plugins/action/test_action.py
index 26c86bd6..70885181 100644
--- a/test/units/plugins/action/test_action.py
+++ b/test/units/plugins/action/test_action.py
@@ -25,7 +25,7 @@ import re
from ansible import constants as C
from units.compat import unittest
-from units.compat.mock import patch, MagicMock, mock_open
+from mock import patch, MagicMock, mock_open
from ansible.errors import AnsibleError, AnsibleAuthenticationFailure
from ansible.module_utils.six import text_type
@@ -346,7 +346,7 @@ class TestActionBase(unittest.TestCase):
self.assertEqual(runWithNoExpectation(execute), remote_paths)
def assertThrowRegex(regex, execute=False):
- self.assertRaisesRegexp(
+ self.assertRaisesRegex(
AnsibleError,
regex,
action_base._fixup_perms2,
diff --git a/test/units/plugins/action/test_gather_facts.py b/test/units/plugins/action/test_gather_facts.py
index 8f860e4a..e8a607b7 100644
--- a/test/units/plugins/action/test_gather_facts.py
+++ b/test/units/plugins/action/test_gather_facts.py
@@ -19,7 +19,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import MagicMock, patch
+from mock import MagicMock, patch
from ansible import constants as C
from ansible.playbook.task import Task
diff --git a/test/units/plugins/action/test_raw.py b/test/units/plugins/action/test_raw.py
index a8bde6c1..da216385 100644
--- a/test/units/plugins/action/test_raw.py
+++ b/test/units/plugins/action/test_raw.py
@@ -22,7 +22,7 @@ import os
from ansible.errors import AnsibleActionFail
from units.compat import unittest
-from units.compat.mock import MagicMock, Mock
+from mock import MagicMock, Mock
from ansible.plugins.action.raw import ActionModule
from ansible.playbook.task import Task
from ansible.plugins.loader import connection_loader
diff --git a/test/units/plugins/cache/test_cache.py b/test/units/plugins/cache/test_cache.py
index c4e0079a..d0a39f39 100644
--- a/test/units/plugins/cache/test_cache.py
+++ b/test/units/plugins/cache/test_cache.py
@@ -23,7 +23,9 @@ import os
import shutil
import tempfile
-from units.compat import unittest, mock
+import mock
+
+from units.compat import unittest
from ansible.errors import AnsibleError
from ansible.plugins.cache import CachePluginAdjudicator
from ansible.plugins.cache.base import BaseCacheModule
@@ -185,9 +187,9 @@ class TestFactCache(unittest.TestCase):
# See https://github.com/ansible/ansible/issues/18751
# Note no fact_connection config set, so this will fail
with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'):
- self.assertRaisesRegexp(AnsibleError,
- "Unable to load the facts cache plugin.*json.*",
- FactCache)
+ self.assertRaisesRegex(AnsibleError,
+ "Unable to load the facts cache plugin.*json.*",
+ FactCache)
def test_update(self):
self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
diff --git a/test/units/plugins/callback/test_callback.py b/test/units/plugins/callback/test_callback.py
index c2ffbb4d..81ee3745 100644
--- a/test/units/plugins/callback/test_callback.py
+++ b/test/units/plugins/callback/test_callback.py
@@ -25,7 +25,7 @@ import textwrap
import types
from units.compat import unittest
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
@@ -56,7 +56,7 @@ class TestCallback(unittest.TestCase):
def test_host_label(self):
result = TaskResult(host=Host('host1'), task=mock_task, return_data={})
- self.assertEquals(CallbackBase.host_label(result), 'host1')
+ self.assertEqual(CallbackBase.host_label(result), 'host1')
def test_host_label_delegated(self):
mock_task.delegate_to = 'host2'
@@ -65,7 +65,7 @@ class TestCallback(unittest.TestCase):
task=mock_task,
return_data={'_ansible_delegated_vars': {'ansible_host': 'host2'}},
)
- self.assertEquals(CallbackBase.host_label(result), 'host1 -> host2')
+ self.assertEqual(CallbackBase.host_label(result), 'host1 -> host2')
# TODO: import callback module so we can patch callback.cli/callback.C
diff --git a/test/units/plugins/connection/test_connection.py b/test/units/plugins/connection/test_connection.py
index 0f484e62..38d66910 100644
--- a/test/units/plugins/connection/test_connection.py
+++ b/test/units/plugins/connection/test_connection.py
@@ -20,14 +20,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
-import sys
-import pytest
-from units.compat import mock
from units.compat import unittest
-from units.compat.mock import MagicMock
-from units.compat.mock import patch
-from ansible.errors import AnsibleError
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ConnectionBase
from ansible.plugins.loader import become_loader
diff --git a/test/units/plugins/connection/test_psrp.py b/test/units/plugins/connection/test_psrp.py
index f6416751..73516cc6 100644
--- a/test/units/plugins/connection/test_psrp.py
+++ b/test/units/plugins/connection/test_psrp.py
@@ -10,7 +10,7 @@ import pytest
import sys
from io import StringIO
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader
diff --git a/test/units/plugins/connection/test_ssh.py b/test/units/plugins/connection/test_ssh.py
index 9b3e3c9d..e7f4dd12 100644
--- a/test/units/plugins/connection/test_ssh.py
+++ b/test/units/plugins/connection/test_ssh.py
@@ -27,7 +27,7 @@ import pytest
from ansible import constants as C
from ansible.errors import AnsibleAuthenticationFailure
from units.compat import unittest
-from units.compat.mock import patch, MagicMock, PropertyMock
+from mock import patch, MagicMock, PropertyMock
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils.compat.selectors import SelectorKey, EVENT_READ
from ansible.module_utils.six.moves import shlex_quote
diff --git a/test/units/plugins/connection/test_winrm.py b/test/units/plugins/connection/test_winrm.py
index e6bf9ad2..c3245ccb 100644
--- a/test/units/plugins/connection/test_winrm.py
+++ b/test/units/plugins/connection/test_winrm.py
@@ -6,11 +6,13 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import os
+
import pytest
from io import StringIO
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
@@ -255,8 +257,9 @@ class TestWinRMKerbAuth(object):
assert len(mock_calls) == 1
assert mock_calls[0][1] == expected
actual_env = mock_calls[0][2]['env']
- assert list(actual_env.keys()) == ['KRB5CCNAME']
+ assert sorted(list(actual_env.keys())) == ['KRB5CCNAME', 'PATH']
assert actual_env['KRB5CCNAME'].startswith("FILE:/")
+ assert actual_env['PATH'] == os.environ['PATH']
@pytest.mark.parametrize('options, expected', [
[{"_extras": {}},
@@ -287,8 +290,9 @@ class TestWinRMKerbAuth(object):
mock_calls = mock_pexpect.mock_calls
assert mock_calls[0][1] == expected
actual_env = mock_calls[0][2]['env']
- assert list(actual_env.keys()) == ['KRB5CCNAME']
+ assert sorted(list(actual_env.keys())) == ['KRB5CCNAME', 'PATH']
assert actual_env['KRB5CCNAME'].startswith("FILE:/")
+ assert actual_env['PATH'] == os.environ['PATH']
assert mock_calls[0][2]['echo'] is False
assert mock_calls[1][0] == "().expect"
assert mock_calls[1][1] == (".*:",)
diff --git a/test/units/plugins/filter/test_mathstuff.py b/test/units/plugins/filter/test_mathstuff.py
index d44a7146..f7938714 100644
--- a/test/units/plugins/filter/test_mathstuff.py
+++ b/test/units/plugins/filter/test_mathstuff.py
@@ -62,26 +62,6 @@ class TestSymmetricDifference:
assert sorted(ms.symmetric_difference(env, tuple(dataset1), tuple(dataset2))) == expected[2]
-class TestMin:
- def test_min(self):
- assert ms.min(env, (1, 2)) == 1
- assert ms.min(env, (2, 1)) == 1
- assert ms.min(env, ('p', 'a', 'w', 'b', 'p')) == 'a'
- assert ms.min(env, ({'key': 'a'}, {'key': 'b'}, {'key': 'c'}), attribute='key') == {'key': 'a'}
- assert ms.min(env, ({'key': 1}, {'key': 2}, {'key': 3}), attribute='key') == {'key': 1}
- assert ms.min(env, ('a', 'A', 'b', 'B'), case_sensitive=True) == 'A'
-
-
-class TestMax:
- def test_max(self):
- assert ms.max(env, (1, 2)) == 2
- assert ms.max(env, (2, 1)) == 2
- assert ms.max(env, ('p', 'a', 'w', 'b', 'p')) == 'w'
- assert ms.max(env, ({'key': 'a'}, {'key': 'b'}, {'key': 'c'}), attribute='key') == {'key': 'c'}
- assert ms.max(env, ({'key': 1}, {'key': 2}, {'key': 3}), attribute='key') == {'key': 3}
- assert ms.max(env, ('a', 'A', 'b', 'B'), case_sensitive=True) == 'b'
-
-
class TestLogarithm:
def test_log_non_number(self):
# Message changed in python3.6
diff --git a/test/units/plugins/inventory/test_inventory.py b/test/units/plugins/inventory/test_inventory.py
index 66b5ec37..08148f8b 100644
--- a/test/units/plugins/inventory/test_inventory.py
+++ b/test/units/plugins/inventory/test_inventory.py
@@ -22,8 +22,9 @@ __metaclass__ = type
import string
import textwrap
+import mock
+
from ansible import constants as C
-from units.compat import mock
from units.compat import unittest
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_text
diff --git a/test/units/plugins/inventory/test_script.py b/test/units/plugins/inventory/test_script.py
index 5f054813..1a00946c 100644
--- a/test/units/plugins/inventory/test_script.py
+++ b/test/units/plugins/inventory/test_script.py
@@ -22,11 +22,11 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
+import mock
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.loader import PluginLoader
-from units.compat import mock
from units.compat import unittest
from ansible.module_utils._text import to_bytes, to_native
diff --git a/test/units/plugins/lookup/test_password.py b/test/units/plugins/lookup/test_password.py
index f6cf10d1..c496ee6e 100644
--- a/test/units/plugins/lookup/test_password.py
+++ b/test/units/plugins/lookup/test_password.py
@@ -32,7 +32,7 @@ import pytest
from units.mock.loader import DictDataLoader
from units.compat import unittest
-from units.compat.mock import mock_open, patch
+from mock import mock_open, patch
from ansible.errors import AnsibleError
from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import builtins
diff --git a/test/units/plugins/strategy/test_linear.py b/test/units/plugins/strategy/test_linear.py
index 74887030..3bce4856 100644
--- a/test/units/plugins/strategy/test_linear.py
+++ b/test/units/plugins/strategy/test_linear.py
@@ -7,7 +7,7 @@ __metaclass__ = type
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.executor.play_iterator import PlayIterator
from ansible.playbook import Playbook
diff --git a/test/units/plugins/strategy/test_strategy.py b/test/units/plugins/strategy/test_strategy.py
index 6b60e692..750e8069 100644
--- a/test/units/plugins/strategy/test_strategy.py
+++ b/test/units/plugins/strategy/test_strategy.py
@@ -23,12 +23,13 @@ from units.mock.loader import DictDataLoader
import uuid
from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
from ansible.module_utils.six.moves import queue as Queue
+from ansible.playbook.block import Block
from ansible.playbook.handler import Handler
from ansible.plugins.strategy import StrategyBase
@@ -464,7 +465,15 @@ class TestStrategyBase(unittest.TestCase):
mock_task = MagicMock()
mock_task._block = mock_block
mock_task._role = None
- mock_task._parent = None
+
+ # NOTE Mocking calls below to account for passing parent_block=ti_copy.build_parent_block()
+ # into load_list_of_blocks() in _load_included_file. Not doing so meant that retrieving
+ # `collection` attr from parent would result in getting MagicMock instance
+ # instead of an empty list.
+ mock_task._parent = MagicMock()
+ mock_task.copy.return_value = mock_task
+ mock_task.build_parent_block.return_value = mock_block
+ mock_block._get_parent_attribute.return_value = None
mock_iterator = MagicMock()
mock_iterator.mark_host_failed.return_value = None
@@ -474,6 +483,8 @@ class TestStrategyBase(unittest.TestCase):
mock_inc_file._filename = "test.yml"
res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
+ self.assertEqual(len(res), 1)
+ self.assertTrue(isinstance(res[0], Block))
mock_inc_file._filename = "bad.yml"
res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
diff --git a/test/units/plugins/test_plugins.py b/test/units/plugins/test_plugins.py
index c9d80cda..975fa420 100644
--- a/test/units/plugins/test_plugins.py
+++ b/test/units/plugins/test_plugins.py
@@ -23,8 +23,7 @@ __metaclass__ = type
import os
from units.compat import unittest
-from units.compat.builtins import BUILTINS
-from units.compat.mock import patch, MagicMock
+from mock import patch, MagicMock
from ansible.plugins.loader import PluginLoader, PluginPathContext
@@ -54,7 +53,7 @@ class TestErrors(unittest.TestCase):
bar.bam = bam
foo.return_value.bar = bar
pl = PluginLoader('test', 'foo.bar.bam', 'test', 'test_plugin')
- with patch('{0}.__import__'.format(BUILTINS), foo):
+ with patch('builtins.__import__', foo):
self.assertEqual(pl._get_package_paths(), ['/path/to/my/foo/bar/bam'])
def test_plugins__get_paths(self):
diff --git a/test/units/template/test_native_concat.py b/test/units/template/test_native_concat.py
index 4164bc45..ee1b7df1 100644
--- a/test/units/template/test_native_concat.py
+++ b/test/units/template/test_native_concat.py
@@ -5,45 +5,21 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import importlib
-import sys
-
-import pytest
-
-from ansible import constants as C
-from ansible.errors import AnsibleUndefinedVariable
from ansible.playbook.conditional import Conditional
+from ansible.template import Templar
from units.mock.loader import DictDataLoader
-@pytest.fixture
-def native_template_mod(monkeypatch):
- monkeypatch.delitem(sys.modules, 'ansible.template')
- monkeypatch.setattr(C, 'DEFAULT_JINJA2_NATIVE', True)
- return importlib.import_module('ansible.template')
-
-
-# https://github.com/ansible/ansible/issues/52158
-def test_undefined_variable(native_template_mod):
- fake_loader = DictDataLoader({})
- variables = {}
- templar = native_template_mod.Templar(loader=fake_loader, variables=variables)
- assert isinstance(templar.environment, native_template_mod.AnsibleNativeEnvironment)
-
- with pytest.raises(AnsibleUndefinedVariable):
- templar.template("{{ missing }}")
-
-
-def test_cond_eval(native_template_mod):
+def test_cond_eval():
fake_loader = DictDataLoader({})
# True must be stored in a variable to trigger templating. Using True
# directly would be caught by optimization for bools to short-circuit
# templating.
variables = {"foo": True}
- templar = native_template_mod.Templar(loader=fake_loader, variables=variables)
- assert isinstance(templar.environment, native_template_mod.AnsibleNativeEnvironment)
-
+ templar = Templar(loader=fake_loader, variables=variables)
cond = Conditional(loader=fake_loader)
cond.when = ["foo"]
- assert cond.evaluate_conditional(templar, variables)
+
+ with templar.set_temporary_context(jinja2_native=True):
+ assert cond.evaluate_conditional(templar, variables)
diff --git a/test/units/template/test_safe_eval.py b/test/units/template/test_safe_eval.py
deleted file mode 100644
index 89ff8a0e..00000000
--- a/test/units/template/test_safe_eval.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import sys
-from collections import defaultdict
-
-from units.compat import unittest
-from ansible.template.safe_eval import safe_eval
-
-
-class TestSafeEval(unittest.TestCase):
-
- def test_safe_eval_usage(self):
- # test safe eval calls with different possible types for the
- # locals dictionary, to ensure we don't run into problems like
- # ansible/ansible/issues/12206 again
- for locals_vars in (dict(), defaultdict(dict)):
- self.assertEqual(safe_eval('True', locals=locals_vars), True)
- self.assertEqual(safe_eval('False', locals=locals_vars), False)
- self.assertEqual(safe_eval('0', locals=locals_vars), 0)
- self.assertEqual(safe_eval('[]', locals=locals_vars), [])
- self.assertEqual(safe_eval('{}', locals=locals_vars), {})
-
- @unittest.skipUnless(sys.version_info[:2] >= (2, 7), "Python 2.6 has no set literals")
- def test_set_literals(self):
- self.assertEqual(safe_eval('{0}'), set([0]))
diff --git a/test/units/template/test_templar.py b/test/units/template/test_templar.py
index dd6985ce..e922f95f 100644
--- a/test/units/template/test_templar.py
+++ b/test/units/template/test_templar.py
@@ -22,7 +22,7 @@ __metaclass__ = type
from jinja2.runtime import Context
from units.compat import unittest
-from units.compat.mock import patch
+from mock import patch
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
@@ -187,8 +187,7 @@ class TestTemplarTemplate(BaseTemplar, unittest.TestCase):
self.assertTrue(res)
self.assertEqual(res, 'bar')
- @patch('ansible.template.safe_eval', side_effect=AnsibleError)
- def test_template_convert_data_template_in_data(self, mock_safe_eval):
+ def test_template_convert_data_template_in_data(self):
res = self.templar.template('{{bam}}', convert_data=True)
self.assertTrue(res)
self.assertEqual(res, 'bar')
@@ -220,10 +219,10 @@ class TestTemplarTemplate(BaseTemplar, unittest.TestCase):
def test_weird(self):
data = u'''1 2 #}huh{# %}ddfg{% }}dfdfg{{ {%what%} {{#foo#}} {%{bar}%} {#%blip%#} {{asdfsd%} 3 4 {{foo}} 5 6 7'''
- self.assertRaisesRegexp(AnsibleError,
- 'template error while templating string',
- self.templar.template,
- data)
+ self.assertRaisesRegex(AnsibleError,
+ 'template error while templating string',
+ self.templar.template,
+ data)
def test_template_with_error(self):
"""Check that AnsibleError is raised, fail if an unhandled exception is raised"""
@@ -298,21 +297,21 @@ class TestTemplarMisc(BaseTemplar, unittest.TestCase):
class TestTemplarLookup(BaseTemplar, unittest.TestCase):
def test_lookup_missing_plugin(self):
- self.assertRaisesRegexp(AnsibleError,
- r'lookup plugin \(not_a_real_lookup_plugin\) not found',
- self.templar._lookup,
- 'not_a_real_lookup_plugin',
- 'an_arg', a_keyword_arg='a_keyword_arg_value')
+ self.assertRaisesRegex(AnsibleError,
+ r'lookup plugin \(not_a_real_lookup_plugin\) not found',
+ self.templar._lookup,
+ 'not_a_real_lookup_plugin',
+ 'an_arg', a_keyword_arg='a_keyword_arg_value')
def test_lookup_list(self):
res = self.templar._lookup('list', 'an_arg', 'another_arg')
self.assertEqual(res, 'an_arg,another_arg')
def test_lookup_jinja_undefined(self):
- self.assertRaisesRegexp(AnsibleUndefinedVariable,
- "'an_undefined_jinja_var' is undefined",
- self.templar._lookup,
- 'list', '{{ an_undefined_jinja_var }}')
+ self.assertRaisesRegex(AnsibleUndefinedVariable,
+ "'an_undefined_jinja_var' is undefined",
+ self.templar._lookup,
+ 'list', '{{ an_undefined_jinja_var }}')
def test_lookup_jinja_defined(self):
res = self.templar._lookup('list', '{{ some_var }}')
@@ -320,18 +319,18 @@ class TestTemplarLookup(BaseTemplar, unittest.TestCase):
# self.assertIsInstance(res, AnsibleUnsafe)
def test_lookup_jinja_dict_string_passed(self):
- self.assertRaisesRegexp(AnsibleError,
- "with_dict expects a dict",
- self.templar._lookup,
- 'dict',
- '{{ some_var }}')
+ self.assertRaisesRegex(AnsibleError,
+ "with_dict expects a dict",
+ self.templar._lookup,
+ 'dict',
+ '{{ some_var }}')
def test_lookup_jinja_dict_list_passed(self):
- self.assertRaisesRegexp(AnsibleError,
- "with_dict expects a dict",
- self.templar._lookup,
- 'dict',
- ['foo', 'bar'])
+ self.assertRaisesRegex(AnsibleError,
+ "with_dict expects a dict",
+ self.templar._lookup,
+ 'dict',
+ ['foo', 'bar'])
def test_lookup_jinja_kwargs(self):
res = self.templar._lookup('list', 'blip', random_keyword='12345')
@@ -343,12 +342,12 @@ class TestTemplarLookup(BaseTemplar, unittest.TestCase):
self.assertEqual(res, ["blip"])
def test_lookup_jinja_list_wantlist_undefined(self):
- self.assertRaisesRegexp(AnsibleUndefinedVariable,
- "'some_undefined_var' is undefined",
- self.templar._lookup,
- 'list',
- '{{ some_undefined_var }}',
- wantlist=True)
+ self.assertRaisesRegex(AnsibleUndefinedVariable,
+ "'some_undefined_var' is undefined",
+ self.templar._lookup,
+ 'list',
+ '{{ some_undefined_var }}',
+ wantlist=True)
def test_lookup_jinja_list_wantlist_unsafe(self):
res = self.templar._lookup('list', '{{ some_unsafe_var }}', wantlist=True)
@@ -444,3 +443,28 @@ class TestAnsibleContext(BaseTemplar, unittest.TestCase):
def test_is_unsafe(self):
context = self._context()
self.assertFalse(context._is_unsafe(AnsibleUndefined()))
+
+
+def test_unsafe_lookup():
+ res = Templar(
+ None,
+ variables={
+ 'var0': '{{ var1 }}',
+ 'var1': ['unsafe'],
+ }
+ ).template('{{ lookup("list", var0) }}')
+ assert getattr(res[0], '__UNSAFE__', False)
+
+
+def test_unsafe_lookup_no_conversion():
+ res = Templar(
+ None,
+ variables={
+ 'var0': '{{ var1 }}',
+ 'var1': ['unsafe'],
+ }
+ ).template(
+ '{{ lookup("list", var0) }}',
+ convert_data=False,
+ )
+ assert getattr(res, '__UNSAFE__', False)
diff --git a/test/units/template/test_vars.py b/test/units/template/test_vars.py
index 74e67839..3e04ba2f 100644
--- a/test/units/template/test_vars.py
+++ b/test/units/template/test_vars.py
@@ -20,7 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
-from units.compat.mock import MagicMock
+from mock import MagicMock
from ansible.template.vars import AnsibleJ2Vars
@@ -29,53 +29,13 @@ class TestVars(unittest.TestCase):
def setUp(self):
self.mock_templar = MagicMock(name='mock_templar')
- def test(self):
- ajvars = AnsibleJ2Vars(None, None)
- print(ajvars)
-
- def test_globals_empty_2_8(self):
- ajvars = AnsibleJ2Vars(self.mock_templar, {})
- res28 = self._dict_jinja28(ajvars)
- self.assertIsInstance(res28, dict)
-
- def test_globals_empty_2_9(self):
+ def test_globals_empty(self):
ajvars = AnsibleJ2Vars(self.mock_templar, {})
- res29 = self._dict_jinja29(ajvars)
- self.assertIsInstance(res29, dict)
+ res = dict(ajvars)
+ self.assertIsInstance(res, dict)
- def _assert_globals(self, res):
+ def test_globals(self):
+ res = dict(AnsibleJ2Vars(self.mock_templar, {'foo': 'bar', 'blip': [1, 2, 3]}))
self.assertIsInstance(res, dict)
self.assertIn('foo', res)
self.assertEqual(res['foo'], 'bar')
-
- def test_globals_2_8(self):
- ajvars = AnsibleJ2Vars(self.mock_templar, {'foo': 'bar', 'blip': [1, 2, 3]})
- res28 = self._dict_jinja28(ajvars)
- self._assert_globals(res28)
-
- def test_globals_2_9(self):
- ajvars = AnsibleJ2Vars(self.mock_templar, {'foo': 'bar', 'blip': [1, 2, 3]})
- res29 = self._dict_jinja29(ajvars)
- self._assert_globals(res29)
-
- def _dicts(self, ajvars):
- print(ajvars)
- res28 = self._dict_jinja28(ajvars)
- res29 = self._dict_jinja29(ajvars)
- # res28_other = self._dict_jinja28(ajvars, {'other_key': 'other_value'})
- # other = {'other_key': 'other_value'}
- # res29_other = self._dict_jinja29(ajvars, *other)
- print('res28: %s' % res28)
- print('res29: %s' % res29)
- # print('res28_other: %s' % res28_other)
- # print('res29_other: %s' % res29_other)
- # return (res28, res29, res28_other, res29_other)
- # assert ajvars == res28
- # assert ajvars == res29
- return (res28, res29)
-
- def _dict_jinja28(self, *args, **kwargs):
- return dict(*args, **kwargs)
-
- def _dict_jinja29(self, the_vars):
- return dict(the_vars)
diff --git a/test/units/utils/collection_loader/test_collection_loader.py b/test/units/utils/collection_loader/test_collection_loader.py
index 425f770c..3ae04cbd 100644
--- a/test/units/utils/collection_loader/test_collection_loader.py
+++ b/test/units/utils/collection_loader/test_collection_loader.py
@@ -9,6 +9,7 @@ import sys
from ansible.module_utils.six import PY3, string_types
from ansible.module_utils.compat.importlib import import_module
+from ansible.modules import ping as ping_module
from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
from ansible.utils.collection_loader._collection_finder import (
_AnsibleCollectionFinder, _AnsibleCollectionLoader, _AnsibleCollectionNSPkgLoader, _AnsibleCollectionPkgLoader,
@@ -16,7 +17,7 @@ from ansible.utils.collection_loader._collection_finder import (
_get_collection_name_from_path, _get_collection_role_path, _get_collection_metadata, _iter_modules_impl
)
from ansible.utils.collection_loader._collection_config import _EventSource
-from units.compat.mock import MagicMock, NonCallableMagicMock, patch
+from mock import MagicMock, NonCallableMagicMock, patch
# fixture to ensure we always clean up the import stuff when we're done
@@ -28,6 +29,17 @@ def teardown(*args, **kwargs):
# BEGIN STANDALONE TESTS - these exercise behaviors of the individual components without the import machinery
+@pytest.mark.skipif(not PY3, reason='Testing Python 2 codepath (find_module) on Python 3')
+def test_find_module_py3():
+ dir_to_a_file = os.path.dirname(ping_module.__file__)
+ path_hook_finder = _AnsiblePathHookFinder(_AnsibleCollectionFinder(), dir_to_a_file)
+
+ # setuptools may fall back to find_module on Python 3 if find_spec returns None
+ # see https://github.com/pypa/setuptools/pull/2918
+ assert path_hook_finder.find_spec('missing') is None
+ assert path_hook_finder.find_module('missing') is None
+
+
def test_finder_setup():
# ensure scalar path is listified
f = _AnsibleCollectionFinder(paths='/bogus/bogus')
diff --git a/test/units/utils/display/test_broken_cowsay.py b/test/units/utils/display/test_broken_cowsay.py
new file mode 100644
index 00000000..e93065d8
--- /dev/null
+++ b/test/units/utils/display/test_broken_cowsay.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+
+from ansible.utils.display import Display
+from mock import MagicMock
+
+
+def test_display_with_fake_cowsay_binary(capsys, mocker):
+ mocker.patch("ansible.constants.ANSIBLE_COW_PATH", "./cowsay.sh")
+
+ def mock_communicate(input=None, timeout=None):
+ return b"", b""
+
+ mock_popen = MagicMock()
+ mock_popen.return_value.communicate = mock_communicate
+ mock_popen.return_value.returncode = 1
+ mocker.patch("subprocess.Popen", mock_popen)
+
+ display = Display()
+ assert not hasattr(display, "cows_available")
+ assert display.b_cowsay is None
diff --git a/test/units/utils/test_display.py b/test/units/utils/test_display.py
index 1e73c2ad..8807b816 100644
--- a/test/units/utils/test_display.py
+++ b/test/units/utils/test_display.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-from units.compat.mock import MagicMock
+from mock import MagicMock
import pytest
diff --git a/test/units/utils/test_vars.py b/test/units/utils/test_vars.py
index c92ce4b6..1df0eab3 100644
--- a/test/units/utils/test_vars.py
+++ b/test/units/utils/test_vars.py
@@ -22,7 +22,9 @@ __metaclass__ = type
from collections import defaultdict
-from units.compat import mock, unittest
+import mock
+
+from units.compat import unittest
from ansible.errors import AnsibleError
from ansible.utils.vars import combine_vars, merge_hash
diff --git a/test/units/vars/test_variable_manager.py b/test/units/vars/test_variable_manager.py
index 65a79286..fa68fd3b 100644
--- a/test/units/vars/test_variable_manager.py
+++ b/test/units/vars/test_variable_manager.py
@@ -22,7 +22,7 @@ __metaclass__ = type
import os
from units.compat import unittest
-from units.compat.mock import MagicMock, patch
+from mock import MagicMock, patch
from ansible.inventory.manager import InventoryManager
from ansible.module_utils.six import iteritems
from ansible.playbook.play import Play